├── pors.sh
├── docs
├── html
├── main.png
├── Doxyfile_extra
└── generate.sh
├── playbooks
├── pors
│ ├── roles
│ ├── setup.yml
│ └── configure.yml
├── system
│ ├── roles
│ ├── callback_plugins
│ ├── ae_patch_datetime.yml
│ ├── ae_sudoers_disabletty.yml
│ ├── ae_checksplunkversion.yml
│ ├── ae_system_yum.yml
│ ├── ae_system_journald.yml
│ ├── ae_system_reboot.yml
│ ├── ae_configure_journal.yml
│ ├── dnsmasq.yml
│ ├── ae_create_useracc.yml
│ ├── system_upgrade_rebootcheck.yml
│ ├── disk_format.yml
│ ├── manage_unattended_updates.yml
│ ├── ae_system_tuned.yml
│ ├── delete_lnxuser_account.yml
│ ├── ae_system_upgrade.yml
│ ├── ae_ssh.yml
│ ├── ae_system_base.yml
│ ├── ae_download_openvmtools.yml
│ └── ae_manage_splunkuser.yml
├── cribl
│ ├── callback_plugins
│ ├── group_vars
│ │ └── all
│ ├── ae_install_logstream.yml
│ ├── ae_upgrade_logstream.yml
│ ├── ae_download_logstream.yml
│ └── configure_logstream.yml
├── googlecp
│ ├── callback_plugins
│ ├── gcp_deploy_instance.yml
│ └── gcp_get_ids.yml
├── proxmox
│ ├── callback_plugins
│ ├── px_configure_vm.yml
│ ├── px_add_disk.yml
│ ├── px_create_vm.yml
│ ├── px_get_nodes.yml
│ └── px_destroy_vm.yml
├── shelper
│ ├── callback_plugins
│ ├── ae_upgrade_shelper.yml
│ └── ae_install_shelper.yml
├── splunk
│ ├── .gitignore
│ ├── callback_plugins
│ ├── ae_enable_app.yml
│ ├── fetch_files.yml
│ ├── deploy_splunk_pubpem.yml
│ ├── configure_monitoringroles.yml
│ ├── ae_deploy_systemconfigs.yml
│ ├── ae_download_splunk.yml
│ ├── backup_splunk.yml
│ ├── ae_renewcerts.yml
│ ├── add_to_monitoring.yml
│ ├── configure_monitoringconsole.yml
│ ├── deploy_pors_splunk_user.yml
│ ├── deploy_monitoringkeys.yml
│ ├── configure_licensemaster.yml
│ ├── ae_install_splunk.yml
│ ├── configure_deployer.yml
│ ├── ae_configure_ix_peernode.yml
│ ├── configure_deploymentserver.yml
│ ├── splunk_actions.yml
│ ├── ae_configure_heavyforwarder.yml
│ ├── ae_configure_masternode.yml
│ ├── ae_configure_searchhead.yml
│ └── ae_upgrade_splunk.yml
├── vmware
│ ├── callback_plugins
│ ├── ae_install_openvmtools.yml
│ ├── vcenter_always_poweron-vm.yml
│ ├── vcenter_login.yml
│ ├── vcenter_show-avail.yml
│ ├── ae_download_openvmtools.yml
│ └── vcenter_poweron-vm.yml
└── local
│ ├── templates
│ └── getvar.yml.j2
│ └── get_ansible_var.yml
├── roles
├── apps
│ ├── defaults
│ ├── files
│ │ ├── README
│ │ ├── apps
│ │ │ ├── local
│ │ │ │ └── README
│ │ │ └── metadata
│ │ │ │ └── README
│ │ ├── master-apps
│ │ │ ├── local
│ │ │ │ └── README
│ │ │ └── metadata
│ │ │ │ └── README
│ │ ├── deployment-apps
│ │ │ ├── local
│ │ │ │ └── README
│ │ │ └── metadata
│ │ │ │ └── README
│ │ └── shcluster
│ │ │ └── apps
│ │ │ ├── local
│ │ │ └── README
│ │ │ └── metadata
│ │ │ └── README
│ ├── app.template
│ │ ├── files
│ │ │ ├── README
│ │ │ ├── apps
│ │ │ │ ├── local
│ │ │ │ │ └── README
│ │ │ │ └── metadata
│ │ │ │ │ └── README
│ │ │ ├── manager-apps
│ │ │ │ ├── local
│ │ │ │ │ └── README
│ │ │ │ └── metadata
│ │ │ │ │ └── README
│ │ │ ├── master-apps
│ │ │ │ ├── local
│ │ │ │ │ └── README
│ │ │ │ └── metadata
│ │ │ │ │ └── README
│ │ │ ├── shcluster
│ │ │ │ └── apps
│ │ │ │ │ ├── local
│ │ │ │ │ └── README
│ │ │ │ │ └── metadata
│ │ │ │ │ └── README
│ │ │ └── deployment-apps
│ │ │ │ ├── local
│ │ │ │ └── README
│ │ │ │ └── metadata
│ │ │ │ └── README
│ │ ├── vars
│ │ │ ├── main.yml.j2
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ ├── remove_app.yml
│ │ │ ├── set_permissions.yml
│ │ │ └── git_checkout.yml
│ ├── vars
│ │ ├── main.yml.j2
│ │ └── main.yml
│ └── tasks
│ │ ├── remove_app.yml
│ │ ├── set_permissions.yml
│ │ └── git_checkout.yml
├── common
│ ├── defaults
│ ├── handlers
│ │ ├── main.yml
│ │ ├── cleanup.yml
│ │ └── cribl.yml
│ └── templates
│ │ ├── deploy_apps.yml.j2
│ │ ├── ae_deploy_apps.yml.j2
│ │ └── add_app_groupvars.j2
├── conf
│ ├── defaults
│ ├── ldap
│ │ ├── defaults
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ └── configure.yml
│ ├── web
│ │ ├── defaults
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ └── touch.yml
│ ├── inputs
│ │ ├── defaults
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ └── main.yml
│ ├── outputs
│ │ ├── defaults
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ ├── touch.yml
│ │ │ └── main.yml
│ ├── server
│ │ ├── defaults
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ └── touch.yml
│ ├── distsearch
│ │ ├── defaults
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ └── touch.yml
│ └── user
│ │ └── tasks
│ │ ├── main.yml
│ │ └── add.yml
├── group
│ ├── defaults
│ ├── licensemaster
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ ├── copy_enterprise_license_keys.yml
│ │ │ └── distribute_distsearch_trustedkey.yml
│ ├── searchhead
│ │ └── tasks
│ │ │ └── main.yml
│ └── shcmember
│ │ └── tasks
│ │ ├── main.yml
│ │ ├── init.yml
│ │ ├── state.yml
│ │ ├── restart_shcmember.yml
│ │ └── bootstrap.yml
├── install
│ ├── defaults
│ ├── templates
│ │ └── etc
│ │ │ └── auth
│ │ │ └── splunk.secret.j2
│ └── tasks
│ │ ├── set_sepolicy.yml
│ │ ├── package.yml
│ │ ├── post_actions.yml
│ │ ├── sudoperms.yml
│ │ ├── cleanup.yml
│ │ ├── add_authorized_key.yml
│ │ ├── firewalld_open_splunkports.yml
│ │ ├── splunk_service_stop.yml
│ │ ├── splunk_service_start.yml
│ │ ├── transfer_pkg.yml
│ │ ├── set_splunk_secret.yml
│ │ ├── operation_mode.yml
│ │ ├── fetch_distserverkeys.yml
│ │ ├── firewalld.yml
│ │ ├── splunk_startstop.yml
│ │ ├── splunk_login.yml
│ │ ├── sys_check.yml
│ │ ├── splunk_start.yml
│ │ ├── install_splunk.yml
│ │ └── splunk_checks.yml
├── proxmox
│ ├── defaults
│ ├── tasks
│ │ ├── kvm_reboot.yml
│ │ ├── kvm_add_disk.yml
│ │ ├── kvm_poweron.yml
│ │ ├── ct_create.yml
│ │ └── main.yml
│ └── templates
│ │ └── ci_network.j2
├── shelper
│ ├── defaults
│ └── tasks
│ │ ├── testing.yml
│ │ ├── prereq.yml
│ │ ├── remove_app.yml
│ │ └── main.yml
├── system
│ ├── defaults
│ └── template
│ │ └── system_local_configs
│ │ ├── files
│ │ ├── README
│ │ └── system
│ │ │ └── local
│ │ │ ├── 00_ANSIBLE_FILEMANAGED.conf
│ │ │ └── zz_ANSIBLE_FILEMANAGED.conf
│ │ ├── vars
│ │ ├── main.yml.j2
│ │ └── main.yml
│ │ └── tasks
│ │ ├── main.yml
│ │ ├── push_conf.yml
│ │ ├── configure_local.yml
│ │ └── inject_myfiles.yml
├── upgrade
│ ├── defaults
│ ├── templates
│ │ └── etc
│ │ │ └── auth
│ │ │ └── splunk.secret.j2
│ └── tasks
│ │ ├── splunk_stop.yml
│ │ ├── splunk_start.yml
│ │ ├── shc_detention.yml
│ │ ├── transfer_splpkg.yml
│ │ ├── main.yml
│ │ ├── enable_boot_start_splunk.yml
│ │ └── backup_splunk.yml
├── common_tasks
│ ├── defaults
│ └── tasks
│ │ ├── packages.yml
│ │ ├── gather_facts.yml
│ │ ├── set_touch_dir.yml
│ │ ├── main.yml
│ │ ├── check_ssh.yml
│ │ ├── check_sudo.yml
│ │ └── monitoring.yml
├── googlecp
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ ├── gcp_configure_address.yml
│ │ └── gcp_create_instance.yml
├── local_tmpdir
│ ├── defaults
│ └── tasks
│ │ └── main.yml
├── openvmtools
│ ├── defaults
│ └── tasks
│ │ └── main.yml
├── pors
│ ├── ssh
│ │ ├── defaults
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ └── create_ssh_key.yml
│ ├── setup
│ │ ├── defaults
│ │ ├── vars
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ ├── base_dirs.yml
│ │ │ ├── finish.yml
│ │ │ ├── user_setup_root.yml
│ │ │ ├── main.yml
│ │ │ ├── user_setup.yml
│ │ │ ├── requirements.yml
│ │ │ └── check_sudo.yml
│ └── configure
│ │ └── tasks
│ │ ├── main.yml
│ │ ├── ask_github.yml
│ │ └── ask_pors_repo.yml
├── splunk_info
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ └── latest_manifest.yml
├── splunk_sites
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ ├── get_deployer_sites.yml
│ │ ├── get_peernode_sites.yml
│ │ └── get_shcmember_sites.yml
├── ssh
│ └── check
│ │ ├── defaults
│ │ └── tasks
│ │ ├── main.yml
│ │ └── check_ssh.yml
├── system_base
│ ├── defaults
│ └── tasks
│ │ ├── set_hostname.yml
│ │ ├── set_dns_domain.yml
│ │ ├── set_dns_servers.yml
│ │ ├── unattended_upgrade.yml
│ │ ├── disable_unattended_updates.yml
│ │ ├── update_package_db.yml
│ │ └── main.yml
├── system_disk
│ ├── defaults
│ └── tasks
│ │ ├── identify_disk.yml
│ │ ├── create_swap_nolvm.yml
│ │ └── main.yml
├── system_prep
│ ├── defaults
│ └── tasks
│ │ ├── regen_initrd.yml
│ │ ├── regen_machine_id.yml
│ │ ├── main.yml
│ │ ├── update_rhsm.yml
│ │ └── regen_ssh_host_keys.yml
├── system_sshid
│ ├── defaults
│ └── tasks
│ │ ├── add_authkey.yml
│ │ ├── main.yml
│ │ ├── update_authkey.yml
│ │ └── add_privkey.yml
├── system_sudo
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ └── disable_requiretty.yml
├── system_user
│ ├── defaults
│ └── tasks
│ │ ├── become_root_test.yml
│ │ ├── ssh_authorized_keys.yml
│ │ ├── login_test.yml
│ │ ├── nopass_sudoers.yml
│ │ └── delete.yml
├── system_yum
│ ├── defaults
│ └── tasks
│ │ ├── update.yml
│ │ ├── install_elrepo_kernel.yml
│ │ └── main.yml
├── install_logstream
│ ├── defaults
│ └── tasks
│ │ ├── optimize_selinux.yml
│ │ ├── transfer_pkg.yml
│ │ ├── service_stop.yml
│ │ ├── set_perms.yml
│ │ ├── sudoperms.yml
│ │ ├── backup.yml
│ │ ├── add_user.yml
│ │ ├── install.yml
│ │ ├── service_checks.yml
│ │ ├── firewalld.yml
│ │ └── service_start.yml
├── logstream_info
│ ├── defaults
│ └── tasks
│ │ └── main.yml
├── patch_datetime
│ ├── defaults
│ └── tasks
│ │ ├── push_patch.yml
│ │ ├── main.yml
│ │ └── post_patch.yml
├── splunk_download
│ ├── defaults
│ └── tasks
│ │ └── main.yml
├── system_dnsmasq
│ ├── defaults
│ ├── tasks
│ │ ├── install.yml
│ │ ├── main.yml
│ │ ├── activate.yml
│ │ └── configure.yml
│ └── templates
│ │ └── networkmanager.conf.j2
├── system_journal
│ ├── defaults
│ └── handlers
│ │ └── main.yml
├── system_optimize
│ ├── defaults
│ ├── templates
│ │ └── etc
│ │ │ └── auth
│ │ │ └── splunk.secret.j2
│ └── tasks
│ │ ├── post_actions.yml
│ │ ├── optimize_selinux.yml
│ │ ├── set_perms.yml
│ │ ├── main.yml
│ │ ├── firewalld.yml
│ │ └── sys_check.yml
├── system_reboot
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ └── reboot.yml
├── system_splunkcert
│ ├── defaults
│ ├── vars
│ │ └── main.yml
│ └── tasks
│ │ ├── splunk_stop.yml
│ │ ├── splunk_start.yml
│ │ ├── renew_server_cert.yml
│ │ ├── renew_web_cert.yml
│ │ ├── main.yml
│ │ ├── check_web_enabled.yml
│ │ └── check_certage.yml
├── system_upgrade
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ └── update.yml
├── app_link
│ ├── defaults
│ └── tasks
│ │ ├── main.yml
│ │ └── tmplcheck.yml
├── logstream_configure
│ ├── defaults
│ ├── handlers
│ ├── tasks
│ │ ├── main.yml
│ │ ├── configure_leader.yml
│ │ └── configure_worker.yml
│ └── templates
│ │ ├── worker.j2
│ │ └── leader.j2
├── system_ntp
│ ├── templates
│ │ └── clock.j2
│ ├── .ansible-lint
│ ├── .gitignore
│ ├── .github
│ │ └── FUNDING.yml
│ ├── .yamllint
│ ├── vars
│ │ ├── Debian.yml
│ │ ├── FreeBSD.yml
│ │ ├── Archlinux.yml
│ │ ├── RedHat.yml
│ │ └── Suse.yml
│ ├── handlers
│ │ └── main.yml
│ ├── molecule
│ │ └── default
│ │ │ ├── converge.yml
│ │ │ └── molecule.yml
│ ├── defaults
│ │ └── main.yml
│ └── meta
│ │ └── main.yml
├── fetchfiles
│ └── tasks
│ │ ├── main.yml
│ │ └── fetch.yml
├── requirements_gcp.yml
├── requirements.yml
└── system_boot
│ └── tasks
│ └── main.yml
├── filter_plugins
├── .gitignore
├── app_default.py
├── app_role_list.py
├── create_distsearch_serverlist.py
├── create_distsearch_servers.py
└── create_shcluster_mgmt_uri_servers.py
├── EXAMPLES
├── inventories
│ ├── staging
│ │ ├── roles
│ │ ├── group_vars
│ │ │ ├── all
│ │ │ │ ├── README
│ │ │ │ ├── custom_tasks.yml
│ │ │ │ ├── web.conf.yml
│ │ │ │ ├── idx_clustering.yml
│ │ │ │ ├── distsearch.conf.yml
│ │ │ │ ├── splunk_auth.yml
│ │ │ │ ├── system_journald.yml
│ │ │ │ ├── splunk_repository.yml
│ │ │ │ ├── outputs.conf.yml
│ │ │ │ ├── cribl_packages.yml
│ │ │ │ └── deployment.yml
│ │ │ ├── site0
│ │ │ │ └── README
│ │ │ ├── site1
│ │ │ │ └── README
│ │ │ ├── site2
│ │ │ │ └── README
│ │ │ ├── site3
│ │ │ │ └── README
│ │ │ ├── masternode
│ │ │ │ ├── README
│ │ │ │ └── deployment.yml
│ │ │ ├── searchhead
│ │ │ │ └── README
│ │ │ ├── site1_deployer
│ │ │ │ ├── README
│ │ │ │ ├── deployment.yml
│ │ │ │ └── server.conf.yml
│ │ │ ├── site1_peernode
│ │ │ │ ├── README
│ │ │ │ ├── web.conf.yml
│ │ │ │ ├── deployment.yml
│ │ │ │ └── inputs.conf.yml
│ │ │ ├── licensemaster
│ │ │ │ ├── README
│ │ │ │ └── server.conf.yml
│ │ │ ├── site1_shcmember
│ │ │ │ ├── README
│ │ │ │ ├── distsearch.conf.yml
│ │ │ │ └── deployment.yml
│ │ │ ├── deploymentserver
│ │ │ │ ├── README
│ │ │ │ └── deployment.yml
│ │ │ ├── monitoringconsole
│ │ │ │ ├── README
│ │ │ │ └── distsearch.conf
│ │ │ ├── site1_heavyforwarder
│ │ │ │ ├── README
│ │ │ │ ├── web.conf.yml
│ │ │ │ └── inputs.conf.yml
│ │ │ ├── universal_forwarder
│ │ │ │ └── shelper_installation.yml
│ │ │ ├── logstream_leader
│ │ │ │ └── logstream.yml
│ │ │ ├── logstream_worker
│ │ │ │ └── logstream.yml
│ │ │ └── httpeventcollector
│ │ │ │ ├── web.conf.yml
│ │ │ │ └── inputs.conf.yml
│ │ ├── hosts_dynamic.proxmox.yml.example
│ │ └── hosts_dynamic.vsphere.yml.example
│ ├── development
│ │ └── group_vars
│ │ │ └── .placeholder
│ └── production
│ │ └── group_vars
│ │ └── .placeholder
├── pors_repo_common_sudo_cribl-polkit.j2-example
├── pors_repo_common_sudo_cribl-admin-example
├── pors-server.profile.d-example
├── pors_repo_common_sudo_splunk-admin-example
├── pors-bashrc
├── pors-server.sudoers.d-example
└── gen_hosts.yml
├── .gitmodules
├── .gitignore
└── dialogrc_mono
/pors.sh:
--------------------------------------------------------------------------------
1 | pors
--------------------------------------------------------------------------------
/docs/html:
--------------------------------------------------------------------------------
1 | ../github.io/pors/
--------------------------------------------------------------------------------
/playbooks/pors/roles:
--------------------------------------------------------------------------------
1 | ../../roles/
--------------------------------------------------------------------------------
/playbooks/system/roles:
--------------------------------------------------------------------------------
1 | ../../roles
--------------------------------------------------------------------------------
/roles/apps/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/common/defaults:
--------------------------------------------------------------------------------
1 | ../defaults/
--------------------------------------------------------------------------------
/roles/conf/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/group/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/install/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/proxmox/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/shelper/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/upgrade/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/filter_plugins/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 |
--------------------------------------------------------------------------------
/roles/common_tasks/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/conf/ldap/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/conf/web/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/googlecp/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/local_tmpdir/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/openvmtools/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/pors/ssh/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults/
--------------------------------------------------------------------------------
/roles/splunk_info/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/splunk_sites/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/ssh/check/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/system_base/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_disk/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_prep/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_sshid/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_sudo/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_user/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_yum/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/apps/files/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/conf/inputs/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/conf/outputs/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/conf/server/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/install_logstream/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/logstream_info/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/patch_datetime/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/pors/setup/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults/
--------------------------------------------------------------------------------
/roles/splunk_download/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_dnsmasq/defaults:
--------------------------------------------------------------------------------
1 | ../defaults/
--------------------------------------------------------------------------------
/roles/system_journal/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_optimize/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_reboot/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_splunkcert/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/system_upgrade/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/roles:
--------------------------------------------------------------------------------
1 | ../../roles
--------------------------------------------------------------------------------
/roles/app_link/defaults:
--------------------------------------------------------------------------------
1 | /opt/pors/roles/defaults
--------------------------------------------------------------------------------
/roles/conf/distsearch/defaults:
--------------------------------------------------------------------------------
1 | ../../defaults
--------------------------------------------------------------------------------
/roles/logstream_configure/defaults:
--------------------------------------------------------------------------------
1 | ../defaults
--------------------------------------------------------------------------------
/roles/apps/app.template/files/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/logstream_configure/handlers:
--------------------------------------------------------------------------------
1 | ../common/handlers
--------------------------------------------------------------------------------
/EXAMPLES/inventories/development/group_vars/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/production/group_vars/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/playbooks/cribl/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/cribl/group_vars/all:
--------------------------------------------------------------------------------
1 | /opt/pors/roles/defaults
--------------------------------------------------------------------------------
/playbooks/googlecp/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/proxmox/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/shelper/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/splunk/.gitignore:
--------------------------------------------------------------------------------
1 | /clean
2 | /clean.nogrp
3 |
--------------------------------------------------------------------------------
/playbooks/splunk/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/system/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/playbooks/vmware/callback_plugins:
--------------------------------------------------------------------------------
1 | ../../callback_plugins/
--------------------------------------------------------------------------------
/roles/apps/files/master-apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/playbooks/shelper/ae_upgrade_shelper.yml:
--------------------------------------------------------------------------------
1 | ae_install_shelper.yml
--------------------------------------------------------------------------------
/roles/apps/app.template/files/apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/deployment-apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/master-apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/shcluster/apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/shcluster/apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/files/deployment-apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/system_ntp/templates/clock.j2:
--------------------------------------------------------------------------------
1 | ZONE="{{ ntp_timezone }}"
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/manager-apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/master-apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/master-apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/shcluster/apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/fetchfiles/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: fetch.yml
4 |
--------------------------------------------------------------------------------
/roles/ssh/check/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: check_ssh.yml
4 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/files/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/system_reboot/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: reboot.yml
4 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/deployment-apps/local/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/deployment-apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/manager-apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/apps/app.template/files/shcluster/apps/metadata/README:
--------------------------------------------------------------------------------
1 | Placeholder
2 |
--------------------------------------------------------------------------------
/roles/system_sudo/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: disable_requiretty.yml
3 |
--------------------------------------------------------------------------------
/roles/requirements_gcp.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | collections:
4 | - google.cloud
5 |
--------------------------------------------------------------------------------
/roles/system_ntp/.ansible-lint:
--------------------------------------------------------------------------------
1 | skip_list:
2 | - 'yaml'
3 | - 'role-name'
4 |
--------------------------------------------------------------------------------
/roles/system_ntp/.gitignore:
--------------------------------------------------------------------------------
1 | *.retry
2 | */__pycache__
3 | *.pyc
4 | .cache
5 |
6 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/vars/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | package_pyOpenSSL: pyOpenSSL
4 |
--------------------------------------------------------------------------------
/roles/system_upgrade/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_tasks: update.yml
4 |
5 |
--------------------------------------------------------------------------------
/roles/install/templates/etc/auth/splunk.secret.j2:
--------------------------------------------------------------------------------
1 | {{ splunk_auth.splunk_secret }}
2 |
--------------------------------------------------------------------------------
/roles/upgrade/templates/etc/auth/splunk.secret.j2:
--------------------------------------------------------------------------------
1 | {{ splunk_auth.splunk_secret }}
2 |
--------------------------------------------------------------------------------
/docs/main.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/secure-diversITy/ansible_pors/HEAD/docs/main.png
--------------------------------------------------------------------------------
/roles/conf/web/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: ../../../common/handlers/splunkd.yml
3 |
--------------------------------------------------------------------------------
/roles/system_optimize/templates/etc/auth/splunk.secret.j2:
--------------------------------------------------------------------------------
1 | {{ splunk_auth.splunk_secret }}
2 |
--------------------------------------------------------------------------------
/roles/app_link/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_tasks: tmplcheck.yml
4 | - include_tasks: enable.yml
5 |
--------------------------------------------------------------------------------
/roles/conf/inputs/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: ../../../common/handlers/splunkd.yml
3 |
--------------------------------------------------------------------------------
/roles/conf/outputs/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: ../../../common/handlers/splunkd.yml
3 |
--------------------------------------------------------------------------------
/roles/conf/server/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: ../../../common/handlers/splunkd.yml
3 |
--------------------------------------------------------------------------------
/roles/apps/vars/main.yml.j2:
--------------------------------------------------------------------------------
1 | ---
2 | app_name: "{{ app_name }}"
3 | app_variable: "{{ app_variable }}"
4 |
--------------------------------------------------------------------------------
/roles/conf/distsearch/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: ../../../common/handlers/splunkd.yml
3 |
--------------------------------------------------------------------------------
/roles/conf/ldap/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_tasks: configure.yml
4 |   when: splunk_ldap_configure
5 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to all groups
2 |
--------------------------------------------------------------------------------
/roles/splunk_info/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: installed_version.yml
3 | - include_tasks: latest_manifest.yml
4 |
--------------------------------------------------------------------------------
/roles/apps/app.template/vars/main.yml.j2:
--------------------------------------------------------------------------------
1 | ---
2 | app_name: "{{ app_name }}"
3 | app_variable: "{{ app_variable }}"
4 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site0/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the site0 group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the site1 group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site2/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the site2 group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site3/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the site3 group
2 |
--------------------------------------------------------------------------------
/roles/openvmtools/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: install_vmtools_rpm.yml
3 |   when: vmtools_installation.type == "rpm"
4 |
--------------------------------------------------------------------------------
/roles/shelper/tasks/testing.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: shelper reload
4 | shell: echo
5 | notify: shelper reload all
6 |
7 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/masternode/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the masternode group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/searchhead/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the searchhead group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_deployer/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the deployer group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_peernode/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the peernode group
2 |
--------------------------------------------------------------------------------
/roles/requirements.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | collections:
4 | - community.general
5 | - community.crypto
6 | - ansible.posix
7 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/vars/main.yml.j2:
--------------------------------------------------------------------------------
1 | ---
2 | app_name: "{{ app_name }}"
3 | app_variable: "{{ app_variable }}"
4 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/licensemaster/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the licensemaster group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_shcmember/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the shcmember group
2 |
--------------------------------------------------------------------------------
/roles/common/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - import_tasks: splunkd.yml
3 | - import_tasks: cribl.yml
4 | - import_tasks: ../../shelper/handlers/main.yml
5 |
--------------------------------------------------------------------------------
/roles/install/tasks/set_sepolicy.yml:
--------------------------------------------------------------------------------
1 | # Disable SELinux
2 | - name: Disable SELinux completely
3 | selinux:
4 | state: disabled
5 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/optimize_selinux.yml:
--------------------------------------------------------------------------------
1 | - name: Set SELinux mode
2 | selinux:
3 | state: "{{ system.selinux }}"
4 |
--------------------------------------------------------------------------------
/roles/pors/configure/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_tasks: check.yml
4 | - include_tasks: ask.yml
5 |
6 | - include_tasks: write_answers.yml
7 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/deploymentserver/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the deploymentserver group
2 |
--------------------------------------------------------------------------------
/roles/system_ntp/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # These are supported funding model platforms
3 | github: geerlingguy
4 | patreon: geerlingguy
5 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/monitoringconsole/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the monitoringconsole group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_heavyforwarder/README:
--------------------------------------------------------------------------------
1 | # This folder contains group_vars that apply to the heavyforwarder group
2 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/universal_forwarder/shelper_installation.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | shelper_installation:
4 | enabled: false
5 |
--------------------------------------------------------------------------------
/playbooks/local/templates/getvar.yml.j2:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Get variable
4 | debug:
5 |     var: "{{ varname }}"
6 | register: parsed_var
7 |
--------------------------------------------------------------------------------
/roles/install/tasks/package.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Gather package facts"
4 | package_facts:
5 | manager: auto
6 | no_log: true
7 |
8 |
--------------------------------------------------------------------------------
/roles/logstream_info/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - set_fact:
3 |     remote_user: "{{ cribl_ssh_user }}"
4 |
5 | - include_tasks: installed_version.yml
6 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/post_actions.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # debug never reports "changed", so force it — otherwise the notified handler never fires
3 | - name: Clean history
4 |   debug:
5 |     msg: "clearing.."
6 |   changed_when: true
7 |   notify: clear history root
8 |
9 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/packages.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Install basic packages"
4 | package:
5 | name:
6 | - python3-selinux
7 |
8 |
--------------------------------------------------------------------------------
/roles/system_upgrade/tasks/update.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "System upgrade"
4 | ansible.builtin.package:
5 | state: latest
6 | name: '*'
7 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/set_hostname.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Set hostname to {{ server_shortname }}
3 | hostname:
4 | name: "{{ server_shortname }}"
5 |
--------------------------------------------------------------------------------
/roles/system_dnsmasq/tasks/install.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Ensure dnsmasq is installed"
4 | package:
5 | name: dnsmasq
6 | state: present
7 |
8 |
--------------------------------------------------------------------------------
/roles/system_journal/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: restart journald
4 |   service:
5 |     name: systemd-journald
6 |     state: "{{ systemd_journal_restart_state }}"
7 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/splunk_stop.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Stop Splunk
4 |   service:
5 |     name: splunk
6 |     pattern: splunkd
7 |     state: stopped
7 |
--------------------------------------------------------------------------------
/docs/Doxyfile_extra:
--------------------------------------------------------------------------------
1 | @INCLUDE = Doxyfile
2 | # Taken from:
3 | # https://github.com/aklapatch/doxygenTheme
4 | HTML_EXTRA_STYLESHEET=CustomDoxygen.css
5 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/files/system/local/00_ANSIBLE_FILEMANAGED.conf:
--------------------------------------------------------------------------------
1 | ########## ANSIBLE FILE-MANAGED - DO NOT TOUCH WITHOUT ANSIBLE ##########
2 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/files/system/local/zz_ANSIBLE_FILEMANAGED.conf:
--------------------------------------------------------------------------------
1 | ########## ANSIBLE FILE-MANAGED - DO NOT TOUCH WITHOUT ANSIBLE ##########
2 |
--------------------------------------------------------------------------------
/roles/system_boot/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | #- include: set_iptables.yml
4 | # remote_user: "{{ pors_ssh_user }}"
5 | # become: yes
6 | # become_user: root
7 |
--------------------------------------------------------------------------------
/roles/conf/distsearch/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: touch.yml
3 |   when: splunk_distsearch_conf is defined
4 |
5 | - include_tasks: distributedSearch/server.yml
6 |
--------------------------------------------------------------------------------
/roles/fetchfiles/tasks/fetch.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Get files"
4 | ansible.posix.synchronize:
5 | mode: pull
6 | src: "{{ fetch_src }}"
7 | dest: "{{ fetch_dest }}"
8 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/splunk_start.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Start Splunk again
4 |   service:
5 |     name: splunk
6 |     state: started
7 |     arguments: "--accept-license --answer-yes"
8 |
9 |
--------------------------------------------------------------------------------
/roles/pors/setup/vars/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | pors_base_dirs:
4 | - "/home/{{ pors_user }}/.pors"
5 | - "/home/{{ pors_user }}/.ssh"
6 | - "/opt/pors_repo"
7 | - "/opt/pors_data"
8 |
--------------------------------------------------------------------------------
/roles/system_ntp/.yamllint:
--------------------------------------------------------------------------------
1 | ---
2 | extends: default
3 |
4 | rules:
5 | line-length:
6 | max: 120
7 | level: warning
8 |
9 | ignore: |
10 | .github/stale.yml
11 | .travis.yml
12 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/splunk_stop.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Stop Splunk
3 |   become: true
4 |   become_user: root
5 |   service:
6 |     name: splunk
7 |     pattern: splunkd
8 |     state: stopped
9 |
--------------------------------------------------------------------------------
/roles/app_link/tasks/tmplcheck.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Check if custom template exists
4 | stat:
5 | path: "{{ custom_roles_dir }}/common/templates/ae_deploy_apps.yml.j2"
6 | register: custom_templ
7 |
--------------------------------------------------------------------------------
/roles/system_dnsmasq/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - block:
4 |
5 |     - include_tasks: install.yml
6 |     - include_tasks: configure.yml
7 |     - include_tasks: activate.yml
8 |
9 | when: configure_install_dnsmasq
10 |
--------------------------------------------------------------------------------
/playbooks/local/get_ansible_var.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Get a configured ansible variable
3 | hosts: "pors_server"
4 | gather_facts: false
5 | connection: local
6 |
7 | roles:
8 | - local_tmpdir
9 |
10 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/gather_facts.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "(Re-)gathering facts"
4 |   setup:
5 |   when: not ssh_avail.failed
6 |   ignore_unreachable: true
7 |   ignore_errors: true
8 |   register: facts
9 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/set_touch_dir.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # quote the octal mode — unquoted 0733 is parsed as the integer 475
4 | - name: Ensure pors touch dir exists with correct ownership
5 |   file:
6 |     path: "{{ pors_touch_dir }}"
7 |     owner: "{{ pors_ssh_user }}"
8 |     group: "{{ pors_ssh_user }}"
9 |     mode: "0733"
10 |     state: directory
11 |
--------------------------------------------------------------------------------
/roles/group/licensemaster/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: copy_enterprise_license_keys.yml
3 |   when: lm_push_license is defined and lm_push_license == True
4 |
5 | #- include: distribute_distsearch_trustedkey.yml
6 |
--------------------------------------------------------------------------------
/roles/system_ntp/vars/Debian.yml:
--------------------------------------------------------------------------------
1 | ---
2 | __ntp_daemon: ntp
3 | ntp_tzdata_package: tzdata
4 | __ntp_package: ntp
5 | __ntp_config_file: /etc/ntp.conf
6 | __ntp_driftfile: /var/lib/ntp/drift
7 | ntp_cron_daemon: cron
8 |
--------------------------------------------------------------------------------
/roles/system_ntp/vars/FreeBSD.yml:
--------------------------------------------------------------------------------
1 | ---
2 | __ntp_daemon: ntpd
3 | ntp_tzdata_package: tzdata
4 | __ntp_package: ntp
5 | __ntp_config_file: /etc/ntp.conf
6 | __ntp_driftfile: /var/db/ntpd.drift
7 | ntp_cron_daemon: cron
8 |
--------------------------------------------------------------------------------
/roles/system_prep/tasks/regen_initrd.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Regenerate initrd image to apply the current hostname"
4 | shell: dracut --no-hostonly --force
5 | when: ansible_facts['os_family'] == "RedHat"
6 |
7 |
--------------------------------------------------------------------------------
/roles/system_yum/tasks/update.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: System upgrade
3 |   yum:
4 |     update_cache: true
5 |     state: latest
6 |     name: '*'
7 |     use_backend: yum
8 |   become: true
9 |   become_user: root
10 |
--------------------------------------------------------------------------------
/roles/system_ntp/vars/Archlinux.yml:
--------------------------------------------------------------------------------
1 | ---
2 | __ntp_daemon: ntpd
3 | ntp_tzdata_package: tzdata
4 | __ntp_package: ntp
5 | __ntp_config_file: /etc/ntp.conf
6 | __ntp_driftfile: /var/lib/ntp/drift
7 | ntp_cron_daemon: cronie
8 |
--------------------------------------------------------------------------------
/roles/system_user/tasks/become_root_test.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Test getting root permissions with user: {{ user_name }}"
4 |   become: true
5 | become_user: root
6 | remote_user: "{{ user_name }}"
7 | shell: whoami
8 |
--------------------------------------------------------------------------------
/roles/conf/user/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_tasks: add.yml
4 | no_log: true
5 | loop: "{{ vault_hook_splunk_accounts }}"
6 | loop_control:
7 | loop_var: splacc
8 | when: vault_hook_splunk_accounts is defined
9 |
--------------------------------------------------------------------------------
/roles/system_ntp/vars/RedHat.yml:
--------------------------------------------------------------------------------
1 | ---
2 | __ntp_daemon: chronyd
3 | ntp_tzdata_package: tzdata
4 | __ntp_package: chrony
5 | __ntp_config_file: /etc/chrony.conf
6 | __ntp_driftfile: /var/lib/ntp/drift
7 | ntp_cron_daemon: crond
8 |
--------------------------------------------------------------------------------
/roles/system_ntp/vars/Suse.yml:
--------------------------------------------------------------------------------
1 | ---
2 | __ntp_daemon: ntpd
3 | ntp_tzdata_package: timezone
4 | __ntp_package: ntp
5 | __ntp_config_file: /etc/ntp.conf
6 | __ntp_driftfile: /var/lib/ntp/drift/ntp.drift
7 | ntp_cron_daemon: cron
8 |
--------------------------------------------------------------------------------
/roles/install/tasks/post_actions.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # debug never reports "changed", so force it — otherwise the notified handlers never fire
3 | - name: Clean 1/2
4 |   debug:
5 |     msg: "clearing.."
6 |   changed_when: true
7 |   notify: clear history splunk
8 |
9 | - name: Clean 2/2
10 |   debug:
11 |     msg: "clearing.."
12 |   changed_when: true
13 |   notify: clear history root
14 |
15 |
--------------------------------------------------------------------------------
/roles/install/tasks/sudoperms.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Add app config for sudo
4 | ansible.builtin.template:
5 | src: "{{ repo_base }}/common/sudo/splunk-admin"
6 | dest: /etc/sudoers.d/splunk-admin
7 |     mode: "0600"
8 |
9 |
--------------------------------------------------------------------------------
/playbooks/system/ae_patch_datetime.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 |   remote_user: "{{ pors_ssh_user }}"
4 |   become: true
5 |   become_user: root
6 |
7 | roles:
8 | - ../roles/splunk_info
9 | - ../roles/patch_datetime
10 |
--------------------------------------------------------------------------------
/playbooks/vmware/ae_install_openvmtools.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Install open-vm-tools on all hosts
3 |   hosts: "*:!pors_server"
4 |   remote_user: "{{ pors_ssh_user }}"
5 |   become: true
6 |   become_user: root
7 |
8 |   roles:
9 |     - openvmtools
10 |
--------------------------------------------------------------------------------
/roles/system_prep/tasks/regen_machine_id.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Remove old machine-id
3 | file:
4 | path: /etc/machine-id
5 | state: absent
6 |
7 | - name: Regenerate machine-id
8 | shell: systemd-machine-id-setup
9 |
10 |
--------------------------------------------------------------------------------
/roles/system_sshid/tasks/add_authkey.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Set authorized keys for {{ splunk_install_user }}
3 | authorized_key:
4 | user: "{{ splunk_install_user }}"
5 | state: present
6 | key: "{{ vault_ssh_pubkey_splunk }}"
7 |
--------------------------------------------------------------------------------
/roles/install/tasks/cleanup.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Remove install pkg"
4 | ansible.builtin.file:
5 | path: "/{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
6 | state: absent
7 |
8 |
9 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/shc_detention.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Set manual detention: {{ shc_detention_state }}"
4 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk edit shcluster-config -manual_detention {{ shc_detention_state }}"
5 |
6 |
--------------------------------------------------------------------------------
/roles/conf/ldap/tasks/configure.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Configure splunk's openldap connection"
4 | ansible.builtin.template:
5 | src: ldap.j2
6 | dest: "{{ splunk_installation.splunk_home_path }}/etc/openldap/ldap.conf"
7 |     mode: "0600"
8 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/splunk_start.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Start Splunk again
4 |   become: true
5 |   become_user: root
6 |   service:
7 |     name: splunk
8 |     state: started
9 |     arguments: "--accept-license --answer-yes"
10 |
11 |
--------------------------------------------------------------------------------
/roles/logstream_configure/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include_tasks: configure_leader.yml
3 |   when: logstream_upgrade != True and logstream_leader is defined
4 |
5 | - include_tasks: configure_worker.yml
6 |   when: logstream_upgrade != True and logstream_worker is defined
7 |
--------------------------------------------------------------------------------
/roles/group/searchhead/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #- include_tasks: "{{ base_install_dir }}/roles/splunk_sites/tasks/get_peernode_sites.yml"
3 |
4 | - include_tasks: distribute_distsearch_trustedkey.yml
5 |   when: deploy_key_to_peers is defined and deploy_key_to_peers == True
6 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "docs/helpers/bash-doxygen"]
2 | path = docs/helpers/bash-doxygen
3 | url = https://github.com/Anvil/bash-doxygen.git
4 | [submodule "github.io"]
5 | path = github.io
6 | url = https://github.com/secure-diversITy/secure-diversity.github.io.git
7 |
--------------------------------------------------------------------------------
/roles/shelper/tasks/prereq.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Check/Install for git
3 | package:
4 | name:
5 | - git
6 | - make
7 | state: present
8 | use: auto
9 | when: shelper_installation.shelper_git_url is defined
10 |
11 |
--------------------------------------------------------------------------------
/roles/splunk_info/tasks/latest_manifest.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Determine latest manifest
3 | shell: "find {{ splunk_installation.splunk_home_path }} -maxdepth 1 -name 'splunk-{{ splunk_version_installed.stdout }}*-manifest' | tail -n 1"
4 | register: splunk_latest_manifest
5 |
--------------------------------------------------------------------------------
/roles/apps/vars/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Change to app name
3 | app_name: app.template
4 | # Change to app variable name. Must not contain dashes (-). Must be the same variable as defined in group_vars for app. This is due to variable name restrictions.
5 | app_variable: app.template
6 |
--------------------------------------------------------------------------------
/roles/patch_datetime/tasks/push_patch.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Patch remote host
3 |   become: true
4 |   become_user: "{{ splunk_install_user }}"
5 |   copy:
6 |     src: "{{ item }}"
7 |     dest: "/{{ splunk_installation.splunk_home_path }}/etc/"
8 |   with_fileglob: datetime.xml
9 |
10 |
--------------------------------------------------------------------------------
/roles/system_sudo/tasks/disable_requiretty.yml:
--------------------------------------------------------------------------------
1 | - lineinfile:
2 | path: /etc/sudoers
3 | state: present
4 | backup: true
5 | regexp: '^Defaults\s*requiretty'
6 | line: '## DISABLED FOR ANSIBLE ## Defaults requiretty'
7 | validate: '/usr/sbin/visudo -cf %s'
8 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .ansibleengine
2 | deploy_apps.yml
3 | ae_deploy_apps.yml
4 | *.retry
5 | *.swp
6 | *.bak
7 | *.backup
8 | /lab_splunk_binary_replacement
9 | gen_hosts.yml
10 | *.pyc
11 | test.yml
12 | generated_*.yml
13 | roles/system_ntp/.github
14 | /playbooks/system/debug.yml
15 |
--------------------------------------------------------------------------------
/roles/patch_datetime/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: check.yml
3 |
4 | - include: push_patch.yml
5 | when: patch_required is not skipped
6 |
7 | - include: post_patch.yml
8 | when: patch_required is not skipped or (manifest_validation is defined and manifest_validation.rc != 0)
9 |
--------------------------------------------------------------------------------
/roles/pors/ssh/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - block:
4 |
5 | - include: validation.yml
6 | when: not initial_setup
7 | - include: create_ssh_key.yml
8 | when: not var_validation_only
9 |
10 | become: no
11 | run_once: true
12 | delegate_to: localhost
13 |
--------------------------------------------------------------------------------
/filter_plugins/app_default.py:
--------------------------------------------------------------------------------
def app_default (app):
    """Jinja filter: return ``app`` unchanged when it is a plain dict,
    otherwise substitute an empty dict.

    Used to normalize optional app configuration values so callers can
    iterate them without guarding against None/strings.
    """
    if type(app) is dict:
        return app

    return {}

class FilterModule(object):
    def filters(self):
        return {'app_default': app_default}
13 |
--------------------------------------------------------------------------------
/playbooks/system/ae_sudoers_disabletty.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | gather_facts: false
7 |
8 | vars:
9 | ansible_ssh_user: "{{ pors_ssh_user }}"
10 |
11 | roles:
12 | - system_sudo
13 |
--------------------------------------------------------------------------------
/roles/apps/app.template/vars/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Change to app name
3 | app_name: app.template
4 | # Change to app variable name. Must not contain dashes (-). Must be the same variable as defined in group_vars for app. This is due to variable name restrictions.
5 | app_variable: app.template
6 |
--------------------------------------------------------------------------------
/roles/common/handlers/cleanup.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: clear history root
3 | become: yes
4 | become_user: root
5 | command: rm /root/.bash_history
6 |
7 | - name: clear history splunk
8 | command: rm ~/.bash_history
9 | become: yes
10 | become_user: "{{ splunk_install_user }}"
11 |
12 |
--------------------------------------------------------------------------------
/roles/install/tasks/add_authorized_key.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Add authorized key to user splunk
4 | authorized_key: user="{{ splunk_install_user }}"
5 | key="{{ splunk_installation.ssh_public_key }}"
6 | when: splunk_installation.ssh_public_key is defined
7 |
8 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/optimize_selinux.yml:
--------------------------------------------------------------------------------
# Probe for SELinux via getenforce; a missing binary must not abort the play
- name: Check SELinux install state
  shell: getenforce
  register: selstate
  ignore_errors: True

# Apply the configured mode only when getenforce succeeded (rc == 0)
- name: Set SELinux mode (if installed)
  selinux:
    state: "{{ system.selinux }}"
  when: selstate.rc == 0
10 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_enable_app.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: pors_server
3 | gather_facts: False
4 | connection: local
5 |
6 | pre_tasks:
7 | - include_vars: "{{ env_inventory_dir }}/group_vars/{{ app2group }}/deployment.yml"
8 | ignore_errors: true
9 |
10 | roles:
11 | - app_link
12 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/logstream_leader/logstream.yml:
--------------------------------------------------------------------------------
1 | # leader default config
2 | logstream_leader:
3 | port: 4200
4 | disabledtls: true
5 | compression: none
6 | group: default
7 | envRegex: "/^CRIBL_/"
8 | connectionTimeout: 5000
9 | writeTimeout: 10000
10 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/logstream_worker/logstream.yml:
--------------------------------------------------------------------------------
1 | # worker default config
2 | logstream_worker:
3 | port: 4200
4 | disabledtls: true
5 | compression: none
6 | group: default
7 | envRegex: "/^CRIBL_/"
8 | connectionTimeout: 5000
9 | writeTimeout: 10000
10 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include_role:
4 | name: ssh/check
5 |
6 | - include: set_touch_dir.yml
7 | - include: check_sudo.yml
8 |
9 | - include: gather_facts.yml
10 | tags: task_gather_facts
11 | when:
12 | - ssh_avail is defined
13 |
14 | #- include: packages.yml
15 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/base_dirs.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Create PORS base directories"
4 | become: yes
5 | become_user: root
6 | file:
7 | path: "{{ item }}"
8 | owner: "{{ pors_user }}"
9 | group: "{{ pors_group }}"
10 | state: directory
11 | loop: "{{ pors_base_dirs }}"
12 |
13 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/renew_server_cert.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Renew splunk server certificate
3 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk createssl server-cert -d {{ splunk_installation.splunk_home_path }}/etc/auth -n server -c $(hostname -f) -l 4096"
4 | notify: splunk restart
5 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/transfer_splpkg.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Copy Splunk package
3 | copy: src="{{ splunk_repository.repository_root }}/packages/{{ splunk_installation.package_file }}"
4 | dest="/{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
5 | mode=600
6 |
--------------------------------------------------------------------------------
/playbooks/system/ae_checksplunkversion.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | strategy: free
7 | gather_facts: false
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | roles:
13 | - ../roles/splunk_info
14 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_yum.yml:
--------------------------------------------------------------------------------
1 | - name: Add yum repos (elrepo.org)
2 | hosts: "*:!pors_server"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | gather_facts: false
7 |
8 | vars:
9 | ansible_ssh_user: "{{ pors_ssh_user }}"
10 |
11 | roles:
12 | - system_yum
13 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/transfer_pkg.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Push installation package
3 | copy:
4 | src: "{{ repo_base }}/common/packages/cribl/{{ logstream_installation.package_file }}"
5 | dest: "/{{ logstream_installation.remote_package_temp_path }}/{{ logstream_installation.package_file }}"
6 | mode: 600
7 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/vars/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Change to app name
3 | app_name: system_local_configs
4 | # Change to app variable name. Must not contain dashes (-). Must be the same variable as defined in group_vars for app. This is due to variable name restrictions.
5 | app_variable: system_local_configs
6 |
--------------------------------------------------------------------------------
/playbooks/googlecp/gcp_deploy_instance.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Deploy Google Cloud instance(s)
3 | hosts: "pors_server"
4 | gather_facts: false
5 |
6 | vars:
7 | ansible_python_interpreter: /usr/bin/python3
8 | gcp_configure_address: True
9 | gcp_create_instance: True
10 |
11 | roles:
12 | - googlecp
13 |
--------------------------------------------------------------------------------
/roles/apps/tasks/remove_app.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Remove app
4 | file: path="{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
5 | state=absent
6 | when: item.value.delete == true
7 | with_dict: "{{ vars[app_variable] }}"
8 |
--------------------------------------------------------------------------------
/roles/install/tasks/firewalld_open_splunkports.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Open splunkd port on firewalld
4 | firewalld: port="{{ item }}"
5 | permanent=true
6 | state=enabled
7 | when: splunk_installation.firewalld_open_port is defined
8 | with_items: splunk_installation.firewalld_open_port | default()
9 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_service_stop.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Stop Splunk (non-systemd only)
3 | service: name=splunk
4 | state=stopped
5 | # when: splunkversionnum < 72
6 |
7 | #- name: Stop Splunk (>= 7.2)
8 | # service: name=Splunkd
9 | # state=stopped
10 | # when: splunkversionnum >= 72
11 | #
12 |
--------------------------------------------------------------------------------
/roles/system_ntp/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: restart ntp
3 | service:
4 | name: "{{ ntp_daemon }}"
5 | state: restarted
6 | when: ntp_enabled | bool
7 |
8 | - name: restart cron
9 | service:
10 | name: "{{ ntp_cron_daemon }}"
11 | state: restarted
12 | when: ntp_cron_handler_enabled | bool
13 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/set_perms.yml:
--------------------------------------------------------------------------------
---
# Restrict group/other access on the Splunk configuration tree.
- name: Set directory permissions for splunk etc
  file:
    path: "{{ splunk_installation.splunk_home_path }}/etc/"
    mode: "0750"

- name: Set directory permissions for splunk etc/apps
  file:
    path: "{{ splunk_installation.splunk_home_path }}/etc/apps"
    mode: "0750"
8 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/renew_web_cert.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Renew splunk webserver certificate
3 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk createssl web-cert -n $(hostname -f) -l 4096"
4 | args:
5 | chdir: "{{ splunk_installation.splunk_home_path }}/etc/auth/splunkweb"
6 | notify: splunk restart
7 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_service_start.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Start Splunk (non-systemd only)
3 | service: name=splunk
4 | state=started
5 | # when: splunkversionnum < 72
6 |
7 | #- name: Start Splunk (>= v7.2)
8 | # systemd: name=Splunkd
9 | # state=started
10 | # when: splunkversionnum >= 72
11 |
12 |
--------------------------------------------------------------------------------
/playbooks/splunk/fetch_files.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Fetch files from a remote system
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ filefetch_user }}"
9 |
10 | roles:
11 | - fetchfiles
12 |
13 |
--------------------------------------------------------------------------------
/roles/conf/inputs/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Check if conf file exists
3 | stat: path={{ splunk_conf_path }}/inputs.conf
4 | register: inputs_conf
5 |
6 | - include: splunktcp/port.yml
7 | - include: splunktcp-ssl/port.yml
8 | - include: SSL/sslPassword.yml
9 | - include: SSL/rootCA.yml
10 | - include: SSL/serverCert.yml
11 |
--------------------------------------------------------------------------------
/roles/apps/app.template/tasks/remove_app.yml:
---

# Delete the app from every deployment location whose group_vars entry
# sets delete: true. Keys such as "shcluster_apps" map to on-disk paths
# ("shcluster/apps"); remaining underscores become dashes.
- name: Remove app
  file:
    path: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
    state: absent
  # default(false) keeps entries without an explicit delete flag from
  # failing the task with an undefined-attribute error
  when: item.value.delete | default(false)
  with_dict: "{{ vars[app_variable] }}"
8 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # THIS GETS OVERWRITTEN EACH RUN FROM: roles/system/template/ DIRECTORY!
3 | - include: "{{ pors_install_dir }}/roles/common/handlers/splunkd.yml"
4 | - include: inject_myfiles.yml
5 | - include: configure_local.yml
6 | - include: remove_conf_files.yml
7 | - include: push_conf.yml
8 |
--------------------------------------------------------------------------------
/playbooks/splunk/deploy_splunk_pubpem.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: "Add public key to given target group"
3 | hosts: "all:!pors_server"
4 | remote_user: "{{ pors_ssh_user }}"
5 | become: yes
6 | become_user: "{{ splunk_install_user }}"
7 |
8 | vars:
9 | parsed_groups: "{{ target_group }}"
10 |
11 | roles:
12 | - group/searchhead
13 |
--------------------------------------------------------------------------------
/playbooks/vmware/vcenter_always_poweron-vm.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: '{{ server_hostname }}'
3 | gather_facts: false
4 |
5 | tasks:
6 | - name: start VM (this task will wait for IP)
7 | include_tasks: vcenter_poweron-vm.yml
8 |
9 | - name: include any system boot tasks
10 | include_role:
11 | name: system_boot
12 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: transfer_splpkg.yml
3 | - include: splunk_stop.yml
4 | - include: backup_splunk.yml
5 | when: (vars[splunk_installation].backup is defined and vars[splunk_installation].backup == true)
6 | - include: upgrade_splunk.yml
7 | - include: ../../system_splunkcert/tasks/main.yml
8 | - include: splunk_start.yml
9 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_journald.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: configure systemd journal daemon
3 | hosts: "*"
4 | remote_user: "{{ pors_ssh_user }}"
5 | become: yes
6 | become_user: root
7 | strategy: free
8 | gather_facts: false
9 |
10 | vars:
11 | ansible_ssh_user: "{{ pors_ssh_user }}"
12 |
13 | roles:
14 | - system_journal
15 |
--------------------------------------------------------------------------------
/roles/common/templates/deploy_apps.yml.j2:
--------------------------------------------------------------------------------
1 | - name: Deploy Apps
2 | hosts: "{{ '{{' }} target {{ '}}' }}"
3 | remote_user: "{{ '{{ pors_ssh_user }}' }}"
4 | become: yes
5 | become_user: "{{ '{{ splunk_install_user }}' }}"
6 | strategy: free
7 |
8 | roles:
9 | {{ app_roles.stdout_lines | app_role_list | to_nice_yaml | indent(4, true) }}
10 |
--------------------------------------------------------------------------------
/roles/install/tasks/transfer_pkg.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: "Copy installation package ({{ splunk_type }})"
3 | ansible.builtin.copy:
4 | src: "{{ repo_base }}/common/packages/{{ splunk_type }}/{{ splunk_installation.package_file }}"
5 | dest: "/{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
6 | mode: 600
7 |
--------------------------------------------------------------------------------
/playbooks/cribl/ae_install_logstream.yml:
--------------------------------------------------------------------------------
1 | - name: Install cribl logstream
2 | hosts: "{{ target }}"
3 | remote_user: "{{ cribl_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | gather_facts: true
7 | strategy: free
8 |
9 | vars:
10 | logstream_upgrade: False
11 |
12 | roles:
13 | - common_tasks
14 | - install_logstream
15 |
--------------------------------------------------------------------------------
/playbooks/proxmox/px_configure_vm.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: (Re-)configure a Proxmox VM
3 | hosts: "pors_server"
4 | gather_facts: false
5 | connection: local
6 |
7 | vars:
8 | ansible_python_interpreter: /usr/bin/python3
9 | kvm_create: false
10 | kvm_configure: true
11 | kvm_poweron: false
12 |
13 | roles:
14 | - proxmox
15 |
--------------------------------------------------------------------------------
/playbooks/proxmox/px_add_disk.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Add disk to an existing VM (proxmox)
3 | #hosts: "{{ server_hostname }}*"
4 | hosts: all
5 | gather_facts: false
6 | connection: local
7 |
8 | vars:
9 | ansible_python_interpreter: /usr/bin/python3
10 | kvm_add_disk: True
11 | kvm_create: False
12 |
13 | roles:
14 | - proxmox
15 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/check_ssh.yml:
--------------------------------------------------------------------------------
# Probe TCP/22 from the controller (connection: local) and wait for the
# OpenSSH banner. Failures are tolerated; downstream tasks gate on the
# registered ssh_avail result (see common_tasks/main.yml).
- name: check SSH connectivity
  wait_for:
    port: 22
    # prefer the explicit ssh host var, fall back to inventory hostname
    host: '{{ (ansible_ssh_host|default(ansible_host))|default(inventory_hostname) }}'
    search_regex: OpenSSH
    delay: 2
    timeout: 14
  connection: local
  ignore_errors: True
  register: ssh_avail
  tags: check_ssh
12 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_reboot.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: "Rebooting a system"
3 | hosts: "{{ target }}"
4 | remote_user: "{{ pors_ssh_user }}"
5 | become: yes
6 | become_user: root
7 | gather_facts: false
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | tasks:
13 |
14 | - include_role:
15 | name: system_reboot
16 |
17 |
--------------------------------------------------------------------------------
/playbooks/system/ae_configure_journal.yml:
--------------------------------------------------------------------------------
1 | - name: Deploy Journal Configs
2 | hosts: all
3 | become: yes
4 | become_user: root
5 | gather_facts: false
6 |
7 | vars:
8 | ansible_ssh_user: "{{ pors_ssh_user }}"
9 |
10 |
11 | pre_tasks:
12 | - include_vars: inventories/{{ target_env }}/group_vars/all/ansible
13 |
14 | roles:
15 | - system_journal
16 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/service_stop.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Import checks
3 | import_tasks: service_checks.yml
4 |
5 | - name: Stop daemon
6 | service: name=cribl
7 | state=stopped
8 |
9 | - name: "Stop by binary"
10 | become: yes
11 | become_user: "{{ cribl_install_user }}"
12 | shell: "{{ logstream_installation.home_path }}/bin/cribl stop"
13 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: sys_check.yml
3 |
4 | - include: firewalld.yml
5 |
6 | - include: optimize_tuned.yml
7 | when:
8 | - splunk_optimize.tuned is defined
9 | - splunk_optimize.tuned == True
10 |
11 | - include: optimize_selinux.yml
12 | when:
13 | - system.selinux is defined
14 |
15 | # - include: set_perms.yml
16 |
--------------------------------------------------------------------------------
/playbooks/shelper/ae_install_shelper.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Install the splunk helper
4 | hosts: "*:!pors_server"
5 | remote_user: "{{ pors_ssh_user }}"
6 | become: yes
7 | become_user: root
8 | #no_log: True
9 | gather_facts: False
10 |
11 | tasks:
12 | - include_role:
13 | name: shelper
14 | when: shelper_installation.enabled | d(False)
15 |
--------------------------------------------------------------------------------
/playbooks/system/dnsmasq.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Install/Configure dnsmasq"
4 | hosts: "{{ target }}"
5 | remote_user: "{{ pors_ssh_user }}"
6 | become: yes
7 | become_user: root
8 | gather_facts: true
9 |
10 | tasks:
11 |
12 | - include_role:
13 | name: common
14 | public: yes
15 |
16 | - import_role:
17 | name: system_dnsmasq
18 |
--------------------------------------------------------------------------------
/playbooks/splunk/configure_monitoringroles.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: "Configure splunk Monitoring Console (Roles)"
3 | hosts: monitoringconsole
4 | connection: local
5 |
6 | roles:
7 | - group/monitoringconsole
8 |
9 | vars:
10 | splunk_restart: True
11 |
12 | # flush notify handlers to activate the splunk config
13 | post_tasks:
14 | - meta: flush_handlers
15 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/finish.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Finished!"
4 | debug:
5 | msg: |
6 |
7 | Congratulations! PORS has been setup succesfully and is now ready to use!
8 |
9 | Have fun and do not hesitate to contact us if any issues occur
10 |
11 | https://github.com/secure-diversITy/ansible_pors
12 |
13 | Have a great day
14 |
15 |
16 |
--------------------------------------------------------------------------------
/roles/system_reboot/tasks/reboot.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Reboot remote system"
4 | ansible.builtin.reboot:
5 | msg: "reboot initiated by PORS"
6 | register: rebootsys
7 |
8 | - name: "Verify system reboot"
9 | ansible.builtin.fail:
10 | msg: "ERROR: Failed to reboot the remote system or it has taken too long to boot up again"
11 | when: rebootsys.rebooted != True
12 |
--------------------------------------------------------------------------------
/playbooks/system/ae_create_useracc.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Create a new application admin linux user (shell)
3 | hosts: "{{ target }}"
4 | remote_user: "{{ pors_ssh_user }}"
5 | become: yes
6 | become_user: root
7 | gather_facts: False
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | tasks:
13 |
14 | - include_role:
15 | name: system_user
16 |
--------------------------------------------------------------------------------
/roles/googlecp/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - set_fact: datetime="{{lookup('pipe','date \"+%Y/%m/%d %H:%M\"')}}"
3 | run_once: True
4 |
5 | - include_tasks: gcp_configure_address.yml
6 | when: gcp_configure_address is defined and gcp_configure_address != False
7 |
8 | - include_tasks: gcp_create_instance.yml
9 | when: gcp_create_instance is defined and gcp_create_instance != False
10 |
--------------------------------------------------------------------------------
/roles/splunk_sites/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - debug:
3 | var: target
4 |
5 | - include: get_peernode_sites.yml
6 | when: target == "peernode"
7 |
8 | - include: get_deployer_sites.yml
9 | when: target == "deployer"
10 |
11 | - include: get_shcmember_sites.yml
12 | when: target == "shcmember"
13 |
14 | - debug:
15 | var: parsed_groups
16 | when: parsed_groups is defined
17 |
--------------------------------------------------------------------------------
/roles/common/handlers/cribl.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: cribl start
3 | service:
4 | name: cribl
5 | state: start
6 | register: action_result
7 |
8 | - name: cribl stop
9 | service:
10 | name: cribl
11 | state: stopped
12 | register: action_result
13 |
14 | - name: cribl restart
15 | service:
16 | name: cribl
17 | state: restarted
18 | register: action_result
19 |
--------------------------------------------------------------------------------
/roles/group/shcmember/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: restart_shcmember.yml
4 |
5 | - include: state.yml
6 |
7 | - include: bootstrap.yml
8 | when:
9 | - get_shc_state.rc == 0
10 | - captain_elected.rc != 0
11 |
12 | - include_role:
13 | name: common_tasks
14 | tasks_from: monitoring.yml
15 |
16 | - include: restart_shcmember.yml
17 | when: captain_elected.rc != 0
18 |
--------------------------------------------------------------------------------
/roles/splunk_sites/tasks/get_deployer_sites.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Get and store all configured sites for siteX_deployer
3 | # into the list "parsed_groups"
4 |
5 | - set_fact:
6 | parsed_groups: []
7 |
8 | # find all site's groups
9 | - set_fact:
10 | parsed_groups: "{{ parsed_groups }} + ['{{ item }}']"
11 | when: "item is match('site.*deployer')"
12 | loop: "{{ groups.keys() | list }}"
13 |
--------------------------------------------------------------------------------
/roles/splunk_sites/tasks/get_peernode_sites.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Get and store all configured sites for siteX_peernode
3 | # into the list "parsed_groups"
4 |
5 | - set_fact:
6 | parsed_groups: []
7 |
8 | # find all site's groups
9 | - set_fact:
10 | parsed_groups: "{{ parsed_groups }} + ['{{ item }}']"
11 | when: "item is match('site.*peernode')"
12 | loop: "{{ groups.keys() | list }}"
13 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/tasks/push_conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # THIS GETS OVERWRITTEN EACH RUN FROM: roles/system/template/ DIRECTORY!
3 |
4 | - name: Copy conf to remote host
5 | become: yes
6 | become_user: "{{ splunk_install_user }}"
7 | copy:
8 | src: "{{ item }}"
9 | dest: "/{{ splunk_conf_path }}/"
10 | mode: 0660
11 | with_fileglob: system/local/*.conf
12 |
13 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/user_setup_root.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Copy initial PORS files"
4 | become: yes
5 | become_user: root
6 | copy:
7 | remote_src: true
8 | src: "{{ item.s }}"
9 | dest: "{{ item.d }}"
10 | mode: "{{ item.m | d('0600') }}"
11 | loop:
12 | - { s: "{{ pors_install_dir }}/EXAMPLES/pors-server.profile.d-example" , d: "/etc/profile.d/pors.sh" , m: '0755' }
13 |
--------------------------------------------------------------------------------
/roles/splunk_sites/tasks/get_shcmember_sites.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Get and store all configured sites for siteX_shcmember
3 | # into the list "parsed_groups"
4 |
5 | - set_fact:
6 | parsed_groups: []
7 |
8 | # find all site's groups
9 | - set_fact:
10 | parsed_groups: "{{ parsed_groups }} + ['{{ item }}']"
11 | when: "item is match('site.*_shcmember')"
12 | loop: "{{ groups.keys() | list }}"
13 |
--------------------------------------------------------------------------------
/filter_plugins/app_role_list.py:
--------------------------------------------------------------------------------
def app_role_list (app):
    """Jinja filter: turn a directory listing of app names into role
    paths ("apps/<name>"), dropping non-app entries first.

    Note: ignored entries are removed from the caller's list in place.
    """
    for skip in ('app.template', 'files', 'tasks', 'vars'):
        if skip in app:
            app.remove(skip)

    return ['apps/' + role for role in app]

class FilterModule(object):
    def filters(self):
        return {'app_role_list': app_role_list}
16 |
--------------------------------------------------------------------------------
/roles/conf/web/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: touch.yml
3 | when: splunk_web_conf is defined
4 |
5 | - include: settings/caCertPath.yml
6 | - include: settings/enableSplunkWebSSL.yml
7 | - include: settings/httpport.yml
8 | - include: settings/privKeyPath.yml
9 | - include: settings/startwebserver.yml
10 | - include: settings/updateCheckerBaseURL.yml
11 | - include: settings/enable_insecure_pdfgen.yml
12 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/httpeventcollector/web.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk web.conf
4 | #####################################################################################################################
5 |
6 | splunk_web_conf:
7 | settings:
8 | startwebserver: 0
9 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_heavyforwarder/web.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk web.conf
4 | #####################################################################################################################
5 |
6 | splunk_web_conf:
7 | settings:
8 | startwebserver: 0
9 |
--------------------------------------------------------------------------------
/playbooks/system/system_upgrade_rebootcheck.yml:
--------------------------------------------------------------------------------
1 | - name: "System upgrade reboot check"
2 | hosts: "*"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | strategy: free
7 | gather_facts: True
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | tasks:
13 |
14 | - include_role:
15 | name: system_upgrade
16 | tasks_from: check_reboot.yml
17 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/set_dns_domain.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: set DNS suffix (SuSE)
3 | lineinfile:
4 | path: /etc/sysconfig/network/config
5 | regexp: '^NETCONFIG_DNS_STATIC_SEARCHLIST=.*'
6 | line: 'NETCONFIG_DNS_STATIC_SEARCHLIST="{{ server_domain }}"'
7 |
8 | - name: update /etc/resolv.conf
9 | shell: 'netconfig update -f'
10 | register: netconfig
11 | failed_when: "netconfig.rc not in [ 0, 1 ]"
12 |
--------------------------------------------------------------------------------
/roles/system_ntp/molecule/default/converge.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Converge
3 | hosts: all
4 | become: true
5 |
6 | vars:
7 | ntp_enabled: false
8 | ntp_manage_config: true
9 |
10 | pre_tasks:
11 | - name: Update apt cache.
12 | apt:
13 | update_cache: true
14 | cache_valid_time: 600
15 | when: ansible_os_family == 'Debian'
16 |
17 | roles:
18 | - role: geerlingguy.ntp
19 |
--------------------------------------------------------------------------------
/roles/system_prep/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # general tasks
4 | - block:
5 | - include: regen_ssh_host_keys.yml
6 | - include: regen_machine_id.yml
7 | - include: regen_initrd.yml
8 | when:
9 | - regen_sys | d('destroy_deploy') != 'config_only'
10 | - not skip_regen_sys | d(False)
11 |
12 | # distro specific tasks
13 | - include: update_rhsm.yml
14 | when: ansible_facts['os_family'] == "RedHat"
15 |
--------------------------------------------------------------------------------
/EXAMPLES/pors_repo_common_sudo_cribl-polkit.j2-example:
--------------------------------------------------------------------------------
1 | polkit.addRule(function(action, subject) {
2 | if (action.id == "org.freedesktop.systemd1.manage-units" &&
3 | action.lookup("unit") == "cribl.service" &&
4 | subject.user == "{{ cribl_install_user }}")
5 | {
6 | var verb = action.lookup("verb");
7 | if (verb == "start" || verb == "stop" || verb == "restart")
8 | return polkit.Result.YES;
9 | }
10 | });
11 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_deploy_systemconfigs.yml:
--------------------------------------------------------------------------------
1 | - name: Deploy System Configs
2 | hosts: "{{ exp_host }}"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: "{{ splunk_install_user }}"
6 | gather_facts: False
7 |
8 | # pre_tasks:
9 | # - include_vars: inventories/{{ target_env }}/group_vars/all/ansible
10 |
11 | roles:
12 | - system_local/{{ target }}/{{ exp_host }}/system_local_configs
13 |
--------------------------------------------------------------------------------
/playbooks/vmware/vcenter_login.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Prepare cookie authentication
3 | uri:
4 | url: https://{{ vsphere_var }}/rest/com/vmware/cis/session
5 | force_basic_auth: yes
6 | validate_certs: "{{ vsphere.host[vsphere_var].validate_certs }}"
7 | method: POST
8 | user: "{{ vsphere.host[vsphere_var].user }}"
9 | password: "{{ vsphere.host[vsphere_var].password }}"
10 | register: login
11 |
12 |
--------------------------------------------------------------------------------
/roles/apps/tasks/set_permissions.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # sets proper permissions of the target folder
3 |
4 | - name: set permissions on app directory
5 | file:
6 | path: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
7 | recurse: yes
8 | mode: u+rwx,g+rx
9 | state: directory
10 | with_dict: "{{ vars[app_variable] }}"
11 |
--------------------------------------------------------------------------------
/playbooks/system/disk_format.yml:
--------------------------------------------------------------------------------
1 | - name: Format new disk storage
2 | hosts: "{{ server_hostname }}"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | gather_facts: False
7 |
8 | vars:
9 | # format defined disks within an instance
10 | format_disk: True
11 | ansible_ssh_user: "{{ pors_ssh_user }}"
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: system_disk
17 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/set_dns_servers.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: set DNS server list (SuSE)
3 | lineinfile:
4 | path: /etc/sysconfig/network/config
5 | regexp: '^NETCONFIG_DNS_STATIC_SERVERS=.*'
6 | line: 'NETCONFIG_DNS_STATIC_SERVERS="{{ server_dns1 }} {{ server_dns2 }}"'
7 |
8 | - name: update /etc/resolv.conf
9 | shell: 'netconfig update -f'
10 | register: netconfig
11 | failed_when: "netconfig.rc not in [ 0, 1 ]"
12 |
--------------------------------------------------------------------------------
/playbooks/system/manage_unattended_updates.yml:
--------------------------------------------------------------------------------
1 | - name: Manage unattended system updates
2 | hosts: "{{ target }}"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | strategy: free
7 | gather_facts: true
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | tasks:
13 |
14 | - include_role:
15 | name: system_base
16 | tasks_from: unattended_upgrade.yml
17 |
--------------------------------------------------------------------------------
/roles/apps/app.template/tasks/set_permissions.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # sets proper permissions of the target folder
3 |
4 | - name: set permissions on app directory
5 | file:
6 | path: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
7 | recurse: yes
8 | mode: u+rwx,g+rx
9 | state: directory
10 | with_dict: "{{ vars[app_variable] }}"
11 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/enable_boot_start_splunk.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Remove existing init.d script
4 | file: path=/etc/init.d/splunk
5 | state=absent
6 |
7 | - name: Enable Splunk boot-start
8 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk enable boot-start -user splunk --answer-yes --accept-license"
9 |
10 | - name: Start splunk on system boot
11 | service:
12 | name: splunk
13 | enabled: yes
14 |
15 |
--------------------------------------------------------------------------------
/playbooks/cribl/ae_upgrade_logstream.yml:
--------------------------------------------------------------------------------
1 | - name: Upgrade cribl logstream
2 | hosts: "*:!pors_server"
3 | remote_user: "{{ cribl_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | strategy: free
7 | gather_facts: true
8 | #do NOT use "no_log: True" here as it will be effective for all sub tasks then
9 |
10 | vars:
11 | logstream_upgrade: True
12 |
13 | roles:
14 | - logstream_info
15 | - install_logstream
16 |
17 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_download_splunk.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: localhost
3 | connection: local
4 |
5 | pre_tasks:
6 | - include_vars: "{{ env_inventory_dir }}/group_vars/universal_forwarder/splunk_installation.yml"
7 | when: spltype == "splunkforwarder"
8 |
9 | vars:
10 | tgzname: "{{ spltype }}-{{ globversion }}-{{ globhash }}-Linux-x86_64.tgz"
11 |
12 | tasks:
13 | - include_role:
14 | name: splunk_download
15 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_peernode/web.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk web.conf
4 | #####################################################################################################################
5 |
6 | splunk_web_conf:
7 | settings:
8 | startwebserver: 0
9 | updateCheckerBaseURL: 0
10 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: user_setup.yml
4 | - include: requirements.yml
5 |
6 | - fail:
7 | msg: Please re-run the exact same playbook command again. This is expected as we added mandatory files which requires a new run..
8 | when: init_files.changed or gi_inst.changed or gi_upgr.changed
9 |
10 | - include: check_sudo.yml
11 | - include: base_dirs.yml
12 | - include: user_setup_root.yml
13 |
14 | - include: finish.yml
15 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/masternode/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk deployment directory
4 | # Keep in mind to replace "-" and "/" with an underscore "_"
5 | #####################################################################################################################
6 |
7 | deployment:
8 | dir: master_apps
9 |
--------------------------------------------------------------------------------
/roles/system_sshid/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: add_privkey.yml
3 | when:
4 | - vault_ssh_privkey_splunk is defined
5 | - splunk_installation.splunk_home_path is defined
6 | - skip_ssh_splunk_privkey is undefined
7 |
8 | - include: add_authkey.yml
9 | when: skip_ssh_splunk_authkey is undefined and not in_app_deploy_play
10 |
11 | - include: update_authkey.yml
12 | when: system_shared_service_account is defined and not in_app_deploy_play
13 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_deployer/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk deployment directory
4 | # Keep in mind to replace "-" and "/" with an underscore "_"
5 | #####################################################################################################################
6 |
7 | deployment:
8 | dir: shcluster_apps
9 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/deploymentserver/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk deployment directory
4 | # Keep in mind to replace "-" and "/" with an underscore "_"
5 | #####################################################################################################################
6 |
7 | deployment:
8 | dir: deployment_apps
9 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/unattended_upgrade.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - include: activate_unattended_updates.yml
4 | when:
5 | - unattended_system_upgrade.enabled | d(False)
6 | - not unattended_system_upgrade.forcedisable | d(False)
7 | - not unattended_system_upgrade_forcedisable | d(False)
8 |
9 | - include: disable_unattended_updates.yml
10 | when:
11 | - unattended_system_upgrade.forcedisable | d(False) or unattended_system_upgrade_forcedisable | d(False)
12 |
--------------------------------------------------------------------------------
/roles/system_prep/tasks/update_rhsm.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Check if RHSM is configured
3 | stat:
4 | path: /etc/rhsm/facts/fqdn.facts
5 | register: stat_rhsm_file
6 |
7 | - name: Update RH Satellite facts
8 | lineinfile:
9 | path: /etc/rhsm/facts/fqdn.facts
10 | backrefs: yes
11 | regex: '({"network.hostname": ")(.*)("})'
12 | line: '\1{{ inventory_hostname }}\3'
13 | when: stat_rhsm_file.stat.exists is defined and stat_rhsm_file.stat.exists == True
14 |
--------------------------------------------------------------------------------
/EXAMPLES/pors_repo_common_sudo_cribl-admin-example:
--------------------------------------------------------------------------------
1 | # Created by Ansible
2 |
3 | User_Alias USERCRIBL=%cribladmin
4 | Cmnd_Alias CMDCRIBL =/bin/su - {{ cribl_install_user }} -c *, /bin/su - {{ cribl_install_user }},/usr/sbin/tcpdump *, /usr/bin/systemctl * cribl, {{ logstream_root_path }}/cribl/bin/cribl *
5 | USERCRIBL ALL =(root) NOPASSWD: CMDCRIBL
6 |
7 | {{ cribl_install_user }} ALL=NOPASSWD: /usr/bin/systemctl * cribl, /usr/bin/systemctl * cribl.service
8 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_shcmember/distsearch.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk distsearch.conf
4 | ####################################################################################################################
5 |
6 | splunk_distsearch_conf:
7 | distributedSearch:
8 | servers:
9 | - "{{ groups.peernode }}"
10 |
--------------------------------------------------------------------------------
/roles/install/tasks/set_splunk_secret.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Create common splunk.secret
4 | template:
5 | src: etc/auth/splunk.secret.j2
6 | dest: "{{ splunk_installation.splunk_home_path }}/etc/auth/splunk.secret"
7 |
8 | - name: Set permissions for splunk.secret
9 | file:
10 | path: "{{ splunk_installation.splunk_home_path }}/etc/auth/splunk.secret"
11 | owner: "{{ splunk_install_user }}"
12 | group: "{{ splunk_install_group }}"
13 | mode: 0400
14 |
15 |
--------------------------------------------------------------------------------
/playbooks/proxmox/px_create_vm.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Deploy a Proxmox VM
3 | hosts: "pors_server"
4 | gather_facts: false
5 | connection: local
6 |
7 | vars:
8 | server_notes: "Based on {{ proxmox.node[px_var].ansible_template_name }} and deployed/re-configured by ansible/PORS"
9 | ansible_python_interpreter: /usr/bin/python3
10 | kvm_create: true
11 | kvm_configure: true
12 | kvm_poweron: true
13 | kvm_autoreboot: true
14 |
15 | roles:
16 | - proxmox
17 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_tuned.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: "{{ target }}"
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: root
6 | gather_facts: false
7 | serial: "{{ serial_config | d(0) }}"
8 |
9 | vars:
10 | ansible_ssh_user: "{{ pors_ssh_user }}"
11 |
12 | tasks:
13 |
14 | - include_role:
15 | name: common
16 | public: yes
17 |
18 | - include_role:
19 | name: system_optimize
20 | tasks_from: optimize_tuned
21 |
--------------------------------------------------------------------------------
/roles/system_yum/tasks/install_elrepo_kernel.yml:
--------------------------------------------------------------------------------
1 | - name: ELRepo - kernel-lt
2 | yum:
3 | name: "{{ system_yum.elrepo.kernel.name }}"
4 | enablerepo: "elrepo-kernel"
5 | update_cache: yes
6 |
7 | - name: Enable kernel as default on boot
8 | shell: "grub2-set-default {{ system_yum.elrepo.kernel.setboot }}"
9 | when:
10 |     - ansible_distribution == "CentOS" or ansible_distribution == "RedHat"  # unquoted so Ansible evaluates it (a quoted string is always truthy); RHEL reports 'RedHat', not 'Red Hat Enterprise Linux'
11 | - system_yum.elrepo.kernel.setboot is defined
12 |
--------------------------------------------------------------------------------
/EXAMPLES/pors-server.profile.d-example:
--------------------------------------------------------------------------------
1 | # call PORS
2 | PORSPATH=/opt/pors
3 | PORSINV=/opt/pors_data/inventories
4 |
5 | alias pors='cd '$PORSPATH' && ./pors ${@}' # will ask for an environment on start
6 |
7 | alias pors-prod='cd '$PORSPATH' && ./pors --env production -i '$PORSINV'/production/hosts $@'
8 | alias pors-staging='cd '$PORSPATH' && ./pors --env staging -i '$PORSINV'/staging/hosts $@'
9 | alias pors-dev='cd '$PORSPATH' && ./pors --env development -i '$PORSINV'/development/hosts $@'
10 |
--------------------------------------------------------------------------------
/roles/install/tasks/operation_mode.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Check if we are running in a cluster (Indexer)
3 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk btool server list clustering | egrep '^mode' |tr -d ' ' |cut -d '=' -f 2"
4 | register: splpeernode
5 |
6 | - name: Check if we are running in a cluster (SearchHead)
7 | shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk btool server list shclustering | egrep '^mode' |tr -d ' ' |cut -d '=' -f 2"
8 | register: splshcmember
9 |
--------------------------------------------------------------------------------
/playbooks/splunk/backup_splunk.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Backup Splunk
4 | hosts: "*:!pors_server"
5 | remote_user: "{{ pors_ssh_user }}"
6 | become: yes
7 | become_user: root
8 | strategy: free
9 | gather_facts: False
10 |
11 | vars:
12 | ansible_ssh_user: "{{ pors_ssh_user }}"
13 |
14 | pre_tasks:
15 |
16 | - name: "Collecting system information"
17 | setup:
18 |
19 | tasks:
20 |
21 | - include_role:
22 | name: install
23 | tasks_from: backup_splunk.yml
24 |
25 |
--------------------------------------------------------------------------------
/roles/system_user/tasks/ssh_authorized_keys.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - stat:
4 | path: '{{ repo_base }}/common/ssh/authorized_keys_{{ user_name }}'
5 | become: no
6 | register: authkey
7 | run_once: True
8 | delegate_to: localhost
9 |
10 | - name: Set authorized keys for {{ user_name }}
11 | authorized_key:
12 | user: "{{ user_name }}"
13 | state: present
14 | key: "{{ lookup('file', '{{ repo_base }}/common/ssh/authorized_keys_{{ user_name }}') }}"
15 | when: authkey.stat.exists == True
16 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_renewcerts.yml:
--------------------------------------------------------------------------------
1 | - name: Renew splunk SSL certificates
2 | hosts: all
3 | remote_user: "{{ pors_ssh_user }}"
4 | become: yes
5 | become_user: "{{ splunk_install_user }}"
6 |
7 | vars:
8 | splunk_restart: True
9 |
10 | tasks:
11 | - include_role:
12 | name: system_splunkcert
13 |
14 | handlers:
15 | - include: ../../roles/common/handlers/splunkd.yml
16 |
17 | # flush notify handlers to activate the splunk config
18 | # post_tasks:
19 | # - meta: flush_handlers
20 |
21 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/set_perms.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Set directory owner and group
4 | file:
5 | path: "{{ logstream_installation.home_path }}"
6 | owner: "{{ cribl_install_user }}"
7 | group: "{{ cribl_install_group }}"
8 | recurse: yes
9 | when:
10 | - logstream_installation.set_permissions == true
11 |
12 | - name: Set directory permissions
13 | file:
14 | path: "{{ logstream_installation.home_path }}/"
15 | mode: 0750
16 | when: logstream_installation.set_permissions == true
17 |
--------------------------------------------------------------------------------
/roles/common/templates/ae_deploy_apps.yml.j2:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Deploy Apps"
4 | hosts: "{{ '{{ target }}' }}"
5 | remote_user: "{{ '{{ pors_ssh_user }}' }}"
6 | become: yes
7 | become_user: "{{ '{{ splunk_install_user }}' }}"
8 | strategy: free
9 |
10 | vars:
11 |
12 | skip_ssh_splunk_authkey: True
13 | in_app_deploy_play: True
14 |
15 | roles:
16 |
17 | - role: system_sshid
18 | tags: always
19 |
20 | {{ app_roles.stdout_lines | link_app_list(target_env) | to_nice_yaml | indent(4, true) }}
21 |
--------------------------------------------------------------------------------
/roles/conf/user/tasks/add.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Add splunk user(s)"
4 | block:
5 |
6 | - name: "Set vars"
7 | ansible.builtin.set_fact:
8 | splunk_username: "{{ splacc.usr }}"
9 | splunk_password: "{{ splacc.pwd }}"
10 | splunk_user_role: "{{ splacc.role }}"
11 | no_log: true
12 |
13 | - include_role:
14 | name: install
15 | tasks_from: splunk_start.yml
16 |
17 | - include_role:
18 | name: install
19 | tasks_from: add_spl_ae_user.yml
20 |
21 |
22 |
--------------------------------------------------------------------------------
/roles/system_yum/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - include: update.yml
3 | when:
4 | - system.upgrade_os is defined and system.upgrade_os == True
5 | - ansible_facts['os_family'] == "RedHat"
6 |
7 | - include: add_elrepo.yml
8 | when:
9 | - system_yum.elrepo.enabled is defined and system_yum.elrepo.enabled == True
10 | - ansible_facts['os_family'] == "RedHat"
11 |
12 | - include: install_elrepo_kernel.yml
13 | when:
14 | - system_yum.elrepo.kernel.setup == "yes"
15 | - ansible_facts['os_family'] == "RedHat"
16 |
--------------------------------------------------------------------------------
/roles/system_user/tasks/login_test.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: "Test ssh-key login as user: {{ user_name }}"
3 | become: no
4 | remote_user: "{{ user_name }}"
5 | shell: whoami
6 | vars:
7 | ansible_ssh_private_key_file: "{{ pors_ssh_key }}"
8 | when: authkey.stat.exists == True
9 |
10 |
11 | - name: "Test password-based login as user: {{ user_name }}"
12 | become: no
13 | remote_user: "{{ user_name }}"
14 | vars:
15 | ansible_ssh_pass: "{{ user_pass }}"
16 | shell: whoami
17 | when: authkey.stat.exists == False
18 |
--------------------------------------------------------------------------------
/playbooks/pors/setup.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Initialize a PORS installation"
4 | hosts: "{{ target | d('localhost') }}"
5 | become: no
6 | connection: local
7 | gather_facts: false
8 |
9 | vars:
10 | var_validation_only: false
11 | initial_setup: true
12 | debug_off: "{{ no_debug | d(True) | bool }}"
13 |
14 | tasks:
15 |
16 | - include_role:
17 | name: common
18 | public: yes
19 |
20 | - include_role:
21 | name: pors/setup
22 |
23 | - include_role:
24 | name: pors/ssh
25 |
--------------------------------------------------------------------------------
/roles/shelper/tasks/remove_app.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Remove app
4 | file: path="{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
5 | state=absent
 6 | when: item.value.install is defined and
 7 |       item.value.install == false
 8 | with_dict: "{{ vars[app_variable] | app_default }}"
9 | notify:
10 | - splunk reload_deploy_server
11 | - splunk apply_cluster_bundle
12 | - splunk apply_shcluster_bundle
13 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/httpeventcollector/inputs.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk inputs.conf
4 | #####################################################################################################################
5 |
6 | splunk_inputs_conf:
7 | SSL:
8 | rootCA: $SPLUNK_HOME/etc/auth/cacert.pem
9 | serverCert: $SPLUNK_HOME/etc/auth/server.pem
10 | password: "{{ vault_SSL_password }}"
11 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/user_setup.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Copy initial PORS files"
4 | copy:
5 | remote_src: true
6 | src: "{{ item.s }}"
7 | dest: "{{ item.d }}"
8 | owner: "{{ pors_user }}"
9 | group: "{{ pors_group }}"
10 | mode: "{{ item.m | d('0600') }}"
11 | loop:
12 | - { b: no, bu: "", s: "{{ pors_install_dir }}/EXAMPLES/.pors_vars-example" , d: "${HOME}/.pors/vars" }
13 | - { b: no, bu: "", s: "{{ pors_install_dir }}/EXAMPLES/ansible.cfg" , d: "${HOME}/.ansible.cfg" }
14 | register: init_files
15 |
16 |
--------------------------------------------------------------------------------
/roles/system_user/tasks/nopass_sudoers.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Allow >{{ user_name }}< to have passwordless sudo"
4 | lineinfile:
5 | dest: /etc/sudoers.d/ansible_admin
6 | state: present
7 | regexp: '^{{ user_name }}'
8 | line: '{{ user_name }} ALL=(ALL) NOPASSWD: ALL'
9 | validate: 'visudo -cf %s'
10 | create: yes
11 |
12 | - name: Set proper sudoers file permissions
13 | file:
14 | path: /etc/sudoers.d/ansible_admin
15 | modification_time: preserve
16 | access_time: preserve
17 | mode: 0440
18 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_peernode/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk deployment directory
4 | # Keep in mind to replace "-" and "/" with an underscore "_"
5 | #####################################################################################################################
6 |
7 | deployment:
8 | # no direct push of apps to a peernode! Use the MasterNode to push.
9 | dir: disabled-apps
10 |
--------------------------------------------------------------------------------
/roles/shelper/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - block:
4 |
5 | - include: prereq.yml
6 | become: yes
7 | become_user: root
8 | when:
9 | - shelper_installation.enabled == true
10 |
11 | - include: git_checkout.yml
12 | become: yes
13 | become_user: root
14 | when:
15 | - shelper_installation.enabled == true
16 | - shelper_installation.install_shelper_git == true
17 |
18 | when: (upgrade_shelper | d(False)) or (shelper_installation.enabled is defined and shelper_installation.enabled)
19 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_shcmember/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk deployment directory
4 | # Keep in mind to replace "-" and "/" with an underscore "_"
5 | #####################################################################################################################
6 |
7 | deployment:
8 | # no direct push of apps to a SHC member! Use the Deployer to push.
9 | dir: disabled-apps
10 |
--------------------------------------------------------------------------------
/roles/system_ntp/molecule/default/molecule.yml:
--------------------------------------------------------------------------------
1 | ---
2 | role_name_check: 1
3 | dependency:
4 | name: galaxy
5 | driver:
6 | name: docker
7 | platforms:
8 | - name: instance
9 | image: "geerlingguy/docker-${MOLECULE_DISTRO:-centos7}-ansible:latest"
10 | command: ${MOLECULE_DOCKER_COMMAND:-""}
11 | volumes:
12 | - /sys/fs/cgroup:/sys/fs/cgroup:rw
13 | cgroupns_mode: host
14 | privileged: true
15 | pre_build_image: true
16 | provisioner:
17 | name: ansible
18 | playbooks:
19 | converge: ${MOLECULE_PLAYBOOK:-converge.yml}
20 |
--------------------------------------------------------------------------------
/roles/install/tasks/fetch_distserverkeys.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | #- name: Get serverName
4 | # shell: "grep serverName {{ splunk_installation.splunk_home_path }}/etc/system/local/server.conf | cut -c14-"
5 | # register: splunk_servername
6 |
7 | - name: "Fetch distServerKeys from all Servers"
8 | ansible.builtin.fetch:
9 | src: "{{ splunk_installation.splunk_home_path }}/etc/auth/distServerKeys/trusted.pem"
10 | dest: "{{ splunk_repository.repository_root }}/distServerKeys/{{ inventory_hostname_short }}/trusted.pem"
11 | flat: yes
12 | fail_on_missing: yes
13 |
--------------------------------------------------------------------------------
/roles/install/tasks/firewalld.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Open splunkd port on firewalld
3 | firewalld: port="{{ item }}"
4 | permanent=true
5 | state=enabled
6 | when:
7 | - splunk_installation.firewalld_open_port is defined
8 | - splunk_installation.firewalld_disabled is not defined
 9 | with_items: "{{ splunk_installation.firewalld_open_port | default([]) }}"
10 |
11 | - name: Disable firewalld completely
12 | service:
13 | name: firewalld
14 | enabled: no
15 | state: stopped
16 | when: splunk_installation.firewalld_disable is defined  # NOTE(review): key differs from 'firewalld_disabled' checked in the task above — confirm which inventory key is intended
17 |
--------------------------------------------------------------------------------
/roles/splunk_download/tasks/main.yml:
--------------------------------------------------------------------------------
---
# Download a Splunk tgz package of the given type (spltype) into the
# local package repository.

- name: Check/Create repo dir
  file:
    path: "{{ repo_base }}/common/packages/{{ spltype }}"
    state: directory

- name: Download Splunk tgz packages
  get_url:
    url: "{{ splunk_packages.linux_64_tgz[spltype].url }}"
    dest: "{{ repo_base }}/common/packages/{{ spltype }}/{{ tgzname }}"
    owner: "{{ splunk_repository.repository_owner }}"
    group: "{{ splunk_repository.repository_group }}"
    # quoted so YAML cannot re-type the octal file mode
    mode: "0644"
    tmp_dest: "{{ pors_temp_dir }}"
17 |
--------------------------------------------------------------------------------
/roles/conf/web/tasks/touch.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk web.conf
#####################################################################################################################

# Ensure web.conf exists with correct ownership before ini edits;
# changed_when: false because touch always reports changed.

- name: "Touch web.conf"
  file:
    path: "{{ splunk_conf_path }}/web.conf"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
    # quoted so YAML cannot re-type the octal file mode
    mode: "0600"
    state: touch
  changed_when: false
16 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/sudoperms.yml:
--------------------------------------------------------------------------------
---
# Swap the splunk-admin sudo drop-in for the cribl-admin one and install
# the matching polkit rule.

- name: "Remove template sudo config (if existent)"
  ansible.builtin.file:
    path: /etc/sudoers.d/splunk-admin
    state: absent

- name: "Add app config for sudo"
  ansible.builtin.template:
    src: "{{ repo_base }}/common/sudo/cribl-admin"
    dest: /etc/sudoers.d/cribl-admin
    mode: "0600"
    # validate before install: a syntactically broken sudoers drop-in
    # can lock out sudo on the whole host
    validate: 'visudo -cf %s'

- name: "Implement polkit rule"
  ansible.builtin.template:
    src: "{{ repo_base }}/common/sudo/cribl-polkit.j2"
    dest: "/etc/polkit-1/rules.d/10-cribl.rules"
18 |
19 |
--------------------------------------------------------------------------------
/playbooks/system/delete_lnxuser_account.yml:
--------------------------------------------------------------------------------
---

# Remove a linux user account (and all its files) from the targeted hosts
# via the system_user role; delete_user/remove_all_files steer that role.
- name: "Delete a remote linux user account"
  hosts: "{{ target }}"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: root
  gather_facts: false

  vars:
    var_validation_only: false
    delete_user: True
    remove_all_files: true

  tasks:

    # public: yes so vars/defaults of 'common' stay visible to later tasks
    - include_role:
        name: common
        public: yes

    - include_tasks: ../../roles/common_tasks/tasks/check_sudo.yml

    - import_role:
        name: system_user
25 |
--------------------------------------------------------------------------------
/roles/logstream_configure/templates/worker.j2:
--------------------------------------------------------------------------------
{# Cribl LogStream instance config (distributed section) for a worker:  #}
{# leader host = first member of the logstream_leader inventory group,  #}
{# auth token from vault. Jinja comments are stripped at render time.   #}
distributed:
  mode: worker
  group: {{ logstream_worker.group }}
  envRegex: {{ logstream_worker.envRegex }}
  master:
    host: {{ groups['logstream_leader'] | first }}
    port: {{ logstream_worker.port }}
    authToken: {{ vault_logstream_leader_authtoken }}
    compression: {{ logstream_worker.compression }}
    tls:
      disabled: {{ logstream_worker.disabledtls }}
    connectionTimeout: {{ logstream_worker.connectionTimeout }}
    writeTimeout: {{ logstream_worker.writeTimeout }}
14 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_upgrade.yml:
--------------------------------------------------------------------------------
---
# Upgrade the OS on all hosts; 'free' strategy lets each host proceed
# independently. (Added the '---' document start for consistency with
# every other playbook in this repository.)

- name: System Upgrade
  hosts: "*"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: root
  strategy: free
  gather_facts: true

  vars:
    ansible_ssh_user: "{{ pors_ssh_user }}"

  tasks:

    - include_role:
        name: system_journal

    - include_role:
        name: system_upgrade

    # optional: re-enable unattended upgrades after the manual upgrade
    - include_role:
        name: system_base
        tasks_from: unattended_upgrade.yml
      when: unattended_system_upgrade.enabled | d(False)
24 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_peernode/inputs.conf.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk inputs.conf
#####################################################################################################################

# SSL-only splunktcp input on 9998; cert paths are resolved by splunkd
# relative to $SPLUNK_HOME, the SSL password comes from vault.
splunk_inputs_conf:
  splunktcp_ssl:
    port: 9998
  SSL:
    sslPassword: "{{ vault_SSL_password }}"
    sslRootCAPath: $SPLUNK_HOME/etc/auth/cacert.pem
    serverCert: $SPLUNK_HOME/etc/auth/server.pem
13 |
--------------------------------------------------------------------------------
/playbooks/splunk/add_to_monitoring.yml:
--------------------------------------------------------------------------------
---
# NOTE(review): the play name interpolates {{ target }}, but hosts is the
# fixed "monitoringconsole" group; target only feeds parsed_groups.
- name: "Deploy MonitoringConsole keys to: {{ target }}"
  hosts: "monitoringconsole"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"

  vars:
    parsed_groups: "{{ target }}"
    splunk_restart: False

  roles:
    - group/searchhead

# Second play runs locally on the MC host to apply the monitoring roles.
- name: "Configure splunk Monitoring Console (Roles)"
  hosts: monitoringconsole
  connection: local

  roles:
    - group/monitoringconsole

  vars:
    splunk_restart: False
24 |
--------------------------------------------------------------------------------
/roles/conf/server/tasks/touch.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk server.conf
#####################################################################################################################

# Ensure server.conf exists with correct ownership before ini edits;
# changed_when: false because touch always reports changed.

- name: "Touch server.conf"
  file:
    path: "{{ splunk_conf_path }}/server.conf"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
    # quoted so YAML cannot re-type the octal file mode
    mode: "0600"
    state: touch
  changed_when: false
16 |
--------------------------------------------------------------------------------
/roles/conf/outputs/tasks/touch.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk outputs.conf
#####################################################################################################################

# Ensure outputs.conf exists with correct ownership before ini edits;
# changed_when: false because touch always reports changed.

- name: "Touch outputs.conf"
  file:
    path: "{{ splunk_conf_path }}/outputs.conf"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
    # quoted so YAML cannot re-type the octal file mode
    mode: "0600"
    state: touch
  changed_when: false
16 |
--------------------------------------------------------------------------------
/roles/group/shcmember/tasks/init.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Init shcluster
#####################################################################################################################

- name: Run init shcluster-config
  command: "{{ splunk_installation.splunk_home_path }}/bin/splunk init shcluster-config -mgmt_uri https://{{ inventory_hostname }}:8089 -replication_port 9888 -auth 'admin:{{ splunk_auth.admin_password }}'"
  # the command line embeds the admin password - keep it out of logs/output
  no_log: true
  when: splunk_server_conf.shclustering.id is defined
9 |
--------------------------------------------------------------------------------
/roles/conf/outputs/tasks/main.yml:
--------------------------------------------------------------------------------
---
# Assemble outputs.conf setting-by-setting from per-key task files.
# NOTE(review): the touch condition checks splunk_inputs_conf although this
# role manages outputs.conf - this looks like a copy/paste from the inputs
# role; verify whether splunk_outputs_conf was intended.
- include: touch.yml
  when: splunk_inputs_conf is defined

- include: tcpout/defaultGroup.yml
- include: tcpout:target_group/indexerDiscovery.yml
- include: tcpout:target_group/server.yml
- include: tcpout:target_group/sslCertPath.yml
- include: tcpout:target_group/sslPassword.yml
- include: tcpout:target_group/sslRootCAPath.yml
- include: tcpout:target_group/sslVerifyServerCert.yml
- include: tcpout:target_group/useAck.yml
- include: indexer_discovery/pass4SymmKey.yml
- include: indexer_discovery/master_uri.yml
15 |
--------------------------------------------------------------------------------
/roles/conf/distsearch/tasks/touch.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk distsearch.conf
#####################################################################################################################

# Ensure distsearch.conf exists with correct ownership before ini edits;
# changed_when: false because touch always reports changed.

- name: "Touch distsearch.conf"
  file:
    path: "{{ splunk_conf_path }}/distsearch.conf"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
    # quoted so YAML cannot re-type the octal file mode
    mode: "0600"
    state: touch
  changed_when: false
16 |
17 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_startstop.yml:
--------------------------------------------------------------------------------
---
# Boot Splunk once after installation (creates initial config), then stop
# it again so configuration tasks run against a cold instance.
# FIX: legacy key=value module args converted to native block YAML.

- name: First Time Boot Splunk (non-systemd only)
  service:
    name: splunk
    state: started
# when: splunkversionnum < 72

- name: Stop Splunk (non-systemd only)
  service:
    name: splunk
    state: stopped
# when: splunkversionnum < 72

#- name: First Time Boot Splunk (= v7.2)
#  service: name=Splunkd
#           state=started
#  when: splunkversionnum == 72

#- name: Stop Splunk (= 7.2)
#  service: name=Splunkd
#           state=stopped
#  when: splunkversionnum == 72
21 |
22 |
--------------------------------------------------------------------------------
/roles/system_disk/tasks/identify_disk.yml:
--------------------------------------------------------------------------------
---

# requires disk.UUID=TRUE in the vmx - which should be handled by PORS
# on new VMs already
# Resolves the /dev name of the disk whose serial matches server_disk_dev.
# lsblk -P prints name="..." serial="..." pairs; eval imports the matched
# pair into the shell so $name holds the device path. Fails (rc=3) when
# no serial matches, which usually means disk.UUID is not set in the vmx.
- name: "get device name"
  shell: |
    eval $(lsblk -SniPp -o NAME,SERIAL -e 11 | tr '[:upper:]' '[:lower:]' | grep "{{ server_disk_dev }}")
    if [ -z "$name" ];then
      echo "ERROR: cannot identify disk device name for {{ server_disk_dev }}"
      echo "Is 'disk.UUID=TRUE' set in the VM's vmx?"
      exit 3
    else
      echo "$name"
    fi
  args:
    executable: /bin/bash
  register: part_dev_name
18 |
--------------------------------------------------------------------------------
/filter_plugins/create_distsearch_serverlist.py:
--------------------------------------------------------------------------------
def create_distsearch_serverlist(serverlist):
    """Flatten a mixed list of server names and nested name lists into a
    flat, de-duplicated list, preserving first-seen order.

    :param serverlist: iterable whose items are strings or lists of strings
    :return: flat list of unique server names
    """
    servers_normalized = []
    for item in serverlist:
        # BUG FIX: the old `type(item) is unicode` check raised NameError
        # under Python 3; decode bytes and treat any text as a one-element list.
        if isinstance(item, bytes):
            item = item.decode()
        if isinstance(item, str):
            item = [item]
        servers_normalized.extend(item)

    # de-duplicate while preserving order (explicit loop instead of a
    # side-effect list comprehension)
    servers = []
    for item in servers_normalized:
        if item not in servers:
            servers.append(item)

    return servers


class FilterModule(object):
    """Expose create_distsearch_serverlist as an Ansible filter."""

    def filters(self):
        return {'create_distsearch_serverlist': create_distsearch_serverlist}
21 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/firewalld.yml:
--------------------------------------------------------------------------------
---
# Open the configured splunkd port(s) on firewalld, or stop/disable/mask
# the service entirely when requested by the inventory.

- name: "Open splunkd port on firewalld"
  firewalld:
    port: "{{ item }}"
    permanent: true
    state: enabled
  when:
    - splunk_installation.firewalld_open_port is defined
    - splunk_installation.firewalld_disabled is not defined
  # BUG FIX: the previous bare string was not templated, so the loop
  # iterated over the literal text instead of the port list
  with_items: "{{ splunk_installation.firewalld_open_port | default([]) }}"

# NOTE(review): the open-port task checks 'firewalld_disabled' while this
# task checks 'firewalld_disable' - confirm which variable name the
# inventories actually use.
- name: "Disable firewalld completely"
  ansible.builtin.systemd_service:
    name: firewalld
    enabled: false
    state: stopped
    masked: true
  when: splunk_installation.firewalld_disable is defined
19 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/main.yml:
--------------------------------------------------------------------------------
---
# Check the server cert's remaining lifetime and renew it if required,
# then do the same for the splunkweb cert - but only when web SSL is on.
# Order matters: check_certage.yml reads openssl_cert_path and sets
# cert_renew_required, which the renew includes consume.

- set_fact:
    openssl_cert_path: "{{ splunk_installation.splunk_home_path }}/etc/auth/server.pem"

- include: check_certage.yml

- include: renew_server_cert.yml
  when: cert_renew_required

# sets web_ssl_on (a registered result abused as a boolean via .changed)
- include: check_web_enabled.yml

- block:
    - set_fact:
        openssl_cert_path: "{{ splunk_installation.splunk_home_path }}/etc/auth/splunkweb/cert.pem"

    - include: check_certage.yml

    - include: renew_web_cert.yml
      when:
        - cert_renew_required
  when: web_ssl_on.changed
23 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/licensemaster/server.conf.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk server.conf
#####################################################################################################################

# master_uri "self" makes this host its own license master; all secrets
# are referenced from vault variables.
splunk_server_conf:
  general:
    pass4SymmKey: "{{ vault_general_pass4SymmKey }}"
  license:
    master_uri: self
  sslConfig:
    sslRootCAPath: $SPLUNK_HOME/etc/auth/cacert.pem
    sslPassword: "{{ vault_sslConfig_sslPassword }}"
14 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_login.yml:
--------------------------------------------------------------------------------
---

# login to allow any later commands to run w/o auth
- name: "Login into splunk"
  shell: |
    {{ splunk_installation.splunk_home_path }}/bin/splunk login -auth '{{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}'
  register: splauth
  retries: "{{ splunk_login_retries | d(5) | int }}"
  delay: 20
  #ignore_errors: "{{ splunk_login_error_ignore | d(False) | bool }}"
  # errors ignored on purpose: a failed login falls through to re-adding
  # the PORS account below
  ignore_errors: true
  until: splauth.rc == 0
  # the command line embeds vault credentials - keep them out of logs/output
  no_log: true

- name: "(Re)add PORS account to splunk"
  include_tasks: add_spl_user_passwd.yml
  when: splauth.rc != 0
17 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_heavyforwarder/inputs.conf.yml:
--------------------------------------------------------------------------------
---

#####################################################################################################################
# Configurations for Splunk inputs.conf
#####################################################################################################################

# Plain splunktcp on 9997 plus SSL splunktcp on 9998; cert paths resolved
# by splunkd relative to $SPLUNK_HOME, the SSL password comes from vault.
splunk_inputs_conf:
  splunktcp:
    port: 9997
  splunktcp_ssl:
    port: 9998
  SSL:
    rootCA: $SPLUNK_HOME/etc/auth/cacert.pem
    serverCert: $SPLUNK_HOME/etc/auth/server.pem
    password: "{{ vault_SSL_password }}"
16 |
--------------------------------------------------------------------------------
/EXAMPLES/pors_repo_common_sudo_splunk-admin-example:
--------------------------------------------------------------------------------
1 | ## Created by Ansible
2 |
3 | User_Alias USERSPLUNKADMIN=%splunkadmin
4 | Cmnd_Alias CMDSPLUNKADMIN =/bin/su - {{ splunk_install_user }} -c *, /bin/su - {{ splunk_install_user }}, /usr/sbin/tcpdump *, /usr/bin/systemctl * splunk, /usr/bin/systemctl * splunk.service, {{ splunk_installation.splunk_home_path }}/bin/splunk st*, {{ splunk_installation.splunk_home_path }}/bin/splunk restart
5 | USERSPLUNKADMIN ALL =(root) NOPASSWD: CMDSPLUNKADMIN
6 |
7 | {{ splunk_install_user }} ALL=NOPASSWD: /usr/bin/systemctl * splunk, /usr/bin/systemctl * splunk.service
8 |
--------------------------------------------------------------------------------
/playbooks/splunk/configure_monitoringconsole.yml:
--------------------------------------------------------------------------------
---
# HINT: The "hosts" value here is a dynamic group name used in PORS

# Apply the base system/conf roles on the MC host, then flush the notify
# handlers so splunkd picks up the new configuration immediately.
- name: "Configure splunk Monitoring Console (System)"
  hosts: "{{ target }}"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"

  roles:
    - conf/user
    - system_sshid
    - conf/web
    - conf/outputs
    - conf/server
    - conf/distsearch

  vars:
    splunk_restart: True

  # flush notify handlers to activate the splunk config
  post_tasks:
    - meta: flush_handlers
24 |
--------------------------------------------------------------------------------
/playbooks/vmware/vcenter_show-avail.yml:
--------------------------------------------------------------------------------
---
# Collect all locally defined vCenter hosts into a temp list file for the
# PORS dialog frontend.

- name: Shows local defined vCenter definitions
  hosts: localhost

  tasks:
    - name: Ensure temp dir is there
      file:
        path: "{{ pors_temp_dir }}"
        state: directory

    - name: Clear/Prepare vCenters temp file
      copy:
        content: ""
        dest: "{{ pors_temp_dir }}/vcenters.list"

    # typo fixed in the task name: occurence -> occurrence
    - name: Collecting defined vCenters (sorted by occurrence)
      lineinfile:
        line: "{{ item }}"
        path: "{{ pors_temp_dir }}/vcenters.list"
      with_items: "{{ vsphere.host }}"
21 |
22 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/backup.yml:
--------------------------------------------------------------------------------
---
# Refresh the local backup copy of the Cribl LogStream install directory.

- name: Clean previous backup
  become: yes
  become_user: root
  # BUG FIX: this referenced splunk_installation.remote_backup_path while
  # the backup task below writes to logstream_installation.remote_backup_path,
  # so the cribl backup directory was never actually cleaned
  shell: "rm -rf {{ logstream_installation.remote_backup_path }}/cribl/*"

# do not use copy due to its limitations
# copy facility does not scale to lots of files
- name: Backup the current install dir
  become: yes
  become_user: root
  shell: "rsync -a --outbuf=None --exclude '*/kvstore/' --exclude '*/xxxxxxxxxxxxxxxxxx_training_summary/' --exclude '*/xxxxxxxxxxxxxxxxxx_summary_dev/' {{ logstream_installation.home_path }}/ {{ logstream_installation.remote_backup_path }}/cribl/"
13 |
14 |
--------------------------------------------------------------------------------
/playbooks/splunk/deploy_pors_splunk_user.yml:
--------------------------------------------------------------------------------
---

# Verify (and, when login fails, re-create) the PORS service account in
# Splunk on every host except the PORS server itself.
- name: "Add/Change splunk user offline"
  hosts: "*:!pors_server"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: root
  gather_facts: True
  strategy: free

  vars:
    splunk_upgrade: False

  handlers:
    # NOTE(review): bare 'include:' is removed in newer ansible-core;
    # import_tasks is the long-term replacement here
    - include: ../../roles/common/handlers/splunkd.yml

  tasks:

    # splunk_login.yml registers splauth and re-adds the account on failure
    - include_role:
        name: install
        tasks_from: splunk_login.yml

    # - include_role:
    #     name: install
    #     tasks_from: add_spl_user_passwd.yml
    #   when: splauth.rc != 0
27 |
--------------------------------------------------------------------------------
/roles/proxmox/tasks/kvm_reboot.yml:
--------------------------------------------------------------------------------
---
# Restart each VM listed in server_hostname via the Proxmox API of the
# node selected by px_var; run_once so a single play host drives the API,
# with a 5s pause between VMs.
- name: "Reboot VM"
  community.general.proxmox_kvm:
    proxmox_default_behavior: compatibility
    node: "{{ proxmox.node[px_var].name }}"
    api_user: "{{ proxmox.node[px_var].api_user }}"
    api_token_id: "{{ proxmox.node[px_var].api_token_id }}"
    api_token_secret: "{{ proxmox.node[px_var].api_token_secret }}"
    api_host: "{{ proxmox.node[px_var].api_host }}"
    name: "{{ item }}"
    timeout: 80
    state: restarted
  run_once: True
  loop: "{{ server_hostname }}"
  loop_control:
    pause: 5
17 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/disable_unattended_updates.yml:
--------------------------------------------------------------------------------
---

# Stop and disable the dnf-automatic timer on RHEL-family 9+ hosts so no
# unattended package upgrades run behind the operator's back.
- name: "Force disabling unattended upgrades config for {{ ansible_os_family }}"
  block:

    - name: "Disable timer for unattended upgrades"
      ansible.builtin.systemd_service:
        daemon_reload: true
        name: dnf-automatic.timer
        enabled: no
        state: stopped

  # dnf-automatic exists on RHEL-family >= 9 (major version > 8)
  when: ansible_os_family == 'RedHat' and ansible_distribution_version.split('.')[0] | int > 8

#- name: "Force disabling unattended upgrades config for {{ ansible_os_family }}"
#  block:
#
#  when: ansible_os_family == 'Debian'
19 |
--------------------------------------------------------------------------------
/playbooks/proxmox/px_get_nodes.yml:
--------------------------------------------------------------------------------
---
# Write the names of all configured Proxmox nodes into a temp list file
# for the PORS dialog frontend.
- name: Get configured Proxmox Nodes
  hosts: "pors_server"
  gather_facts: false
  connection: local

  tasks:
    - name: Ensure temp dir is there
      file:
        path: "{{ pors_temp_dir }}"
        state: directory

    - name: Clean old lists
      file:
        path: "{{ pors_temp_dir }}/pve_nodes.lst"
        state: absent

    - name: get nodes
      lineinfile:
        path: "{{ pors_temp_dir }}/pve_nodes.lst"
        line: "{{ item }}"
        create: yes
      # NOTE(review): proxmox.node is indexed as a mapping elsewhere
      # (proxmox.node[px_var]); flatten(1) over a mapping yields its keys -
      # confirm that is the intended output here.
      loop: "{{ proxmox.node | flatten(1) }}"
24 |
--------------------------------------------------------------------------------
/playbooks/googlecp/gcp_get_ids.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Get configured GCP project ids
3 | hosts: "pors_server"
4 | gather_facts: false
5 | connection: local
6 |
7 | tasks:
8 | - name: Ensure temp dir is there
9 | file:
10 | path: "{{ pors_temp_dir }}"
11 | state: directory
12 |
13 | - name: Clean old lists
14 | file:
15 | path: "{{ pors_temp_dir }}/googlecp.ids"
16 | state: absent
17 |
18 | - name: get ids
19 | lineinfile:
20 | path: "{{ pors_temp_dir }}/googlecp.ids"
21 | line: "{{ item }}"
22 | create: yes
23 | loop: "{{ googlecp.project | flatten(1) }}"
24 |
--------------------------------------------------------------------------------
/dialogrc_mono:
--------------------------------------------------------------------------------
1 | #
2 | # Run-time configuration file for dialog
3 | #
4 | # PORS IN MONOCHROME / NO COLORS MODE
5 | #
6 | #
7 | # Types of values:
8 | #
9 | # Number -
10 | # String - "string"
11 | # Boolean -
12 | # Attribute - (foreground,background,highlight?,underline?,reverse?)
13 |
# Set aspect-ratio.
15 | aspect = 0
16 |
17 | # Set separator (for multiple widgets output).
18 | separate_widget = ""
19 |
20 | # Set tab-length (for textbox tab-conversion).
21 | tab_len = 0
22 |
23 | # Shadow dialog boxes? This also turns on color.
24 | use_shadow = OFF
25 |
26 | # Turn color support ON or OFF
27 | use_colors = OFF
28 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/add_user.yml:
--------------------------------------------------------------------------------
---
# Create the Cribl service group and user; skipped during upgrades or
# when user creation is disabled in logstream_installation.
# FIX: mixed '== True' / '== true' comparisons normalized to the
# idiomatic '| bool' form (identical outcome for boolean vars).

- name: Create unix group
  group:
    name: "{{ cribl_install_group }}"
    state: present
  when:
    - logstream_installation.create_user | bool
    - not (logstream_upgrade | bool)

- name: Create unix user
  user:
    name: "{{ cribl_install_user }}"
    group: "{{ cribl_install_group }}"
    comment: "Created by Ansible"
    shell: "/bin/bash"
    home: "{{ logstream_installation.home_path }}"
    # "!!" locks the password: login only via su or ssh key
    password: "!!"
    state: present
  when:
    - logstream_installation.create_user | bool
    - not (logstream_upgrade | bool)
23 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/check_web_enabled.yml:
--------------------------------------------------------------------------------
---

# Both tasks (ab)use changed_when as a boolean flag: "changed" means the
# grep matched, i.e. the feature is enabled; rc != 0 is tolerated.
- name: "Check if splunk web is enabled"
  shell: '{{ splunk_installation.splunk_home_path }}/bin/splunk btool web list settings |grep startwebserver | egrep -io "true|1|yes|enabled"'
  register: web_on
  ignore_errors: true
  changed_when: web_on.rc == 0


# only worth checking when the web server itself is on
- name: "Check if SSL is enabled on splunk web"
  shell: '{{ splunk_installation.splunk_home_path }}/bin/splunk btool web list settings |grep enableSplunkWebSSL | egrep -io "true|1|yes|enabled"'
  register: web_ssl_on
  ignore_errors: true
  changed_when: web_ssl_on.rc == 0
  when: web_on.changed
16 |
--------------------------------------------------------------------------------
/playbooks/cribl/ae_download_logstream.yml:
--------------------------------------------------------------------------------
---
# Download all configured LogStream tgz packages into the local repo.
# FIX: legacy key=value get_url args converted to block YAML; mode quoted
# as an octal string (a bare 644 risks decimal interpretation).
- hosts: localhost
  connection: local
  tasks:

    - name: Check/Create repo dir
      file:
        path: "{{ repo_base }}/common/packages/cribl"
        state: directory

    - name: Download logstream package
      get_url:
        url: "{{ item.value.url }}"
        dest: "{{ repo_base }}/common/packages/cribl"
        owner: "{{ splunk_repository.repository_owner }}"
        group: "{{ splunk_repository.repository_group }}"
        mode: "0644"
      with_dict: "{{ logstream_packages.linux_64_tgz }}"
      when: logstream_packages.linux_64_tgz is defined
19 |
--------------------------------------------------------------------------------
/roles/group/licensemaster/tasks/copy_enterprise_license_keys.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Copy Enterprise License Keys
#####################################################################################################################

- name: Install splunk license(s)
  copy:
    src: "{{ splunk_repository.repository_root }}/licenses/enterprise/"
    dest: "{{ splunk_installation.splunk_home_path }}/etc/licenses/enterprise/"
    mode: u+rwx,g+rwx
    owner: "{{ splunk_install_user }}"
    # BUG FIX: group was set from splunk_install_user; every other task in
    # this repo uses splunk_install_group for group ownership
    group: "{{ splunk_install_group }}"
13 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/install.yml:
--------------------------------------------------------------------------------
---
# Unpack the LogStream tarball (already present on the target) into the
# install root, clean up the package, and seed shell skeleton files.

- name: Install {{ logstream_installation.package_file }}
  unarchive:
    src: "{{ logstream_installation.remote_package_temp_path }}/{{ logstream_installation.package_file }}"
    dest: "{{ logstream_root_path }}"
    # FIX: 'copy: no' is the deprecated alias of remote_src - the archive
    # already lives on the remote host
    remote_src: yes

- name: Remove temporary package
  file:
    path: "{{ logstream_installation.remote_package_temp_path }}/{{ logstream_installation.package_file }}"
    state: absent
  when: logstream_installation.delete_package_after_install != false

# cp -u: only copy skel dotfiles (.bashrc etc.) that are missing/newer
- name: Copy skel files
  shell: "cp -uv /etc/skel/.b* {{ logstream_installation.home_path }}/"
16 |
17 |
--------------------------------------------------------------------------------
/roles/patch_datetime/tasks/post_patch.yml:
--------------------------------------------------------------------------------
# Rewrite the recorded checksum of splunk/etc/datetime.xml in the latest
# install manifest so Splunk's integrity check accepts the patched file;
# the edit is only kept if 'splunk validate files' passes on the result.
- name: Patching manifest
  become: yes
  become_user: "{{ splunk_install_user }}"
  lineinfile:
    backrefs: yes
    path: "{{ splunk_latest_manifest.stdout }}"
    state: present
    backup: true
    regexp: '(^f.*)(splunk/etc/datetime.xml .*)'
    # \1 keeps the leading file-record fields; the hash is the sha256 of
    # the patched datetime.xml
    line: '\1splunk/etc/datetime.xml e6016245a677bff48ea7ddbe8d4b36f9acbd02918e1f90ead812892692d655ea'
    validate: '{{ splunk_installation.splunk_home_path }}/bin/splunk validate files -manifest %s'

- name: File permissions
  file:
    path: "{{ splunk_latest_manifest.stdout }}"
    owner: "{{ splunk_install_user }}"
17 |
--------------------------------------------------------------------------------
/roles/system_dnsmasq/templates/networkmanager.conf.j2:
--------------------------------------------------------------------------------
{# NetworkManager drop-in: route DNS through dnsmasq; emit a per-domain server section for every entry that has both domain and ip, excluding reverse (in-addr.arpa) zones. #}
# {{ pors_managed_marker }}

[main]
dns=dnsmasq

[global-dns]
searches={% for server in dnsmasq_configure_searchdomains | default([]) %}{{ server }},{% endfor %}


[global-dns-domain-*]
servers={% for server in dnsmasq_configure_servers | default([]) %}{{ server.ip }},{% endfor %}

{% for server in dnsmasq_configure_servers | default([]) %}
{% if server.domain is defined and server.ip is defined and server.domain != "in-addr.arpa" %}

[global-dns-domain-{{ server.domain }}]
servers={{ server.ip }}

{% endif %}
{% endfor %}
22 |
--------------------------------------------------------------------------------
/roles/system_splunkcert/tasks/check_certage.yml:
--------------------------------------------------------------------------------
---
# Decide whether the cert at openssl_cert_path needs renewal: renewal is
# flagged when it expires on or before Dec 31 of NEXT year, i.e. certs
# are renewed well ahead of actual expiry.
- set_fact:
    futuredate: "{{ ansible_date_time.year | int + 1 }}1231"
    cert_renew_required: false
  run_once: true

# prints the cert's notAfter date as YYYYMMDD for numeric comparison
- name: "Get current certificate age"
  shell: 'date --date "$(openssl x509 -enddate -noout -in {{ openssl_cert_path }} | cut -d = -f2)" +%Y%m%d'
  register: cert_current
  # changed_when abused as a flag: "changed" marks a failed openssl call
  changed_when: cert_current.rc != 0

- name: Validate
  set_fact:
    cert_renew_required: true
  when: cert_current.stdout | int <= futuredate | int
# when: "{{ cert_current.stdout | int }} <= {{ futuredate | int }}"

- debug:
    var: cert_renew_required
20 |
--------------------------------------------------------------------------------
/roles/install/tasks/sys_check.yml:
--------------------------------------------------------------------------------
---

# this can be better fetched now directly with ansible FACTS!
# Heuristic: PID 1's cgroup path contains /docker/ inside containers.
- name: Check if we're running in docker
  shell: "grep /docker/ /proc/1/cgroup >> /dev/null"
  register: run_in_docker
  ignore_errors: yes


#################################################################
# This should be the last block in this file!
# run_in_vm is merely registered (or skipped) so later tasks can test
# whether any of the VM checks above matched.
- name: Set the generic VM var
  shell: "echo"
  register: run_in_vm
  when: (run_in_docker.rc is defined and run_in_docker.rc == 0)

#################################################################
# NO CODE AFTER THIS LINE - PUT NEW VM CHECKS ABOVE THE GENERIC
# BLOCK!
20 |
--------------------------------------------------------------------------------
/filter_plugins/create_distsearch_servers.py:
--------------------------------------------------------------------------------
def create_distsearch_servers(serverlist):
    """Build a distsearch server string from a mixed list of server names.

    Flattens nested lists, de-duplicates preserving first-seen order,
    appends the management port and joins with ", ".

    :param serverlist: iterable whose items are strings or lists of strings
    :return: comma-separated string of "host:8089" entries
    """
    servers_normalized = []
    for item in serverlist:
        # BUG FIX: the old `type(item) is unicode` check raised NameError
        # under Python 3; decode bytes and treat any text as a one-element list.
        if isinstance(item, bytes):
            item = item.decode()
        if isinstance(item, str):
            item = [item]
        servers_normalized.extend(item)

    # de-duplicate while preserving order (explicit loop instead of a
    # side-effect list comprehension)
    servers = []
    for item in servers_normalized:
        if item not in servers:
            servers.append(item)

    servers = [server + ":8089" for server in servers]
    return ', '.join(servers)


class FilterModule(object):
    """Expose create_distsearch_servers as an Ansible filter."""

    def filters(self):
        return {'create_distsearch_servers': create_distsearch_servers}
24 |
--------------------------------------------------------------------------------
/playbooks/vmware/ae_download_openvmtools.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: pors_server
3 | connection: local
4 |
5 | tasks:
6 | - debug: msg="{{ file.linux_64_rpm.tools.name }}"
7 |
8 | - name: Download open-vm-tools rpm packages
9 | get_url: url="{{ item.value.repo }}/{{ item.value.name }}-{{ item.value.version }}-{{ item.value.release }}.{{ item.value.arch }}.rpm"
10 | dest={{ splunk_repository.repository_root }}/packages
11 | owner={{ splunk_repository.repository_owner }}
12 | group={{ splunk_repository.repository_group }}
13 | mode=644
14 | with_dict: "{{ vmtools_packages.linux_64_rpm }}"
15 | when: vmtools_packages.linux_64_rpm is defined
16 |
--------------------------------------------------------------------------------
/roles/system_optimize/tasks/sys_check.yml:
--------------------------------------------------------------------------------
---

# this can be better fetched now directly with ansible FACTS!
# Detect a docker environment by probing /proc/1/cgroup.
- name: Check if we're running in docker
  shell: "grep /docker/ /proc/1/cgroup >> /dev/null"
  register: run_in_docker
  ignore_errors: yes


#################################################################
# This should be the last block in this file!
# run_in_vm is only registered (and therefore defined) when the
# docker grep above succeeded (rc == 0).
- name: Set the generic VM var
  shell: "echo"
  register: run_in_vm
  when: (run_in_docker.rc is defined and run_in_docker.rc == 0)

#################################################################
# NO CODE AFTER THIS LINE - PUT NEW VM CHECKS ABOVE THE GENERIC
# BLOCK!
--------------------------------------------------------------------------------
/roles/system_sshid/tasks/update_authkey.yml:
--------------------------------------------------------------------------------
---

#- name: "Ensure home exists"
#  file:
#    path: "/home/{{ system_shared_service_account }}"
#    owner: "{{ system_shared_service_account }}"
#    mode: 0700
#    state: directory

# Deploys the shared authorized_keys file from the repository to the
# shared service account.
- name: "Set/Update authorized keys for {{ system_shared_service_account }}"
  become: yes
  become_user: "{{ system_shared_service_account }}"
  ansible.posix.authorized_key:
    user: "{{ system_shared_service_account }}"
    state: present
    # FIX: moustaches must not be nested inside another Jinja
    # expression — concatenate plain variable references instead.
    key: "{{ lookup('file', repo_base ~ '/common/ssh/authorized_keys_' ~ system_shared_service_account) }}"
    # only wipe foreign keys when explicitly requested
    exclusive: "{{ ssh_replace_authkeys | d(False) }}"
18 |
--------------------------------------------------------------------------------
/EXAMPLES/pors-bashrc:
--------------------------------------------------------------------------------
# .bashrc

# Source global definitions
if [ -f /etc/bashrc ]; then
	. /etc/bashrc
fi

# shortcut for viewing a specific vault
# usage: vault-show <environment>
vault-show(){
    # BUGFIX: usage message used to reference "view-vault"
    [ -z "$1" ] && echo -e "\npls specify the environment e.g.\n\n\tvault-show production\n" && return 4
    ansible-vault view "/opt/pors_data/inventories/$1/group_vars/all/vault.yml"
}

# shortcut for editing a specific vault
# usage: vault-edit <environment>
vault-edit(){
    # BUGFIX: usage message used to reference "view-vault"
    [ -z "$1" ] && echo -e "\npls specify the environment e.g.\n\n\tvault-edit production\n" && return 4
    ansible-vault edit "/opt/pors_data/inventories/$1/group_vars/all/vault.yml"
}

# auto change dir to the PORS install directory
cd /opt/pors
22 |
--------------------------------------------------------------------------------
/roles/proxmox/tasks/kvm_add_disk.yml:
--------------------------------------------------------------------------------
---
# Adds (or changes) an extra SCSI disk on an existing Proxmox VM.
- name: "Add/Change disk on {{ server_hostname }}"
  community.general.proxmox_kvm:
    #proxmox_default_behavior: compatibility
    node: "{{ proxmox.node[px_var].name }}"
    api_user: "{{ proxmox.node[px_var].api_user }}"
    api_token_id: "{{ proxmox.node[px_var].api_token_id }}"
    api_token_secret: "{{ proxmox.node[px_var].api_token_secret }}"
    api_host: "{{ proxmox.node[px_var].api_host }}"
    name: "{{ server_hostname }}"
    description: "Ansible changed VM settings on: {{ datetime }}"
    scsi:
      # disk size in GiB is now overridable via disk_size; defaults to
      # the previously hard-coded 5
      scsi1: "{{ proxmox.node[px_var].storage }}:{{ disk_size | d(5) }},format=qcow2"
    update: yes
15 |
--------------------------------------------------------------------------------
/roles/system_user/tasks/delete.yml:
--------------------------------------------------------------------------------
---

# Removes a unix account and its passwordless-sudo entry.
- name: "REMOVE {{ user_name }} unix user"
  user:
    name: "{{ user_name }}"
    group: "{{ user_name }}"
    # also remove home dir / mail spool when requested by the caller
    force: "{{ remove_all_files | d(false) }}"
    state: absent

- name: "Remove >{{ user_name }}< from passwordless sudo"
  lineinfile:
    dest: /etc/sudoers.d/ansible_admin
    state: absent
    regexp: '^{{ user_name }}'
    line: '{{ user_name }} ALL=(ALL) NOPASSWD: ALL'
    # never write a broken sudoers file
    validate: 'visudo -cf %s'
    create: no

- name: Set proper sudoers file permissions
  file:
    path: /etc/sudoers.d/ansible_admin
    modification_time: preserve
    access_time: preserve
    # quoted so YAML does not reinterpret the octal literal
    mode: "0440"
25 |
--------------------------------------------------------------------------------
/filter_plugins/create_shcluster_mgmt_uri_servers.py:
--------------------------------------------------------------------------------
def create_shcluster_mgmt_uri_servers(serverlist):
    """Build a SHC ``mgmt_uri`` server string from a mixed server list.

    ``serverlist`` may contain plain strings and/or nested lists of
    strings.  The input is flattened, duplicates are dropped (first
    occurrence wins, order preserved), every entry is wrapped as
    ``https://<server>:8089`` and the result is joined with commas
    (no space — Splunk's servers_list format).

    BUGFIX: the original checked ``type(item) is unicode`` which raises
    ``NameError`` on Python 3 where ``unicode`` no longer exists.
    """
    # Flatten: a bare string becomes a one-element list; nested lists
    # are expanded element by element.
    flattened = []
    for item in serverlist:
        if isinstance(item, str):
            item = [item]
        for entry in item:
            flattened.append(str(entry))

    # De-duplicate while preserving first-seen order.
    unique = []
    for entry in flattened:
        if entry not in unique:
            unique.append(entry)

    return ','.join("https://" + server + ":8089" for server in unique)


class FilterModule(object):
    """Expose the filter to Ansible."""

    def filters(self):
        return {'create_shcluster_mgmt_uri_servers': create_shcluster_mgmt_uri_servers}
24 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/service_checks.yml:
--------------------------------------------------------------------------------
---
# check for existence of init file
- name: Check init existence
  stat: path=/etc/init.d/cribl
  register: criblinitfile

# check for existence of bin file
- stat: path="{{ logstream_installation.home_path }}/bin/cribl"
  register: criblbin
  ignore_errors: yes

# check for running daemon (independent from systemd, init etc!!)
# NOTE(review): failed_when rc == 0 marks this task "failed" when
# cribl IS running — looks inverted at first glance, but ignore_errors
# masks the failure and consumers may key off cribl_state.rc/.failed;
# confirm the intended semantics before changing.
- name: Service status
  shell: "ps aux |egrep -v '(ssh|grep|tail|less|vim|vi)' |grep cribl"
  register: cribl_state
  failed_when: cribl_state.rc == 0
  ignore_errors: yes

# check for existence of systemd file
- stat: path=/etc/systemd/system/cribl.service
  register: criblsystemdfile
22 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/firewalld.yml:
--------------------------------------------------------------------------------
---
# Opens the configured splunkd ports unless firewalld is explicitly
# disabled; in that case the daemon itself is stopped and masked off.
- name: Open splunkd port on firewalld
  firewalld:
    port: "{{ item }}"
    permanent: true
    state: enabled
  when:
    - splunk_installation.firewalld_open_port is defined
    - splunk_installation.firewalld_disabled is not defined or (splunk_installation.firewalld_disabled is defined and splunk_installation.firewalld_disabled != "true")
  # FIX: bare-variable with_items is deprecated; template it and
  # default to an empty list
  with_items: "{{ splunk_installation.firewalld_open_port | default([]) }}"

- name: Disable firewalld completely
  service:
    name: firewalld
    enabled: no
    state: stopped
  when:
    - splunk_installation.firewalld_disabled is defined and splunk_installation.firewalld_disabled == "true"
18 |
--------------------------------------------------------------------------------
/roles/common/templates/add_app_groupvars.j2:
--------------------------------------------------------------------------------
1 | ############################################
2 | #
3 | # Default app spec for the target group this
# file resides in.
5 | #
6 | ############################################
7 |
8 | {{ app_variable }}:
9 | {{ deployment.dir }}:
10 | install: true
11 | clean_install: {{ app_clean_install | d('false') }}
12 | delete: false
13 | {% if filename is defined and filename != "" %}
14 | bundle_dir: {{ app_dir }}
15 | bundle: {{ filename }}
16 | {% else %}
17 | git_repo: {{ git_url }}
18 | # the following git branch was autodetected when linking
19 | # and might get overwritten by git_default_branch_override
20 | # during deployment
21 | git_version: {{ git_branch }}
22 | {% endif %}
23 |
--------------------------------------------------------------------------------
/roles/local_tmpdir/tasks/main.yml:
--------------------------------------------------------------------------------
---

# Renders a small "getvar" task file into a temp dir, runs it, then
# persists the resolved variable value into ansible_<varname>.var.

- name: Ensure temp dir is there
  file:
    path: "{{ pors_temp_dir }}"
    state: directory

- name: Clean old var file
  file:
    path: "{{ pors_temp_dir }}/ansible_{{ varname }}.var"
    state: absent

- name: Fill template
  template:
    src: getvar.yml.j2
    dest: "{{ pors_temp_dir }}/getvar.yml"
    # quoted so YAML does not reinterpret the octal literal
    mode: "0640"

# FIX: the bare 'include' action was removed in ansible-core 2.16;
# include_tasks is the dynamic equivalent (needed here because the
# file only exists at runtime)
- include_tasks: "{{ pors_temp_dir }}/getvar.yml"

- name: Write variable
  lineinfile:
    path: "{{ pors_temp_dir }}/ansible_{{ varname }}.var"
    line: "{{ item.value }}"
    create: yes
  loop: "{{ lookup('dict', parsed_var) }}"
  # skip the bookkeeping keys of the parsed result
  when:
    - item.key != "changed"
    - item.key != "failed"
30 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/requirements.yml:
--------------------------------------------------------------------------------
---

# Installs roles and collections from roles/requirements.yml.
# Exactly one of the two tasks runs, selected by
# pors_allow_autoupgrade_requirements.
- name: "Install all ansible requirements"
  shell: "ansible-galaxy {{ item }} install -v -r {{ pors_install_dir }}/roles/requirements.yml"
  run_once: true
  register: gi_inst
  # 'nstalling' matches both "Installing" and "installing" output
  changed_when: "'nstalling' in gi_inst.stdout"
  loop:
    - role
    - collection
  when: not pors_allow_autoupgrade_requirements

# Same as above but forces upgrades (including dependencies).
- name: "Install and/or upgrade all ansible requirements"
  shell: "ansible-galaxy {{ item }} install -v -r {{ pors_install_dir }}/roles/requirements.yml --force-with-deps"
  run_once: true
  register: gi_upgr
  changed_when: "'nstalling' in gi_upgr.stdout"
  loop:
    - role
    - collection
  when: pors_allow_autoupgrade_requirements
22 |
--------------------------------------------------------------------------------
/roles/group/shcmember/tasks/state.yml:
--------------------------------------------------------------------------------
---

# Read-only probes of the search head cluster state.
- name: "Check if we can get a shc state"
  shell: |
    sleep 3m && {{ splunk_installation.splunk_home_path }}/bin/splunk show shcluster-status -auth '{{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}' | grep -i Members
  args:
    executable: /bin/bash
  register: get_shc_state
  # read-only check — never report "changed"
  # (was: rc == 999, a condition that can never be true)
  changed_when: false

- name: "Check if there is a captain already"
  shell: |
    {{ splunk_installation.splunk_home_path }}/bin/splunk show shcluster-status -auth '{{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}' | grep elected_captain
  args:
    executable: /bin/bash
  register: captain_elected
  ignore_errors: yes
  # only meaningful when the cluster answered above
  when: get_shc_state.rc == 0
19 |
--------------------------------------------------------------------------------
/playbooks/splunk/deploy_monitoringkeys.yml:
--------------------------------------------------------------------------------
---
- name: "Deploy MonitoringConsole keys to: {{ target }}"
  hosts: "monitoringconsole"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"

  vars:
    splunk_restart: False

  roles:
    - splunk_sites

  tasks:
    - set_fact:
        parsed_groups: ["{{ target }}"]
      # fall back to the single target group when nothing was parsed.
      # BUGFIX: the old condition `... or "[]"` compared nothing — a
      # non-empty string literal is always truthy, so the condition
      # was always true and parsed_groups was always overwritten.
      when: parsed_groups is undefined or parsed_groups == [] or parsed_groups == "[]"

    - debug:
        var: parsed_groups

- name: "Deploy MonitoringConsole keys to: {{ target }}"
  hosts: "monitoringconsole"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"

  roles:
    - group/searchhead
30 |
--------------------------------------------------------------------------------
/playbooks/system/ae_ssh.yml:
--------------------------------------------------------------------------------
---
# Pre-seeds ~/.ssh/known_hosts for new hosts so later plays do not
# stall on the interactive host-key prompt.
- name: Check ssh
  hosts: '*'
  connection: local

  tasks:

    - name: "Check known_hosts for {{ inventory_hostname }}"
      # ssh-keygen -F exits 1 when the host has no known_hosts entry
      local_action: "shell ssh-keygen -F {{ inventory_hostname }}"
      register: has_entry_in_known_hosts_file
      changed_when: false
      ignore_errors: yes

    - name: Ignore host key on first run
      when: has_entry_in_known_hosts_file.rc == 1
      set_fact:
        ansible_ssh_common_args: '-o StrictHostKeyChecking=no'

    # a real SSH connection stores the host key as a side effect
    - name: connect and save key (when first run)
      connection: ssh
      remote_user: "{{ pors_ssh_user }}"
      command: echo successfully added host key
      when: has_entry_in_known_hosts_file.rc == 1
23 |
--------------------------------------------------------------------------------
/roles/logstream_configure/tasks/configure_leader.yml:
--------------------------------------------------------------------------------
---
- name: Check/Create local system dir
  file:
    path: "{{ logstream_installation.home_path }}/local/_system"
    state: directory
    # BUGFIX: owner/group were swapped (owner was set from the *group*
    # variable and vice versa)
    owner: "{{ cribl_install_user }}"
    group: "{{ cribl_install_group }}"
    mode: "0775"
    recurse: yes

- name: Create leader config
  template:
    src: leader.j2
    dest: "{{ logstream_installation.home_path }}/local/_system/instance.yml"
  notify: cribl restart

- name: Set permissions on config file
  file:
    path: "{{ logstream_installation.home_path }}/local/_system/instance.yml"
    owner: "{{ cribl_install_user }}"
    group: "{{ cribl_install_group }}"
    mode: "0660"
  notify: cribl restart
22 |
--------------------------------------------------------------------------------
/roles/logstream_configure/tasks/configure_worker.yml:
--------------------------------------------------------------------------------
---
- name: Check/Create local system dir
  file:
    path: "{{ logstream_installation.home_path }}/local/_system"
    state: directory
    # BUGFIX: owner/group were swapped (owner was set from the *group*
    # variable and vice versa)
    owner: "{{ cribl_install_user }}"
    group: "{{ cribl_install_group }}"
    mode: "0775"
    recurse: yes

- name: Create worker config
  template:
    src: worker.j2
    dest: "{{ logstream_installation.home_path }}/local/_system/instance.yml"
  notify: cribl restart

- name: Set permissions on config file
  file:
    path: "{{ logstream_installation.home_path }}/local/_system/instance.yml"
    owner: "{{ cribl_install_user }}"
    group: "{{ cribl_install_group }}"
    mode: "0660"
  notify: cribl restart
22 |
--------------------------------------------------------------------------------
/playbooks/splunk/configure_licensemaster.yml:
--------------------------------------------------------------------------------
---
# HINT: The "hosts" value here is a dynamic group name used in PORS

# Applies the conf/* base roles and the licensemaster group role, then
# registers the host in monitoring.
- name: Configure splunk licensemaster
  hosts: "{{ target }}"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"

  tasks:

    - include_role:
        name: conf/user

    - include_role:
        name: system_sshid

    - include_role:
        name: conf/web

    - include_role:
        name: conf/outputs

    - include_role:
        name: conf/server

    # role-specific configuration for the license master itself
    - include_role:
        name: group/licensemaster

    - include_role:
        name: common_tasks
        tasks_from: monitoring.yml
33 |
--------------------------------------------------------------------------------
/roles/system_prep/tasks/regen_ssh_host_keys.yml:
--------------------------------------------------------------------------------
---

# Remove the existing host keys so ssh-keygen can regenerate them.
- file:
    state: absent
    path: "{{item}}"
  loop:
    - /etc/ssh/ssh_host_rsa_key
    - /etc/ssh/ssh_host_dsa_key
    - /etc/ssh/ssh_host_ecdsa_key
    - /etc/ssh/ssh_host_ed25519_key

- name: Regenerate host SSH key (RSA)
  shell: ssh-keygen -q -b 8192 -t rsa -f /etc/ssh/ssh_host_rsa_key -C "" -N ""

# NOTE(review): DSA key support is disabled/removed in modern OpenSSH
# releases — this step may fail there; confirm it is still required.
- name: Regenerate host SSH key (DSA)
  shell: ssh-keygen -q -t dsa -f /etc/ssh/ssh_host_dsa_key -C "" -N ""

- name: Regenerate host SSH key (ECDSA)
  shell: ssh-keygen -q -t ecdsa -f /etc/ssh/ssh_host_ecdsa_key -C "" -N ""

# -a 100: 100 KDF rounds for the private key file
- name: Regenerate host SSH key (ED25519)
  shell: ssh-keygen -q -t ed25519 -a 100 -f /etc/ssh/ssh_host_ed25519_key -C "" -N ""
23 |
--------------------------------------------------------------------------------
/roles/system_sshid/tasks/add_privkey.yml:
--------------------------------------------------------------------------------
---

# Installs the splunk service account's SSH keypair from the vault.
- name: Add private key (SSH)
  ansible.builtin.copy:
    content: "{{ vault_ssh_privkey_splunk }}"
    # canonical lowercase boolean
    force: true
    dest: "{{ splunk_installation.splunk_home_path }}/.ssh/id_rsa"
    # quoted so YAML does not reinterpret the octal literal
    mode: "0600"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
  when: vault_ssh_privkey_splunk is defined
  # no_log: true

- name: "Add pub key (SSH)"
  ansible.builtin.copy:
    content: "{{ vault_ssh_pubkey_splunk }}"
    force: true
    dest: "{{ splunk_installation.splunk_home_path }}/.ssh/id_rsa.pub"
    mode: "0600"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_group }}"
  when: vault_ssh_pubkey_splunk is defined
23 |
--------------------------------------------------------------------------------
/roles/install_logstream/tasks/service_start.yml:
--------------------------------------------------------------------------------
---
- name: Import service checks
  import_tasks: service_checks.yml

# check for existence of daemon bin file
- stat: path="{{ logstream_installation.home_path }}/bin/cribl"
  register: criblbin
  failed_when: criblbin.stat.exists != True

- name: systemctl reload
  shell: systemctl daemon-reload
  ignore_errors: yes

- name: Start daemon
  service:
    name: cribl
    state: restarted

# ensure daemon is REALLY running ! Do not rely on init, systemd etc!!
# the sleep gives the service time to fork and settle before checking
- name: Re-Check daemon status
  shell: "sleep 10s && ps aux |grep -Ev '(ssh|grep|tail|less|vim|vi|patrol)' |grep cribl"
  register: daemon_laststate
  failed_when: daemon_laststate.rc != 0
24 |
25 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_start.yml:
--------------------------------------------------------------------------------
---
- name: Import splunk checks
  import_tasks: splunk_checks.yml

# check for existence of splunk bin file
- stat: path="{{ splunk_installation.splunk_home_path }}/bin/splunk"
  register: splunkbin
  failed_when: splunkbin.stat.exists != True

- name: systemctl reload
  shell: systemctl daemon-reload
  ignore_errors: yes

- name: Start Splunk by service
  import_tasks: splunk_service_start.yml

# ensure splunk is REALLY running ! Do not rely on init, systemd etc!!
# the sleep gives splunkd time to fork and settle before checking
- name: Re-Check Splunk Status
  shell: "sleep 10s && ps aux |grep -vE '(ssh|grep|tail|less|vim|vi|patrol|ansible)' |grep 'splunkd '"
  register: splunk_laststate
  failed_when: splunk_laststate.rc != 0
22 |
23 |
--------------------------------------------------------------------------------
/roles/ssh/check/tasks/check_ssh.yml:
--------------------------------------------------------------------------------
---

# Waits until the target's SSH banner is reachable, then re-gathers
# facts (e.g. after a reboot or fresh provisioning).
- name: "Wait for SSH coming up (timeout: {{ wait_timeout | default(240)}}s)"
  become: no
  wait_for:
    port: 22
    # prefer the explicit server_hostname, then the ssh host vars,
    # finally the inventory name
    host: '{{ (server_hostname|default(ansible_ssh_host)|default(ansible_host))|default(inventory_hostname) }}'
    search_regex: OpenSSH
    delay: 2
    timeout: "{{ wait_timeout | default(240)}}"
  connection: local
  ignore_errors: True
  register: ssh_avail

#- name: "Wait for SSH coming up (timeout: {{ wait_timeout | default(240)}}s)"
#  ansible.builtin.wait_for_connection:
#    timeout: "{{ wait_timeout | default(240)}}"
#  become: no
#  delegate_to: localhost

# drop any cached facts so the following setup is a clean re-gather
- meta: clear_facts

- name: Gather facts for {{ server_hostname }}
  setup:
25 |
--------------------------------------------------------------------------------
/roles/system_disk/tasks/create_swap_nolvm.yml:
--------------------------------------------------------------------------------
---
# Creates a swap partition on a raw (non-LVM) disk and activates it.
- name: "Partitioning {{ part_dev_name.stdout }}"
  parted:
    device: "{{ part_dev_name.stdout }}"
    number: 1
    label: gpt
    state: present
  when: server_disk_dev is defined

# NOTE(review): this uses server_disk_dev while the other tasks use
# part_dev_name.stdout — confirm both always refer to the same device.
- name: "Make {{ part_dev_name.stdout }} a swap device"
  shell: "mkswap {{ server_disk_dev }}1"
  when: server_disk_dev is defined

- name: "add {{ part_dev_name.stdout }} to fstab"
  mount:
    src: "{{ part_dev_name.stdout }}1"
    path: none
    opts: "{{ server_disk_mountopts | default('sw')}}"
    fstype: swap
    passno: "0"
    backup: yes
    state: present
  when: server_disk_dev is defined

# NOTE(review): unconditional — runs even when server_disk_dev is
# undefined; harmless (activates whatever fstab lists) but verify.
- name: "Enable the new swap space"
  shell: swapon -a
27 |
--------------------------------------------------------------------------------
/roles/install/tasks/install_splunk.yml:
--------------------------------------------------------------------------------
---
# Installs Splunk from the previously staged package (rpm or tgz) and
# cleans up the staging file afterwards.
- name: Install Splunk package from rpm
  yum:
    name: "{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
    state: present
  when: splunk_installation.package_format == "rpm"

- name: Install Splunk package from tgz (shell mod)
  shell: "tar xzf {{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
  args:
    chdir: /opt
  when: splunk_installation.package_format == "tgz"

- name: Remove temporary Splunk package
  # use the file module instead of shelling out to rm (idempotent,
  # no warning from ansible-lint)
  file:
    path: "{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
    state: absent
  when: splunk_installation.delete_package_after_install != false

- include_tasks: set_perms.yml
18 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/update_package_db.yml:
--------------------------------------------------------------------------------
---

# Refreshes the OS package metadata cache for all supported families.
- name: "update pkg cache (debian)"
  ansible.builtin.apt:
    update_cache: true
  when: ansible_facts['os_family'] == "Debian"

- block:

    - name: "update pkg cache (redhat >= 9.x)"
      ansible.builtin.dnf:
        update_cache: true
      when: ansible_distribution_version.split('.')[0] | int > 8

    # label fixed: the condition matches major versions *below* 9,
    # i.e. RHEL <= 8.x (was mislabeled "=< 9.x")
    - name: "update pkg cache (redhat <= 8.x)"
      ansible.builtin.yum:
        update_cache: true
      when: ansible_distribution_version.split('.')[0] | int < 9

  when: ansible_facts['os_family'] == "RedHat"

- name: "update pkg cache (suse)"
  community.general.zypper:
    update_cache: true
  when: ansible_facts['os_family'] == "Suse"
27 |
--------------------------------------------------------------------------------
/roles/system_dnsmasq/tasks/activate.yml:
--------------------------------------------------------------------------------
---

# Activates dnsmasq either as a standalone service or via
# NetworkManager, depending on dnsmasq_configure_networkmanager.
- name: Enabling the dnsmasq service
  ansible.builtin.service:
    name: dnsmasq
    state: restarted
    enabled: true
  when:
    - dnsmasq_configure_activate
    - not dnsmasq_configure_networkmanager

- block:

    - name: "Disabling the regular dnsmasq service (handled by NM)"
      ansible.builtin.service:
        name: dnsmasq
        state: stopped
        enabled: false

    - name: "Restarting NetworkManager to start dnsmasq"
      ansible.builtin.service:
        name: NetworkManager
        state: restarted

  # only restart when one of the config tasks actually changed a file
  when:
    - dnsmasq_configure_networkmanager
    - (dnsmasq_nm_setup.changed or dnsmasq_nm_conf.changed or dnsmasq_conf.changed)
28 |
--------------------------------------------------------------------------------
/docs/generate.sh:
--------------------------------------------------------------------------------
#!/bin/bash
###############################################################################################################
# execute this from the ROOT of the repo!
###########################################

# abort on the first failing command (e.g. wrong working directory)
set -e

# BUGFIX: the former second export overwrote the first one, throwing
# away the commit information — both parts are combined now.
export PROJECT_NUMBER="$(git rev-parse HEAD ; git diff-index --quiet HEAD || echo '(with uncommitted changes)') / Generated: $(date +%Y-%m-%d)"
cd ./docs
(exec doxygen Doxyfile_extra)
cd ../github.io/pors/
git checkout develop
git status
echo -e "\n\tcd ../github.io/pors/\n\tgit add .\n\tgit commit -a -m 'update documentation'"
echo -e "\tgit checkout master"
echo -e "\tgit merge develop"
echo -e "\tgit checkout develop"
echo -e "\tgit push develop master (do that in the UI to avoid user/pw issues)"
18 |
--------------------------------------------------------------------------------
/roles/proxmox/tasks/kvm_poweron.yml:
--------------------------------------------------------------------------------
---
# Powers on one or more Proxmox VMs (server_hostname is a list here).
- name: "Power On VM"
  community.general.proxmox_kvm:
    proxmox_default_behavior: compatibility
    node: "{{ proxmox.node[px_var].name }}"
    api_user: "{{ proxmox.node[px_var].api_user }}"
    api_token_id: "{{ proxmox.node[px_var].api_token_id }}"
    api_token_secret: "{{ proxmox.node[px_var].api_token_secret }}"
    api_host: "{{ proxmox.node[px_var].api_host }}"
    name: "{{ item }}"
    timeout: 80
    state: started
  run_once: True
  loop: "{{ server_hostname }}"
  loop_control:
    # throttle the API calls a bit between VMs
    pause: 5

# we can't check by SSH bc the cloud init might be not completed yet!
- name: Giving PVE some time to actually start the VM...
  pause:
    seconds: 60
22 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/check_sudo.yml:
--------------------------------------------------------------------------------
---

# Detects "requiretty" in /etc/sudoers (check_mode only — nothing is
# modified) and warns that it breaks ansible pipelining.
- name: "Check pipelining support"
  lineinfile:
    path: /etc/sudoers
    regexp: ^Defaults.*requiretty
    state: absent
  # check_mode: the task only reports whether the line WOULD be removed
  check_mode: yes
  register: plsup

- debug:
    msg: |
      WARNING WARNING WARNING WARNING WARNING WARNING
      ***********************************************

      Your /etc/sudoers configuration contains "requiretty" in the "Defaults" setting.
      This will prevent the use of pipelining support in Ansible and so decreases
      the execution speed a lot.

      ***********************************************
      WARNING WARNING WARNING WARNING WARNING WARNING
  # surface the warning as "changed" so it stands out in the recap
  changed_when: plsup is changed
  when: plsup is changed
24 |
--------------------------------------------------------------------------------
/playbooks/vmware/vcenter_poweron-vm.yml:
--------------------------------------------------------------------------------
---
# Powers on a vSphere VM and blocks until VMware Tools report an IP.
- name: power on VM and wait for IP
  connection: local
  community.vmware.vmware_guest:
    proxy_host: "{{ vsphere.host[vsphere_var].proxy_host | d('') }}"
    proxy_port: "{{ vsphere.host[vsphere_var].proxy_port | d() | int }}"
    hostname: '{{ vsphere.host[vsphere_var].dest }}'
    username: '{{ vsphere.host[vsphere_var].user }}'
    password: '{{ vsphere.host[vsphere_var].password }}'
    validate_certs: "{{ vsphere.host[vsphere_var].validate_certs }}"
    esxi_hostname: "{{ vsphere_esx_host }}"
    datacenter: "{{ vsphere_datacenter }}"
    # VM names in vCenter are the short hostname (domain stripped)
    name: "{{ server_hostname | regex_replace('\\..*','') }}"
    folder: "/{{ vsphere_datacenter }}/vm"
    state: poweredon
    wait_for_ip_address: true
18 |
--------------------------------------------------------------------------------
/playbooks/system/ae_system_base.yml:
--------------------------------------------------------------------------------
---
# Base OS setup: optional upgrade, prep, base packages, tuning, NTP,
# journald and dnsmasq — runs with strategy "free" so hosts do not
# wait for each other.
- name: Settle up the base
  hosts: "*"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: root
  strategy: free

  vars:
    ansible_ssh_user: "{{ pors_ssh_user }}"

  tasks:

    - include_role:
        name: system_upgrade
      when: system.upgrade_os is defined and system.upgrade_os == True

    - include_role:
        name: system_prep

    - include_role:
        name: system_base

    - include_role:
        name: system_optimize

    - include_role:
        name: system_ntp
      when: ntp_manage_config | d(False) | bool

    - include_role:
        name: system_journal

    - include_role:
        name: system_dnsmasq
36 |
--------------------------------------------------------------------------------
/roles/group/shcmember/tasks/restart_shcmember.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Restart shcmembers
#####################################################################################################################

# async/poll lets the restart run up to 10 minutes without holding the
# SSH connection hostage.
- name: "(Re-)Start splunkd"
  become: yes
  become_user: root
  shell: "systemctl restart splunk"
  async: 600
  poll: 5
  when:
    - splunk_server_conf.shclustering is defined
    - splunk_server_conf.shclustering.disabled == False

#- name: Run splunk restart
#  command: "{{ splunk_installation.splunk_home_path }}/bin/splunk restart"
#  when:
#    - splunk_server_conf.shclustering is defined
#    - splunk_server_conf.shclustering.disabled == False
21 |
--------------------------------------------------------------------------------
/roles/pors/ssh/tasks/create_ssh_key.yml:
--------------------------------------------------------------------------------
---

# Creates an ed25519 keypair for the given user on the control node.
- name: "Create private SSH key for user: {{ user_name }}"
  connection: local
  community.crypto.openssh_keypair:
    path: "~/.ssh/{{ user_name }}_ed25519"
    type: ed25519

# we do not use community.general.ssh_config as it expects a valid hostname
# aaaaand when using the wildcard "*" it will not work properly and re-add
# on each run
# NOTE(review): the regexp matches an existing "IdentityFile {{ pors_ssh_key }}"
# line while the replacement writes the new user key path — confirm
# that replacing the pors_ssh_key entry is intended.
- name: "Ensure new key is set in ssh config"
  connection: local
  become: no
  lineinfile:
    path: "~/.ssh/config"
    owner: "{{ pors_user }}"
    mode: 0600
    regex: "^[iI]dentity[fF]ile\\s+{{ pors_ssh_key }}"
    line: "IdentityFile ~/.ssh/{{ user_name }}_ed25519"
    create: yes
    insertbefore: BOF
    state: present
  when: add_local_ssh_config
25 |
--------------------------------------------------------------------------------
/roles/system_ntp/defaults/main.yml:
--------------------------------------------------------------------------------
---
# Role defaults for system_ntp — all overridable per inventory/group.
ntp_enabled: true
ntp_timezone: Etc/UTC

# ntp_daemon: [various]
# ntp_package: ntp
# ntp_config_file: /etc/ntp.conf
# ntp_driftfile: [various]

# when false, the role does not touch the ntp configuration file
ntp_manage_config: false

# NTP server area configuration (leave empty for 'Worldwide').
# See: http://support.ntp.org/bin/view/Servers/NTPPoolServers
ntp_area: ''
ntp_servers:
  - "0{{ '.' + ntp_area if ntp_area else '' }}.pool.ntp.org iburst"
  - "1{{ '.' + ntp_area if ntp_area else '' }}.pool.ntp.org iburst"
  - "2{{ '.' + ntp_area if ntp_area else '' }}.pool.ntp.org iburst"
  - "3{{ '.' + ntp_area if ntp_area else '' }}.pool.ntp.org iburst"

# hosts/networks allowed to query the local ntp daemon
ntp_restrict:
  - "127.0.0.1"
  - "::1"

ntp_cron_handler_enabled: false

# allow large clock steps (useful on VMs resuming from suspend)
ntp_tinker_panic: false
--------------------------------------------------------------------------------
/EXAMPLES/pors-server.sudoers.d-example:
--------------------------------------------------------------------------------
1 | # PORS starts as a separate user in multi-user environments
2 | # all users defined in the User_Alias are allowed to run PORS
3 | #
# Change USERPORS and add all user accounts and/or groups which should be able
5 | # to start PORS. Groups need the % sign as prefix, users not.
6 |
7 | User_Alias USERPORS = %wheel, foo, bar
8 | Cmnd_Alias PORSCMD = /usr/bin/env CALLUSR=* /opt/pors/pors *,/usr/bin/env CALLUSR=* ./pors *,/usr/bin/env CALLUSR=* ./pors
9 |
10 | # change the user in ALL=(pors) depending on the variable AEUSER in PORS
11 | USERPORS ALL=(pors) NOPASSWD: PORSCMD
12 |
13 | # allow changing to user pors for all PORS users
14 | USERPORS ALL=(root) NOPASSWD: /bin/su - pors
15 |
16 | # allow specifying/overwriting debug option for PORS
17 | Defaults env_keep += "DEBUG"
18 |
--------------------------------------------------------------------------------
/playbooks/pors/configure.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "(Re-)configure a PORS installation"
4 | hosts: "{{ target | d('localhost') }}"
5 | become: no
6 | connection: local
7 | gather_facts: false
8 |
9 | vars:
10 | var_validation_only: false
11 | initial_setup: "{{ reconfigure | d(True) }}"
12 | debug_off: "{{ no_debug | d(True) | bool }}"
13 |
14 | tasks:
15 |
16 | - include_role:
17 | name: common
18 | public: yes
19 |
20 | - include_role:
21 | name: pors/configure
22 | vars:
23 | user_name: "{{ splunk_install_user }}"
24 |
25 | # create the ssh key for the splunk account
26 | # used for fetching apps from git
27 | - include_role:
28 | name: pors/ssh
29 | vars:
30 | user_name: "{{ splunk_install_user }}"
31 | add_local_ssh_config: false
32 |
--------------------------------------------------------------------------------
/roles/googlecp/tasks/gcp_configure_address.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # Reserve an internal IP address for the instance in the project's subnet.
4 | - name: create an address
5 |   google.cloud.gcp_compute_address:
6 |     auth_kind: "{{ googlecp.project[project_name].cred_kind }}"
7 |     service_account_file: "{{ googlecp.project[project_name].cred_file }}"
8 |     project: "{{ googlecp.project[project_name].id }}"
9 |     region: "{{ googlecp.project[project_name].region }}"
10 |     scopes: "{{ googlecp.project[project_name].scopes }}"
11 |     name: "{{ server_shortname }}-ip"
12 |     address_type: INTERNAL
13 |     # empty string lets GCP auto-assign when server_ip is not set;
14 |     # default('') is the explicit form of the previous bare default()
15 |     address: "{{ server_ip | default('') }}"
16 |     subnetwork:
17 |       selfLink: "https://www.googleapis.com/compute/v1/projects/{{ googlecp.project[project_name].id }}/regions/{{ googlecp.project[project_name].region }}/subnetworks/{{ googlecp.project[project_name].subnet }}"
18 |     state: present
19 |   register: address
20 |
--------------------------------------------------------------------------------
/roles/logstream_configure/templates/leader.j2:
--------------------------------------------------------------------------------
1 | distributed:
2 | mode: master
3 | master:
4 | #host: {{ groups['logstream_leader'] | first }}
5 | host: 0.0.0.0
6 | port: {{ logstream_leader.port }}
7 | ipWhitelistRegex: /.*/
8 | authToken: {{ vault_logstream_leader_authtoken }}
9 | tls:
10 | disabled: {{ logstream_leader.disabledtls }}
11 | privKeyPath: {{ logstream_leader.tls_privKeyPath }}
12 | certPath: {{ logstream_leader.tls_certPath }}
13 | caPath: {{ logstream_leader.tls_caPath }}
14 | # Authenticate client (mutual auth) - Whether to require clients to present their certificates. Used to perform client authentication using SSL certs.
15 | requestCert: true
16 | # false if ignoring untrusted certs
17 | rejectUnauthorized: {{ logstream_leader.tls_untrusted }}
18 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/custom_tasks.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # custom tasks definitions which can extend PORS actions
3 |
4 | #custom:
5 | # custom tasks which will be run after a system has been deployed
6 | # and/or during a system upgrade.
7 | # custom roles must be added to:
8 | # /custom/roles/custom_system/tasks/
9 | # -> main.yml will be included so ensure you set any dependencies here!
10 | # Possible values: True | False
11 | # system_tasks: False
12 |
13 | # custom tasks which will be run after pure splunk has been installed
14 | # custom roles must be added to:
15 | # /custom/roles/custom_post_install_splunk/tasks/
16 | # -> main.yml will be included so ensure you set any dependencies here!
17 | # Possible values: True | False
18 | # post_install_splunk: False
19 |
--------------------------------------------------------------------------------
/playbooks/cribl/configure_logstream.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Configure cribl logstream
3 |   hosts: "{{ target }}"
4 |   remote_user: "{{ cribl_ssh_user }}"
5 |   become: yes
6 |   become_user: root
7 |   gather_facts: true
8 |   strategy: free
9 |
10 |   vars:
11 |     logstream_upgrade: false
12 |     skip_ssh_splunk_privkey: true
13 |     skip_ssh_splunk_authkey: true
14 |
15 |   tasks:
16 |
17 |     - block:
18 |
19 |         - include_role:
20 |             name: logstream_configure
21 |
22 |         - include_role:
23 |             name: system_sshid
24 |
25 |       rescue:
26 |
27 |         # Re-raise with a trimmed, human-readable error message.
28 |         # BUGFIX: "OCCURED" -> "OCCURRED" in the user-facing message.
29 |         - fail:
30 |             msg: |
31 |               ERROR OCCURRED:
32 |
33 |               {{ ansible_failed_result.msg | replace('"','') | replace("'","") | regex_replace('[}{]','') }}
34 |
35 |               See above for details.
--------------------------------------------------------------------------------
/playbooks/splunk/ae_install_splunk.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - ansible.builtin.import_playbook: ../shelper/ae_install_shelper.yml
4 |
5 | - name: Install Splunk on all hosts
6 | hosts: "*:!pors_server"
7 | remote_user: "{{ pors_ssh_user }}"
8 | become: yes
9 | become_user: root
10 | gather_facts: True
11 | strategy: free
12 |
13 | vars:
14 | splunk_upgrade: False
15 |
16 | pre_tasks:
17 |
18 | - include_vars: "{{ env_inventory_dir }}/group_vars/universal_forwarder/splunk_installation.yml"
19 | when: spltype is defined and spltype == "splunkforwarder"
20 |
21 | tasks:
22 |
23 | - include_role:
24 | name: system_base
25 | tasks_from: software.yml
26 |
27 | - include_role:
28 | name: install
29 |
30 | - include_role:
31 | name: conf/user
32 | when: post_install_add_users | d(False)
33 |
--------------------------------------------------------------------------------
/playbooks/system/ae_download_openvmtools.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: pors_server
3 |   gather_facts: false
4 |
5 |   vars:
6 |     ansible_ssh_user: "{{ pors_ssh_user }}"
7 |
8 |   tasks:
9 |     # NOTE(review): `file.linux_64_rpm...` looks stale -- the download task
10 |     # below iterates over `vmtools_packages.linux_64_rpm`; confirm the var name.
11 |     - debug:
12 |         msg: "{{ file.linux_64_rpm.tools.name }}"
13 |
14 |     - name: Download open-vm-tools rpm packages
15 |       get_url:
16 |         url: "{{ item.value.repo }}/{{ item.value.name }}-{{ item.value.version }}-{{ item.value.release }}.{{ item.value.arch }}.rpm"
17 |         dest: "{{ splunk_repository.repository_root }}/packages"
18 |         owner: "{{ splunk_repository.repository_owner }}"
19 |         group: "{{ splunk_repository.repository_group }}"
20 |         # BUGFIX: mode=644 was parsed as *decimal* 644, not octal 0644;
21 |         # quote the octal string so the permissions come out as rw-r--r--
22 |         mode: "0644"
23 |       with_dict: "{{ vmtools_packages.linux_64_rpm }}"
24 |       when:
25 |         - vmtools_installation.package_format == "rpm"
26 |         - vmtools_packages.linux_64_rpm is defined
27 |
--------------------------------------------------------------------------------
/roles/install/tasks/splunk_checks.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # check for existence of splunk init file
3 | - name: Check Splunk init existence
4 | stat: path=/etc/init.d/splunk
5 | register: splunkinitfile
6 |
7 | # check for existence of splunk bin file
8 | - stat: path="{{ splunk_installation.splunk_home_path }}/bin/splunk"
9 | register: splunkbin
10 | ignore_errors: yes
11 |
12 | # check for running splunk daemon (independent from systemd, init etc!!)
13 | - name: "Check for running Splunk processes"
14 | shell: >-
15 | ps faux | grep '^{{ splunk_install_user }} ' | grep -Ev '(ssh|grep|tail|less|vim|nano|patrol|ansible)'
16 | register: splunk_state
17 | failed_when: splunk_state.rc == 0
18 | ignore_errors: yes
19 |
20 | # check for existence of splunk systemd file
21 | - stat: path=/etc/systemd/system/Splunkd.service
22 | register: splunksystemdfile
23 |
--------------------------------------------------------------------------------
/roles/pors/configure/tasks/ask_github.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - pause:
4 | prompt: |-
5 |
6 | Please specify your self-hosted git server API URL
7 |
8 | Github (organisation):
9 | Format: "https:///api/v3/orgs/"
10 | Example: "https://api.github.com/orgs/secure-diversITy"
11 |
12 | Github (user):
13 | Format: "https:///api/v3/users/"
14 |
15 | Checkout documentation for further details:
16 | https://github.com/secure-diversITy/ansible_pors/wiki/git_access
17 |
18 | Enter the API URI
19 | echo: true
20 | no_log: "{{ debug_off }}"
21 | register: answ_git_server
22 | when:
23 | - answ_git_server_type.user_input == "2"
24 | - answ_git_server is undefined
25 |
26 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/tasks/configure_local.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # THIS GETS OVERWRITTEN EACH RUN FROM: roles/system/template/ DIRECTORY!
3 |
4 | # Reset the flag each run so a previous run/iteration cannot leak its value.
5 | - name: clear var
6 |   set_fact:
7 |     shouldinstall: false
8 |
9 | #- name: set the var according to the users setting
10 | #  set_fact: shouldinstall="{{ item.value.install }}"
11 | #  when: item.key == "system"
12 | #  with_dict: "{{ vars['app_variable'] }} |app_default"
13 |
14 | # Copy local *.conf files into Splunk's system/local directory, but only
15 | # when another task has switched shouldinstall on.
16 | # BUGFIX: `shouldinstall == true` compared the *string* "False"/"True"
17 | # (set_fact stored quoted strings) against a boolean and could never
18 | # match; cast with | bool instead.
19 | - name: Configure SYSTEM (local/*.conf)
20 |   become: yes
21 |   become_user: "{{ splunk_install_user }}"
22 |   copy:
23 |     src: "{{ item }}"
24 |     dest: "{{ splunk_installation.splunk_home_path }}/etc/system/local/"
25 |   when: shouldinstall | bool
26 |   with_fileglob: system/local/*.conf
27 |
28 | #- name: Debug SYSTEM
29 | #  debug: msg="items {{ item.value.install }}." verbosity=4
30 | #  with_dict: "{{ vars['app_variable'] }}"
31 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/web.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Configurations for Splunk web.conf
11 | #####################################################################################################################
12 |
13 | splunk_web_conf:
14 | settings:
15 | enableSplunkWebSSL: 1
16 | updateCheckerBaseURL: 0
17 |
18 |
--------------------------------------------------------------------------------
/playbooks/splunk/configure_deployer.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Configure splunk deployer
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: system_sshid
20 |
21 | - include_role:
22 | name: conf/web
23 |
24 | - include_role:
25 | name: conf/outputs
26 |
27 | - include_role:
28 | name: conf/server
29 |
30 | - include_role:
31 | name: common_tasks
32 | tasks_from: monitoring.yml
33 |
34 | # flush notify handlers to activate the splunk config
35 | post_tasks:
36 | - meta: flush_handlers
37 |
38 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_configure_ix_peernode.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Configure splunk indexers
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: system_sshid
20 |
21 | - include_role:
22 | name: conf/web
23 |
24 | - include_role:
25 | name: conf/inputs
26 |
27 | - include_role:
28 | name: conf/server
29 |
30 | - include_role:
31 | name: common_tasks
32 | tasks_from: monitoring.yml
33 |
34 | # flush notify handlers to activate the splunk config
35 | post_tasks:
36 | - meta: flush_handlers
37 |
38 |
--------------------------------------------------------------------------------
/roles/system_disk/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - set_fact: datetime="{{lookup('pipe','date \"+%Y/%m/%d %H:%M\"')}}"
3 | run_once: True
4 |
5 | - include_role:
6 | name: ssh/check
7 |
8 | - include_tasks: identify_disk.yml
9 |
10 | - include_tasks: format_disk_lvm.yml
11 | when:
12 | - format_disk_lvm is defined
13 | - format_disk != "false"
14 | - server_disk_fs != "swap"
15 |
16 | - include_tasks: format_disk_nolvm.yml
17 | when:
18 | - format_disk_lvm is undefined
19 | - format_disk != "false"
20 | - server_disk_fs != "swap"
21 |
22 | - include_tasks: create_swap_nolvm.yml
23 | when:
24 | - server_lvm_lv is undefined
25 | - format_disk != "false"
26 | - server_disk_fs == "swap"
27 |
28 | - include_tasks: fs_move.yml
29 | when:
30 | - server_fs_move is defined
31 | - server_fs_move == "true"
32 | - server_disk_fs != "swap"
33 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/idx_clustering.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | ##############################################################
10 | # Indexer Cluster settings
11 | ##############################################################
12 |
13 | #idx_cluster_available_sites: site1
14 |
15 | #idx_cluster_replicationfactor: 1
16 | #idx_cluster_searchfactor: 1
17 |
18 | #idx_cluster_site_replicationfactor: "origin:1,total:1"
19 | #idx_cluster_site_searchfactor: "origin:1,total:1"
20 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/monitoringconsole/distsearch.conf:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Configurations for Splunk distsearch.conf
4 | ####################################################################################################################
5 |
6 | splunk_distsearch_conf:
7 | distributedSearch:
8 | servers:
9 | - "{{ groups['peernode'] }}"
10 | - "{{ groups['licensemaster'] }}"
11 | - "{{ groups['masternode'] }}"
12 | - "{{ groups['deploymentserver'] }}"
13 | - "{{ groups['deployer'] }}"
14 | - "{{ groups['shcmember'] }}"
15 | - "{{ groups['heavyforwarder'] }}"
16 | - "{{ groups['searchhead'] }}"
17 | - "{{ groups['httpeventcollector'] }}"
18 | - "{{ groups['activeinput'] }}"
19 |
20 |
--------------------------------------------------------------------------------
/playbooks/proxmox/px_destroy_vm.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Destroy a Proxmox VM
3 | hosts: "pors_server"
4 | gather_facts: false
5 | connection: local
6 |
7 | vars:
8 | ansible_python_interpreter: /usr/bin/python3
9 | kvm_poweroff: yes
10 | kvm_create: False
11 | kvm_configure: False
12 | kvm_poweron: False
13 |
14 | roles:
15 | - proxmox
16 |
17 | post_tasks:
18 | - name: "Trash {{ server_hostname }}"
19 | community.general.proxmox_kvm:
20 | node: "{{ px_var }}"
21 | api_user: "{{ proxmox.node[px_var].api_user }}"
22 | api_token_id: "{{ proxmox.node[px_var].api_token_id }}"
23 | api_token_secret: "{{ proxmox.node[px_var].api_token_secret }}"
24 | api_host: "{{ proxmox.node[px_var].api_host }}"
25 | name: "{{ item }}"
26 | state: absent
27 | timeout: 120
28 | loop: "{{ server_hostname }}"
29 |
--------------------------------------------------------------------------------
/playbooks/splunk/configure_deploymentserver.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Configure splunk deploymentserver
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: system_sshid
20 |
21 | - include_role:
22 | name: conf/web
23 |
24 | - include_role:
25 | name: conf/outputs
26 |
27 | - include_role:
28 | name: conf/server
29 |
30 | - include_role:
31 | name: common_tasks
32 | tasks_from: monitoring.yml
33 |
34 | # flush notify handlers to activate the splunk config
35 | post_tasks:
36 | - meta: flush_handlers
37 |
38 |
39 |
--------------------------------------------------------------------------------
/roles/proxmox/tasks/ct_create.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Create new container
3 | community.general.proxmox:
4 | node: "{{ px_var }}"
5 | api_user: "{{ proxmox.node[px_var].api_user }}"
6 | api_token_id: "{{ proxmox.node[px_var].api_token_id }}"
7 | api_token_secret: "{{ proxmox.node[px_var].api_token_secret }}"
8 | api_host: "{{ proxmox.node[px_var].api_host }}"
9 | hostname: "{{ server_hostname }}"
10 | ostemplate: "{{ proxmox.node[px_var].ansible_template_name }}"
11 | #clone: "{{ proxmox.node[px_var].ansible_template_name }}"
12 | netif: '{"net0":"name=eth0,gw=10.9.11.1,ip=10.9.11.20/24,bridge=vmbr911"}'
13 | password: "{{ vault_proxmox_ct_default_password }}"
14 | description: "{{ server_notes }} on {{ datetime }}"
15 | cores: 1
16 | storage: "{{ proxmox.node[px_var].storage }}"
17 | timeout: 120
18 |
--------------------------------------------------------------------------------
/roles/system_ntp/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependencies: []
3 |
4 | galaxy_info:
5 | role_name: ntp
6 | author: geerlingguy
7 | description: NTP installation and configuration for Linux.
8 | company: "Midwestern Mac, LLC"
9 | license: "license (BSD, MIT)"
10 | min_ansible_version: 2.4
11 | platforms:
12 | - name: EL
13 | versions:
14 | - all
15 | - name: Fedora
16 | versions:
17 | - all
18 | - name: Debian
19 | versions:
20 | - all
21 | - name: Ubuntu
22 | versions:
23 | - all
24 | - name: FreeBSD
25 | versions:
26 | - all
27 | - name: Suse
28 | versions:
29 | - all
30 | - name: Archlinux
31 | versions:
32 | - all
33 | galaxy_tags:
34 | - system
35 | - ntp
36 | - date
37 | - time
38 | - timezone
39 | - chrony
40 | - chronyd
41 | - synchronization
42 |
--------------------------------------------------------------------------------
/roles/group/licensemaster/tasks/distribute_distsearch_trustedkey.yml:
--------------------------------------------------------------------------------
1 | ---
2 | #####################################################################################################################
3 | # Distribute Keys for Distributed Search
4 | #####################################################################################################################
5 |
6 | - name: Copy Keys for distributed search
7 | copy:
8 | src: "{{ splunk_repository.repository_root }}/distServerKeys/{{ inventory_hostname_short }}/"
9 | dest: "{{ splunk_installation.splunk_home_path }}/etc/auth/distServerKeys"
10 | with_items: "{{ splunk_distsearch_conf.distributedSearch.servers | create_distsearch_serverlist }}"
11 |
12 | # local_action: "shell scp -r {{ splunk_repository.repository_root }}/distServerKeys/{{ inventory_hostname_short }}* splunk@{{ item }}:{{ splunk_installation.splunk_home_path }}/etc/auth/distServerKeys"
13 |
--------------------------------------------------------------------------------
/EXAMPLES/gen_hosts.yml:
--------------------------------------------------------------------------------
1 | - name: Generate hosts file from inventory
2 | remote_user: "{{ pors_ssh_user }}"
3 | become: yes
4 | become_user: root
5 | hosts: "all:!pors_server"
6 | gather_facts: True
7 |
8 | tasks:
9 | - setup:
10 | filter: ansible_default_ipv4.address
11 |
12 | - name: Build /etc/hosts from inventory
13 | lineinfile:
14 | #dest: /etc/hosts
15 | dest: /etc/cloud/templates/hosts.suse.tmpl
16 | regexp: '.*{{ item }}$'
17 | line: "{{ hostvars[item]['ansible_default_ipv4']['address'] }} {{ hostvars[item]['ansible_nodename'] }} {{item}}"
18 | backup: yes
19 | state: present
20 | when: hostvars[item]['ansible_facts']['default_ipv4'] is defined and item != "localhost"
21 | with_items: "{{ groups['all'] }}"
22 |
23 | - name: Reload cloud-init to apply the new hosts
24 | shell: "cloud-init init"
25 | become: yes
26 | become_user: root
27 |
28 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/distsearch.conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Configurations for Splunk distsearch.conf
11 | ####################################################################################################################
12 |
13 | # usually should not be touched
14 | #splunk_distsearch_conf:
15 | # distributedSearch:
16 | # servers:
17 | # - "{{ groups.peernode }}"
18 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/hosts_dynamic.proxmox.yml.example:
--------------------------------------------------------------------------------
1 | ###############################################################
2 | # dynamic inventory for Proxmox VE
3 | ###############################################################
4 | plugin: community.general.proxmox
5 |
6 | # target environment
7 | all:
8 | # change this to your environment
9 | vars:
10 | target_env: ""
11 | # do not touch the following
12 | children:
13 | logstream_all:
14 | logstream_leader:
15 | logstream_worker:
16 |
17 |
18 | # do not touch this
19 | pors_server:
20 | hosts:
21 | localhost:
22 |
23 | url: "https://{{ proxmox.dc.host }}:8006"
24 | user: "{{ vault_proxmox_pve_user }}"
25 | password: "{{ vault_proxmox_pve_password }}"
26 | validate_certs: "{{ proxmox.dc.validate_certs }}"
27 |
28 | want_facts: yes
29 | facts_prefix: ""
30 | group_prefix: ""
31 |
--------------------------------------------------------------------------------
/roles/system_dnsmasq/tasks/configure.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: "Templatizing {{ dnsmasq_configure_conf_path }}"
4 | ansible.builtin.template:
5 | src: dnsmasq.conf.j2
6 | dest: '{{ dnsmasq_configure_conf_path }}'
7 | mode: '644'
8 | register: dnsmasq_conf
9 |
10 | - block:
11 |
12 | - name: "Configure dnsmasq in NetworkManager"
13 | ansible.builtin.template:
14 | src: networkmanager.conf.j2
15 | dest: '{{ dnsmasq_networkmanager_confd }}/00-use-dnsmasq.conf'
16 | mode: '644'
17 | register: dnsmasq_nm_setup
18 |
19 | - name: "Include dnsmasq config in NetworkManager"
20 | community.general.ini_file:
21 | path: /etc/NetworkManager/dnsmasq.d/dnsmasq.conf
22 | option: "conf-file"
23 | value: "{{ dnsmasq_configure_conf_path }}"
24 | no_extra_spaces: true
25 | create: true
26 | register: dnsmasq_nm_conf
27 |
28 | when: dnsmasq_configure_networkmanager
29 |
--------------------------------------------------------------------------------
/playbooks/splunk/splunk_actions.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: splunk actions
3 |   hosts: "*"
4 |   remote_user: "{{ pors_ssh_user }}"
5 |   become: yes
6 |   become_user: "{{ splunk_install_user }}"
7 |   no_log: false
8 |   gather_facts: false
9 |
10 |   handlers:
11 |     # BUGFIX: bare `include` was deprecated for years and removed in
12 |     # ansible-core 2.16; statically import the shared handlers instead so
13 |     # they stay notifiable by name.
14 |     - ansible.builtin.import_tasks: ../../roles/common/handlers/main.yml
15 |
16 |   tasks:
17 |     # The echo only reports progress; the real work happens in the notified
18 |     # handler, triggered by the flush_handlers below.
19 |     - name: "splunk {{ splunk_action }}"
20 |       shell: "echo 'splunk {{ splunk_action }} initiated... it can take some time before you see results here ...'"
21 |       notify: "splunk {{ splunk_action }}"
22 |       register: actionresult
23 |       when: shelper is undefined
24 |
25 |     - name: "shelper {{ splunk_action }}"
26 |       shell: "echo 'shelper {{ splunk_action }} initiated... it can take some time before you see results here ...'"
27 |       notify: "shelper {{ splunk_action }}"
28 |       register: actionresult
29 |       when: shelper is defined
30 |
31 |     - meta: flush_handlers
--------------------------------------------------------------------------------
/playbooks/splunk/ae_configure_heavyforwarder.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Configure splunk HF
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: system_sshid
20 |
21 | - include_role:
22 | name: conf/inputs
23 |
24 | - include_role:
25 | name: conf/outputs
26 |
27 | - include_role:
28 | name: conf/server
29 |
30 | - include_role:
31 | name: conf/web
32 |
33 | - include_role:
34 | name: common_tasks
35 | tasks_from: monitoring.yml
36 |
37 | # flush notify handlers to activate the splunk config
38 | post_tasks:
39 | - meta: flush_handlers
40 |
41 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/splunk_auth.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Installation Variables
11 | # should be set in your VAULT only. Check documentation.
12 | #####################################################################################################################
13 |
14 | #splunk_auth:
15 | # admin_password: "{{ vault_splunk_admin_password }}"
16 | # splunk_secret: "{{ vault_splunk_secret }}"
17 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/system_journald.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #########################################################################
10 | # journald.conf handling
11 | #
12 | # source: https://galaxy.ansible.com/cscfi/systemd-journal
13 | #########################################################################
14 |
15 | #systemd_journal_storage: persistent
16 | #systemd_journal_compress: "yes"
17 | #systemd_journal_system_max_use: 1G
18 | #systemd_journal_system_max_file_size: 200M
19 | #systemd_journal_restart_state: restarted
20 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_configure_masternode.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: Configure splunk masternode
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: system_sshid
20 |
21 | - include_role:
22 | name: conf/web
23 |
24 | - include_role:
25 | name: conf/outputs
26 |
27 | - include_role:
28 | name: conf/server
29 |
30 | # - include_role:
31 | # name: group/searchhead
32 |
33 | - include_role:
34 | name: common_tasks
35 | tasks_from: monitoring.yml
36 |
37 | # flush notify handlers to activate the splunk config
38 | post_tasks:
39 | - meta: flush_handlers
40 |
41 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_configure_searchhead.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # HINT: The "hosts" value here is a dynamic group name used in PORS
3 |
4 | - name: "Configure splunk searchheads"
5 | hosts: "{{ target }}"
6 | remote_user: "{{ pors_ssh_user }}"
7 | become: yes
8 | become_user: "{{ splunk_install_user }}"
9 |
10 | vars:
11 | splunk_restart: True
12 |
13 | tasks:
14 |
15 | - include_role:
16 | name: conf/user
17 |
18 | - include_role:
19 | name: conf/ldap
20 |
21 | - include_role:
22 | name: system_sshid
23 |
24 | - include_role:
25 | name: group/searchhead
26 |
27 | - include_role:
28 | name: conf/web
29 |
30 | - include_role:
31 | name: conf/outputs
32 |
33 | - include_role:
34 | name: conf/server
35 |
36 | - include_role:
37 | name: common_tasks
38 | tasks_from: monitoring.yml
39 |
40 | post_tasks:
41 | - meta: flush_handlers
42 |
43 |
--------------------------------------------------------------------------------
/roles/upgrade/tasks/backup_splunk.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # do not use copy due to its limitations
3 | # copy facility does not scale to lots of files
4 | #- name: Backup the current splunk dir
5 | #  become: yes
6 | #  become_user: root
7 | #  synchronize:
8 | #    archive: yes
9 | #    compress: yes
10 | #    src: "{{ splunk_installation.remote_package_temp_path }}/{{ splunk_installation.package_file }}"
11 | #    dest: "{{ splunk_installation.remote_backup_path }}"
12 |
13 | # BUGFIX: the directory must be passed via the module's `path` parameter;
14 | # the previous form handed the path to `file` as a bare string and left
15 | # `state`/`mode` dangling as (invalid) task-level keywords.
16 | - name: "Ensure backup path exists"
17 |   file:
18 |     path: "{{ splunk_installation.remote_backup_path }}"
19 |     state: directory
20 |     mode: "0700"
21 |
22 | # do not use copy due to its limitations
23 | # copy facility does not scale to lots of files
24 | - name: Backup the current splunk dir
25 |   become: yes
26 |   become_user: root
27 |   community.general.archive:
28 |     path: "{{ splunk_installation.backup_path }}"
29 |     dest: "{{ splunk_installation.remote_backup_path }}"
30 |     format: gz
31 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/hosts_dynamic.vsphere.yml.example:
--------------------------------------------------------------------------------
###############################################################
# dynamic inventory for VMware/vSphere
###############################################################
plugin: vmware_vm_inventory
strict: False
with_tags: True

# target environment
all:
  # change this to your environment
  vars:
    target_env: ""
  # do not touch the following
  children:
    logstream_all:
    logstream_leader:
    logstream_worker:


# do not touch this
pors_server:
  hosts:
    localhost:

# vSphere API connection settings.
# NOTE(review): "vsphere.host..dest" / "vsphere.host..validate_certs" contain
# a double dot -- this looks like a scrubbed placeholder (a host key removed
# from the variable path); confirm the intended variable reference before use.
hostname: "{{ vsphere.host..dest }}"
validate_certs: "{{ vsphere.host..validate_certs }}"
username: "{{ vault_vsphere_user }}"
password: "{{ vault_vsphere_password }}"
29 |
--------------------------------------------------------------------------------
/roles/googlecp/tasks/gcp_create_instance.yml:
--------------------------------------------------------------------------------
---
# Generate and run a per-environment task file that creates a GCP instance
# and attaches its disks (see the workaround note below).
- name: create instance {{ server_shortname }} tasks
  template:
    src: create_instance.j2
    dest: "{{ base_install_dir }}/roles/googlecp/tasks/generated_create_instance.yml"
  connection: local
  # NOTE(review): each loop iteration renders to the same dest file, so the
  # last write wins -- presumably the template itself iterates over all disks;
  # confirm against create_instance.j2.
  loop: "{{ disks }}"

######################################################################
# The GCP ansible modules do not offer to attach disks to an already
# running instance. There is an old module 'gc_pd' but it is
# superseded by gcp_compute_instance. The latter does not offer an
# 'attach-disk' key (as in gcloud SDK) so atm (2021, Jan) there is no
# way to do this task via the gcp ansible modules.
#
# the following tasks file gets auto-generated by the above template:
######################################################################

- include_tasks: "{{ base_install_dir }}/roles/googlecp/tasks/generated_create_instance.yml"
20 |
--------------------------------------------------------------------------------
/roles/apps/tasks/git_checkout.yml:
--------------------------------------------------------------------------------
---
# Check out Splunk apps from git. `app_variable` names a dict whose keys are
# deployment locations (apps, shcluster_apps, ...) and whose values carry the
# per-app git settings (git_repo, git_version, install, clean_install).
# Converted from legacy key=value module args to native block YAML (Ansible
# best practice; behavior unchanged).

- name: Clean existing app directory
  # remove the app dir first when a clean install is requested so stale files
  # from a previous checkout cannot survive
  file:
    path: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
    state: absent
  when: "item.value.clean_install is defined and
         item.value.clean_install == true and
         item.value.git_repo is defined"
  with_dict: "{{ vars[app_variable] }}"

- name: Clone repository
  git:
    repo: "{{ item.value.git_repo }}"
    dest: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
    version: "{{ item.value.git_version | default('HEAD') }}"
    clone: yes
    update: yes
    force: yes
    accept_hostkey: yes
  when: "item.value.install == true and
         item.value.git_repo is defined"
  with_dict: "{{ vars[app_variable] }}"
22 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/splunk_repository.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Repository Variables
11 | #####################################################################################################################
12 |
13 | #repo_base: /opt/pors_repo
14 |
15 | #splunk_repository:
16 | # repository_root: "{{ repo_base }}/{{ target_env }}"
17 | # repository_owner: "{{ pors_user }}"
18 | # repository_group: "{{ pors_group }}"
19 |
--------------------------------------------------------------------------------
/roles/pors/setup/tasks/check_sudo.yml:
--------------------------------------------------------------------------------
---

# Verify that the PORS user can escalate to root via sudo before the setup
# continues; abort with actionable instructions otherwise.

- name: "Check if we can become root"
  become: yes
  become_user: root
  shell: whoami
  # never fail the play at this point -- the result is evaluated by the
  # `fail` task below
  ignore_errors: true
  # mark the task "changed" only when sudo failed; `when: become.changed`
  # below keys off this flag to decide whether to abort
  changed_when: become.rc != 0
  register: become

- fail:
    msg: |

      It seems we cannot become root here.
      In order to run the setup you must have added a sudo config
      to this machine for the user {{ pors_user }}!

      The following command executed as root will do that for you:

      echo -e "# Ansible management for the splunk> platform\n{{ pors_user }}\tALL=(ALL)\tNOPASSWD: ALL\n" > /etc/sudoers.d/ansible-admin-root

      This will create a new sudoers file named: /etc/sudoers.d/ansible-admin-root
      and adds this as content:

      # Ansible management for the splunk> platform
      {{ pors_user }} ALL=(ALL) NOPASSWD: ALL

  when: become.changed
29 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/outputs.conf.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Configurations for Splunk outputs.conf
#####################################################################################################################

# outputs.conf
# The JSON-like quoted strings below are parsed into nested stanzas by the
# conf templates that render outputs.conf.
splunk_outputs_conf:
  tcpout:
    defaultGroup: "{{ target_env }}"
    # [tcpout:<target_env>] stanza: TLS client cert + useACK; indexer list is
    # resolved via indexer discovery rather than a static server list
    target_group: "{
      '{{ target_env }}': {
        'useACK': true,
        'indexerDiscovery': '{{ target_env }}',
        'sslPassword': '{{ vault_tcpout_sslPassword }}',
        'sslCertPath': '$SPLUNK_HOME/etc/auth/server.pem',
        'sslRootCAPath': '$SPLUNK_HOME/etc/auth/ca.pem'
      }
      }"
    # [indexer_discovery:<target_env>] stanza; 'dynamic' master_uri is
    # presumably resolved by PORS at deploy time -- verify against the
    # rendering template
    indexer_discovery: "{
      '{{ target_env }}': {
        'pass4SymmKey': '{{ vault_indexerdiscovery_pass4SymmKey }}',
        'master_uri': 'dynamic'
      }
      }"
25 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/site1_deployer/server.conf.yml:
--------------------------------------------------------------------------------
---

#####################################################################################################################
# Configurations for Splunk server.conf
####################################################################################################################

# server.conf for the site1 deployer: searchhead-mode member of a multisite
# indexer cluster, SHC deployer settings, and TLS config.
splunk_server_conf:
  general:
    pass4SymmKey: "{{ vault_general_pass4SymmKey }}"
    site: site1
  license:
    # 'dynamic' is presumably resolved by PORS to the license master URI --
    # verify against the rendering template
    master_uri: dynamic
  clustering:
    mode: searchhead
    master_uri: dynamic
    multisite: True
    pass4SymmKey: "{{ vault_idxclustering_pass4SymmKey }}"
  shclustering:
    shcluster_label: "shc_{{ target_env }}"
    pass4SymmKey: "{{ vault_shclustering_pass4SymmKey }}"
    disabled: False
    mode: searchhead
  sslConfig:
    sslRootCAPath: $SPLUNK_HOME/etc/auth/cacert.pem
    sslPassword: "{{ vault_sslConfig_sslPassword }}"
26 |
--------------------------------------------------------------------------------
/roles/proxmox/tasks/main.yml:
--------------------------------------------------------------------------------
---
# Entry point for the proxmox role: each sub-task file is only pulled in when
# the calling playbook sets its corresponding control variable.

- name: "Capture a shared run timestamp"
  # Converted from legacy key=value args to native block YAML; evaluated once
  # and shared by all hosts in the play.
  set_fact:
    datetime: "{{ lookup('pipe', 'date \"+%Y/%m/%d %H:%M\"') }}"
  run_once: True

# optional cloud-init preparation
- include_tasks: cloud_init.yml
  when: proxmox.node[px_var].advanced_ci.enabled is defined and proxmox.node[px_var].advanced_ci.enabled == true

- include_tasks: ct_create.yml
  when: ct_create is defined and ct_create != False

- include_tasks: kvm_create.yml
  when: kvm_create is defined and kvm_create != False

- include_tasks: kvm_configure.yml
  when: kvm_configure is defined and kvm_configure != False

# power on freshly created VMs (keyed off kvm_create on purpose)
- include_tasks: kvm_poweron.yml
  when: kvm_create is defined and kvm_create != False

- include_tasks: kvm_add_disk.yml
  when: kvm_add_disk is defined and kvm_add_disk != False

- include_tasks: kvm_poweroff.yml
  when: kvm_poweroff is defined and kvm_poweroff != False

- include_tasks: kvm_reboot.yml
  when: kvm_autoreboot is defined and kvm_autoreboot != False
28 |
--------------------------------------------------------------------------------
/roles/system/template/system_local_configs/tasks/inject_myfiles.yml:
--------------------------------------------------------------------------------
---
# THIS GETS OVERWRITTEN EACH RUN FROM: roles/system/template/ DIRECTORY!
# Stamps every system/local/*.conf file with an "ANSIBLE FILE-MANAGED" header
# (inserted/refreshed at the top of the file) before deployment.
#- name: set the var according to the users setting
#  set_fact: shouldinstall="{{ item.value.install }}"
#  when: item.key == "system"
#  with_dict: "{{ vars['app_variable'] }} |app_default"

#- name: Debug SYSTEM
#  debug: msg="items {{ item.value.install }}." verbosity=4
#  with_dict: "{{ vars['app_variable'] }}"

- name: Prepare local files (local/*.conf)
  # runs on the controller against the repo working copy, not on the target
  connection: local
  become: no
  lineinfile:
    dest: "{{ item }}"
    # matches any previously inserted header so it is replaced, not duplicated
    regexp: "^#+ ANSIBLE FILE-MANAGED - DO NOT TOUCH WITHOUT ANSIBLE.*"
    state: present
    line: "########## ANSIBLE FILE-MANAGED - DO NOT TOUCH WITHOUT ANSIBLE (last run initiated from: {{ ansible_conf.host }}) ##########\n"
    insertbefore: BOF
  with_fileglob: system/local/*.conf
#  when: shouldinstall == true
23 |
24 |
25 |
--------------------------------------------------------------------------------
/playbooks/splunk/ae_upgrade_splunk.yml:
--------------------------------------------------------------------------------
---

# Upgrade Splunk (or a universal forwarder when spltype=splunkforwarder) on
# every host except the PORS server itself.

- name: Upgrade Splunk
  hosts: "*:!pors_server"
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: root
  # hosts proceed independently -- no lock-step between upgrade targets
  strategy: free
  # facts are gathered explicitly in pre_tasks, after vars are prepared
  gather_facts: False

  vars:
    splunk_upgrade: True
    splunk_type: "splunk"
    ansible_ssh_user: "{{ pors_ssh_user }}"

  pre_tasks:
    # forwarder upgrades use the UF-specific installation settings
    - include_vars: "{{ env_inventory_dir }}/group_vars/universal_forwarder/splunk_installation.yml"
      when: spltype is defined and spltype == "splunkforwarder"

    # override the default splunk type when the caller passed one
    - set_fact:
        splunk_type: "{{ spltype }}"
      when: spltype is defined

    - name: "Collecting system information"
      setup:

  tasks:

    - include_role:
        name: splunk_info

    - include_role:
        name: install

# follow-up plays: refresh shelper and run the system upgrade
- ansible.builtin.import_playbook: ../shelper/ae_install_shelper.yml
- ansible.builtin.import_playbook: ../system/ae_system_upgrade.yml
37 |
--------------------------------------------------------------------------------
/playbooks/system/ae_manage_splunkuser.yml:
--------------------------------------------------------------------------------
---
# Create or modify a native-auth Splunk user via the splunk CLI. Exactly one
# of `create_user` / `edit_user` should be set by the caller.
- name: Manage a splunk user account (native auth)
  hosts: all
  remote_user: "{{ pors_ssh_user }}"
  become: yes
  become_user: "{{ splunk_install_user }}"
  gather_facts: false

  vars:
    ansible_ssh_user: "{{ pors_ssh_user }}"

  tasks:
    # no_log hides the credentials from ansible output; NOTE(review): they are
    # still passed on the remote command line and thus briefly visible in the
    # process list -- confirm this is acceptable in your environment
    - name: "Create {{ user_name }} with role {{ user_role }}"
      shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk add user {{ user_name }} -password '{{ user_password }}' -role {{ user_role }} -auth {{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}"
      no_log: true
      when: create_user is defined

    - name: "Change {{ user_name }} , role {{ user_role }}"
      shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk edit user {{ user_name }} -password '{{ user_password }}' -role {{ user_role }} -auth {{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}"
      no_log: true
      when: edit_user is defined
22 |
--------------------------------------------------------------------------------
/roles/system_base/tasks/main.yml:
--------------------------------------------------------------------------------
---
# Base system configuration: DNS, hostname, custom hooks, packages, SELinux
# and unattended upgrades. Most steps are gated on inventory variables.

- include_tasks: set_dns_domain.yml
  when: ansible_facts['os_family'] == "Suse" and server_domain is defined

- include_tasks: set_dns_servers.yml
  when:
    - ansible_facts['os_family'] == "Suse"
    - server_dns1 is defined
    - server_dns2 is defined

- include_tasks: set_hostname.yml
  when:
    - system.set_hostname is defined
    - system.set_hostname == True
    - server_hostname is defined
    - server_shortname is defined

- name: Custom system tasks
  include_role:
    name: custom_system
  when:
    - (system.add_custom_tasks is defined and system.add_custom_tasks == True)
      or (hook_system_tasks is defined and hook_system_tasks == True)

# BUGFIX: the bare `include` action is deprecated and was removed in
# ansible-core 2.16 -- use include_tasks, consistent with the rest of this file.
- include_tasks: update_package_db.yml
- include_tasks: software.yml

- include_tasks: ../../system_optimize/tasks/optimize_selinux.yml
  when: system.selinux is defined

- include_tasks: unattended_upgrade.yml
32 |
--------------------------------------------------------------------------------
/roles/apps/app.template/tasks/git_checkout.yml:
--------------------------------------------------------------------------------
---

# Stash any local modifications, then clone/update the app from git using
# read-only credentials injected into the HTTPS URL.

- name: "Save current state"
  # park uncommitted changes in a timestamped stash so a forced update cannot
  # destroy them; exits 0 silently when the app dir does not exist yet
  ansible.builtin.shell: |
    cd {{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }} >> /dev/null 2>&1 || exit 0
    git add -A . && git stash save befreset-$(date +%F_%T)
  args:
    executable: /bin/bash
  with_dict: "{{ vars[app_variable] }}"

- name: "Clone/update app repository"
  # NOTE(review): the token is embedded in the repo URL and may surface in
  # module output or .git/config on the target; consider no_log / a credential
  # helper -- confirm current behavior is acceptable
  ansible.builtin.git:
    repo: "{{ item.value.git_repo | regex_replace('^https://', 'https://' + vault_git_ro_user + ':' + vault_git_ro_token + '@') }}"
    dest: "{{ splunk_installation.splunk_home_path }}/etc/{{ item.key | replace('shcluster_apps', 'shcluster/apps') | replace('_', '-') }}/{{ app_name }}"
    version: "{{ git_default_branch_override | default(item.value.git_version) }}"
    clone: yes
    update: yes
    force: yes
    accept_hostkey: yes
  with_dict: "{{ vars[app_variable] }}"
21 |
--------------------------------------------------------------------------------
/roles/common_tasks/tasks/monitoring.yml:
--------------------------------------------------------------------------------
---

# Deploy the distributed-search trust key of the monitoring console (DMC)
# onto this node so the DMC can query it as a search peer.

#- include_role:
#    name: install
#    tasks_from: fetch_distserverkeys.yml
#  delegate_to: "{{ groups['dmc'] | first }}"
#  delegate_facts: True
#  run_once: True

- name: "Ensure distServerKeys exists"
  ansible.builtin.file:
    path: "{{ splunk_installation.splunk_home_path }}/etc/auth/distServerKeys/"
    # octal modes quoted so YAML cannot re-type them (best practice)
    mode: "0700"
    owner: "{{ splunk_install_user }}"
    group: "{{ splunk_install_user }}"
    state: directory

- name: "Construct MC short name"
  # first DMC host split on "." -- element 0 is the short hostname, which is
  # the key directory name used below
  ansible.builtin.set_fact:
    mc_host_split: "{{ groups['dmc'] | first | split('.') }}"

- name: "Deploy key for distributed monitoring console"
  ansible.builtin.copy:
    owner: "{{ splunk_install_user }}"
    src: "{{ splunk_repository.repository_root }}/distServerKeys/{{ mc_host_split[0] }}"
    dest: "{{ splunk_installation.splunk_home_path }}/etc/auth/distServerKeys/"
    mode: "0640"
  notify: splunk restart
29 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/cribl_packages.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Package Variables
11 | #####################################################################################################################
12 |
13 | #logstream_packages:
14 | # linux_64_tgz:
15 | # package:
16 | # version: "{{ logstream_globversion }}"
17 | # url: 'https://cdn.cribl.io/dl/{{ logstream_globversion }}/{{ logstream_tgzname }}'
18 | # # https://cdn.cribl.io/dl/2.0.2/cribl-2.0.2-0b96f829-linux-x64.tgz
19 |
--------------------------------------------------------------------------------
/EXAMPLES/inventories/staging/group_vars/all/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ####################################################################################################################
3 | #
4 | # ALL OUTCOMMENTED KEY:VALUES (IF ANY) YOU SEE HERE ARE THE DEFINED DEFAULT VALUES
5 | # i.e. there is no need to enable them if you like the shown default value
6 | #
7 | ####################################################################################################################
8 |
9 | #####################################################################################################################
10 | # Configurations for Splunk deployment directory
11 | # Keep in mind to replace "-" and "/" with an underscore "_"
12 | #####################################################################################################################
13 |
14 | # default directory to put splunk apps in (i.e {splunk install dir}/etc/)
15 | # must be overwritten in a group_vars/deploymentserver, ..., etc
16 | #deployment:
17 | # dir: apps
18 |
--------------------------------------------------------------------------------
/roles/pors/configure/tasks/ask_pors_repo.yml:
--------------------------------------------------------------------------------
---
# Interactively collect the PORS data repository settings; each question is
# skipped when an answer was already provided (answ_* / pre_answ_* defined).
# NOTE(review): the example URLs ("https:///...", "git@:...") and the phrase
# "press to skip" look like scrubbed placeholders (a host name or key name
# was stripped) -- confirm against the original documentation.

- pause:
    prompt: |-

      Type in the full path to your git repository where {{ pors_data_dir }} should be pushed to.

      Examples:
      https:///splunkinfra/pors_data.git
      git@:splunkinfra/pors_data.git

      Repository
    echo: true
  no_log: "{{ debug_off }}"
  register: answ_git_pors_data_repo
  when:
    - answ_git_pors_data_repo is undefined
    - pre_answ_git_pors_data_repo is undefined

# BUGFIX: prompt typo "Eanble" -> "Enable"
- pause:
    prompt: |-

      Enable automatic config push?

      Should PORS push any changes to the repository automatically (highly recommended)?

      Type "y" to activate it or press to skip
    echo: true
  no_log: "{{ debug_off }}"
  register: answ_git_pors_data_push
  when:
    - answ_git_pors_data_push is undefined
    - pre_answ_git_pors_data_push is undefined
34 |
35 |
--------------------------------------------------------------------------------
/roles/proxmox/templates/ci_network.j2:
--------------------------------------------------------------------------------
#cloud-config
# ref: https://cloudinit.readthedocs.io/en/latest/reference/examples.html
# ref2: https://cloudinit.readthedocs.io/en/latest/reference/network-config-format-v2.html
{# Renders a cloud-init network-config v2 document for a proxmox guest.
   Matches any "e*" NIC and pins it to eth0; static addressing is used unless
   proxmox_target_server_address is the literal string "dhcp".
   Jinja comments are used here so the rendered output is unchanged. #}
network:
  version: 2
  ethernets:
    id0:
      match:
        name: e*
      set-name: eth0
{% if proxmox_target_server_address == "dhcp" %}
      dhcp4: true
{% else %}
      dhcp4: false
      addresses:
        - {{ proxmox_target_server_address }}
      routes:
        - to: 0.0.0.0/0
          via: {{ proxmox_target_server_gateway4 }}
{% endif %}
      nameservers:
        addresses:
          - {{ server_dns1 }}
          - {{ server_dns2 }}
{% if proxmox.node[px_var].advanced_ci.cinetwork is defined %}
# injected custom ciuser config
{{ proxmox.node[px_var].advanced_ci.cinetwork }}
{% endif %}
29 |
--------------------------------------------------------------------------------
/roles/group/shcmember/tasks/bootstrap.yml:
--------------------------------------------------------------------------------
---
#####################################################################################################################
# Bootstrap shcluster
#####################################################################################################################

- name: "Build SHC member URI list"
  set_fact:
    splunk_shc_list: "{{ splunk_shc_list | d([]) + [ 'https://' + item + ':8089' ] }}"
  # BUGFIX: nested moustaches ("{{ shc_site }}" inside a Jinja expression) are
  # not supported -- concatenate with `~` inside the expression instead.
  loop: "{{ query('inventory_hostnames', shc_site ~ '_shcmember') }}"
  run_once: true
  # NOTE(review): with run_once this builds the list on the first host only,
  # and the condition drops that host from the list -- confirm the bootstrap
  # node is meant to be excluded from -servers_list.
  when: inventory_hostname != item

- name: debug splunk_shc_list
  debug:
    msg: "{{ splunk_shc_list | join(',') }}"

- name: Run bootstrap shcluster-captain
  shell: "{{ splunk_installation.splunk_home_path }}/bin/splunk bootstrap shcluster-captain -servers_list {{ splunk_shc_list | join(',') }} -auth '{{ vault_splunk_ae_user }}:{{ vault_splunk_ae_password }}'"
  when:
    - splunk_server_conf.shclustering is defined
    - splunk_server_conf.shclustering.disabled == False
  run_once: true
22 |
--------------------------------------------------------------------------------