├── .gitignore
├── Pipfile
├── Pipfile.lock
├── README.md
├── ansible.cfg
├── bootstrap_consul.yml
├── group_vars
├── all
│ ├── vars
│ └── vault
├── cfssl
├── dbserver
├── dmz
├── flowbat
├── git
├── haproxy
├── joyent
├── kim
├── lan
├── ldap
├── lx_zones
├── netflow
├── owncloud
├── pimon
├── plex
├── prometheus
├── radius
├── samba
│ ├── vars
│ └── vault
├── shell
├── smartos_hypervisor
├── subsonic
├── syncthing
└── transmission
├── install_osquery.yml
├── production
├── provision_vm.yml
├── roles
├── cfssl
│ └── tasks
│ │ └── main.yml
├── common
│ ├── tasks
│ │ ├── auto_updates.yml
│ │ └── main.yml
│ └── templates
│ │ ├── apt-20auto-upgrades.j2
│ │ └── yum-cron.conf.j2
├── consul_server
│ ├── defaults
│ │ └── main.yml
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ ├── download_consul.yml
│ │ └── main.yml
│ └── templates
│ │ ├── consul.service.j2
│ │ └── etc_initd
│ │ ├── consul.Debian.j2
│ │ └── consul.RedHat.j2
├── cups_server
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── cupsd.conf.j2
├── docker
│ └── tasks
│ │ └── main.yml
├── flowbat
│ ├── defaults
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── flowbat.service.j2
├── git_server
│ └── tasks
│ │ └── main.yml
├── grafana
│ ├── files
│ │ └── grafana.yum
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── grafana.repo.j2
├── haproxy
│ ├── defaults
│ │ └── main.yml
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ ├── install_certbot.yml
│ │ └── main.yml
│ └── templates
│ │ └── haproxy.conf.j2
├── haproxy_kim
│ ├── defaults
│ │ └── main.yml
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ ├── haproxy-exporter.yml
│ │ └── main.yml
│ └── templates
│ │ ├── haproxy-defaults.init
│ │ ├── haproxy-exporter.init
│ │ └── haproxy.cfg.j2
├── influxdb
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ ├── influxdb.conf.j2
│ │ └── influxdb.repo.j2
├── joyent_zone_bootstrap
│ ├── tasks
│ │ └── main.yml
│ └── vars
│ │ └── main.yml
├── openldap
│ └── tasks
│ │ └── main.yml
├── plex
│ ├── defaults
│ │ └── main.yml
│ ├── files
│ │ └── get_latest_plex_version.py
│ ├── handlers
│ │ └── main.yml
│ └── tasks
│ │ ├── automate_yum_updates.yml
│ │ ├── install_apt.yml
│ │ ├── install_yum.yml
│ │ └── main.yml
├── plex_update
│ ├── defaults
│ │ └── main.yml
│ ├── files
│ │ └── get_latest_plex_version.py
│ ├── handlers
│ │ └── main.yml
│ └── tasks
│ │ ├── automate_yum_updates.yml
│ │ ├── install_apt.yml
│ │ ├── install_yum.yml
│ │ └── main.yml
├── postgresql
│ └── tasks
│ │ └── main.yml
├── precurse.kim_docker_images
│ └── tasks
│ │ └── main.yml
├── precurse.pi_docker_images
│ ├── defaults
│ │ └── main.yml
│ └── tasks
│ │ └── main.yml
├── precurse.stsrv_docker_images
│ └── tasks
│ │ └── main.yml
├── pxeserver
│ ├── tasks
│ │ ├── install_syslinux.yml
│ │ └── main.yml
│ ├── templates
│ │ └── pxelinux.cfg.j2
│ └── vars
│ │ └── main.yml
├── radius_server
│ ├── handlers
│ │ └── main.yml
│ └── tasks
│ │ └── main.yml
├── rsyslog_server
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── rsyslog.smartos.conf.j2
├── samba
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── smb.conf.j2
├── samba_swift_backup
│ ├── defaults
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ ├── openrc.j2
│ │ └── rclone.conf.j2
├── silk
│ ├── defaults
│ │ └── main.yml
│ ├── files
│ │ └── silk.conf
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ ├── rwflowpack.conf.j2
│ │ ├── rwflowpack.service.j2
│ │ ├── sensors.conf.j2
│ │ └── yaf.service.j2
├── ssmtp
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── ssmtp.conf.j2
├── subsonic
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── vars
│ │ └── main.yml
├── syncthing
│ ├── defaults
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ ├── config.xml.j2
│ │ └── syncthing.init.j2
├── syncthing_relay
│ ├── defaults
│ │ └── main.yml
│ ├── tasks
│ │ └── main.yml
│ └── templates
│ │ └── syncthing-relaysrv.service.j2
├── transmission
│ └── tasks
│ │ └── main.yml
└── ups
│ ├── handlers
│ │ └── main.yml
│ ├── tasks
│ │ ├── build_nut.yml
│ │ └── main.yml
│ ├── templates
│ │ ├── nut.conf.j2
│ │ ├── ups.conf.j2
│ │ ├── upsd.conf.j2
│ │ ├── upsd.users.j2
│ │ ├── upsmon.conf.j2
│ │ └── upssched.conf.j2
│ └── vars
│ │ └── main.yml
├── setup_cfssl.yml
├── setup_consul.yml
├── setup_dbserver.yml
├── setup_gitserver.yml
├── setup_haproxy.yml
├── setup_kim.yml
├── setup_netflow.yml
├── setup_owncloud.yml
├── setup_pimon.yml
├── setup_plex.yml
├── setup_radius.yml
├── setup_samba.yml
├── setup_shell.yml
├── setup_stsrv.yml
├── setup_subsonic.yml
├── setup_syncthing.yml
├── setup_transmission.yml
├── site.yml
├── tasks
├── smartos_zone_bootstrap.yml
└── smartos_zone_usersetup.yml
├── update_packages.yml
└── update_plex.yml
/.gitignore:
--------------------------------------------------------------------------------
1 | *.retry
2 | roles/smartos_provision/*
3 | *.key
4 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | verify_ssl = true
3 | name = "pypi"
4 | url = "https://pypi.org/simple"
5 |
6 | [packages]
7 | ansible = "*"
8 | docker-py = "*"
9 | dnspython = "*"
10 |
11 | [dev-packages]
12 |
13 | [requires]
14 | python_version = "2.7"
15 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "963a74a28fcd3eb46c4d747738fade8e9e8e94b63ec26f7b9660c7a5da2354a3"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "2.7"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "ansible": {
20 | "hashes": [
21 | "sha256:aaf9e1974bd12840ca055ac156f37601c08d73d726a3a6b98a2fe759a57051bb"
22 | ],
23 | "index": "pypi",
24 | "version": "==2.7.5"
25 | },
26 | "asn1crypto": {
27 | "hashes": [
28 | "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
29 | "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
30 | ],
31 | "version": "==0.24.0"
32 | },
33 | "backports.ssl-match-hostname": {
34 | "hashes": [
35 | "sha256:502ad98707319f4a51fa2ca1c677bd659008d27ded9f6380c79e8932e38dcdf2"
36 | ],
37 | "markers": "python_version < '3.5'",
38 | "version": "==3.5.0.1"
39 | },
40 | "bcrypt": {
41 | "hashes": [
42 | "sha256:05d8b762cb8a9bd0ad92ee95ed34b6119200a8760b625dadacfe88537ae691a3",
43 | "sha256:136243dc44e5bab9b61206bd46fff3018bd80980b1a1dfbab64a22ff5745957f",
44 | "sha256:1de0df7a9ca76d68ec8122573ae584aab78dcfb728fc2c78ecafb15750b79465",
45 | "sha256:214c720cfcd394ab9fd1cac59303847b0d45cc8feeb8126ec55619c77d85ec19",
46 | "sha256:290e07820d408e8c81f79f848279b95cef693d6e6ce148fa6b1e573e89a4305b",
47 | "sha256:2d60412b11994ab91d25572f780f8461748cecdb6014c23e33b2ea0aabc99782",
48 | "sha256:62ff976497590c7ef714f426aff8b908f2a11686364bb01cfc7d338e86a2ee27",
49 | "sha256:77c99c50bd7ac4e9e9f948015c4638176ebe0a495b22b6ae4857f3ba077b12d8",
50 | "sha256:9af0a7e135e7f5feca9c16ba33064af545b33b7297c1bb65daedb11c0fa653c4",
51 | "sha256:9b08088fd103eedfd750d832819555d1f96bc8bec749c6d35a3f3de3b9e8c98d",
52 | "sha256:a185efb05ef8bac9a531474abfefb8323f3ec8d524d308d6720657eaeda068b5",
53 | "sha256:c7a733c4c309c9ab572644cf7f8779845addcd5ecf474bb5c376f05731842c41",
54 | "sha256:cc3f53fa3287c0fc2bc1636e9514b896d4777444b03d9e0e4f16762a856bfe8a",
55 | "sha256:d216ee4e8e64d43d819acaf8aa0db6cb518859072152cf35ada4987bf5c92bff",
56 | "sha256:db3c7d712c4049eff365f00c9236279602af17c0ba44ca759008641c7fd892b7",
57 | "sha256:e1bb330c56ddec65ad9ce989e9e8664901ce96badfe47853a5ed03bfeb76f91a",
58 | "sha256:efcaace6e2915434d84e865c44f0cfe34e802269378afbb39a4aa6381aaec78b",
59 | "sha256:f4431e01f1a5fdea95c78758e24c9565651499d92024ff34663b1ab12c8a10e5",
60 | "sha256:fd21155abee7cd4c0ba8fad5138636f2531174ea79ad1751b25dc30d833e1723"
61 | ],
62 | "version": "==3.1.5"
63 | },
64 | "certifi": {
65 | "hashes": [
66 | "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
67 | "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
68 | ],
69 | "version": "==2018.11.29"
70 | },
71 | "cffi": {
72 | "hashes": [
73 | "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743",
74 | "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef",
75 | "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50",
76 | "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f",
77 | "sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30",
78 | "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93",
79 | "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257",
80 | "sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b",
81 | "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3",
82 | "sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e",
83 | "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc",
84 | "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04",
85 | "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6",
86 | "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359",
87 | "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596",
88 | "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b",
89 | "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd",
90 | "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95",
91 | "sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5",
92 | "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e",
93 | "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6",
94 | "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca",
95 | "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31",
96 | "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1",
97 | "sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2",
98 | "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085",
99 | "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801",
100 | "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4",
101 | "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184",
102 | "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917",
103 | "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f",
104 | "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb"
105 | ],
106 | "version": "==1.11.5"
107 | },
108 | "chardet": {
109 | "hashes": [
110 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
111 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
112 | ],
113 | "version": "==3.0.4"
114 | },
115 | "cryptography": {
116 | "hashes": [
117 | "sha256:05a6052c6a9f17ff78ba78f8e6eb1d777d25db3b763343a1ae89a7a8670386dd",
118 | "sha256:0eb83a24c650a36f68e31a6d0a70f7ad9c358fa2506dc7b683398b92e354a038",
119 | "sha256:0ff4a3d6ea86aa0c9e06e92a9f986de7ee8231f36c4da1b31c61a7e692ef3378",
120 | "sha256:1699f3e916981df32afdd014fb3164db28cdb61c757029f502cb0a8c29b2fdb3",
121 | "sha256:1b1f136d74f411f587b07c076149c4436a169dc19532e587460d9ced24adcc13",
122 | "sha256:21e63dd20f5e5455e8b34179ac43d95b3fb1ffa54d071fd2ed5d67da82cfe6dc",
123 | "sha256:2454ada8209bbde97065453a6ca488884bbb263e623d35ba183821317a58b46f",
124 | "sha256:3cdc5f7ca057b2214ce4569e01b0f368b3de9d8ee01887557755ccd1c15d9427",
125 | "sha256:418e7a5ec02a7056d3a4f0c0e7ea81df374205f25f4720bb0e84189aa5fd2515",
126 | "sha256:471a097076a7c4ab85561d7fa9a1239bd2ae1f9fd0047520f13d8b340bf3210b",
127 | "sha256:5ecaf9e7db3ca582c6de6229525d35db8a4e59dc3e8a40a331674ed90e658cbf",
128 | "sha256:63b064a074f8dc61be81449796e2c3f4e308b6eba04a241a5c9f2d05e882c681",
129 | "sha256:6afe324dfe6074822ccd56d80420df750e19ac30a4e56c925746c735cf22ae8b",
130 | "sha256:70596e90398574b77929cd87e1ac6e43edd0e29ba01e1365fed9c26bde295aa5",
131 | "sha256:70c2b04e905d3f72e2ba12c58a590817128dfca08949173faa19a42c824efa0b",
132 | "sha256:8908f1db90be48b060888e9c96a0dee9d842765ce9594ff6a23da61086116bb6",
133 | "sha256:af12dfc9874ac27ebe57fc28c8df0e8afa11f2a1025566476b0d50cdb8884f70",
134 | "sha256:b4fc04326b2d259ddd59ed8ea20405d2e695486ab4c5e1e49b025c484845206e",
135 | "sha256:da5b5dda4aa0d5e2b758cc8dfc67f8d4212e88ea9caad5f61ba132f948bab859"
136 | ],
137 | "version": "==2.4.2"
138 | },
139 | "dnspython": {
140 | "hashes": [
141 | "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01",
142 | "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"
143 | ],
144 | "index": "pypi",
145 | "version": "==1.16.0"
146 | },
147 | "docker-py": {
148 | "hashes": [
149 | "sha256:35b506e95861914fa5ad57a6707e3217b4082843b883be246190f57013948aba",
150 | "sha256:4c2a75875764d38d67f87bc7d03f7443a3895704efc57962bdf6500b8d4bc415"
151 | ],
152 | "index": "pypi",
153 | "version": "==1.10.6"
154 | },
155 | "docker-pycreds": {
156 | "hashes": [
157 | "sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4",
158 | "sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49"
159 | ],
160 | "version": "==0.4.0"
161 | },
162 | "enum34": {
163 | "hashes": [
164 | "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
165 | "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
166 | "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
167 | "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
168 | ],
169 | "markers": "python_version < '3'",
170 | "version": "==1.1.6"
171 | },
172 | "idna": {
173 | "hashes": [
174 | "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
175 | "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
176 | ],
177 | "version": "==2.8"
178 | },
179 | "ipaddress": {
180 | "hashes": [
181 | "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
182 | "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
183 | ],
184 | "markers": "python_version < '3'",
185 | "version": "==1.0.22"
186 | },
187 | "jinja2": {
188 | "hashes": [
189 | "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
190 | "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
191 | ],
192 | "version": "==2.10"
193 | },
194 | "markupsafe": {
195 | "hashes": [
196 | "sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432",
197 | "sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b",
198 | "sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9",
199 | "sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af",
200 | "sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834",
201 | "sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd",
202 | "sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d",
203 | "sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7",
204 | "sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b",
205 | "sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3",
206 | "sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c",
207 | "sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2",
208 | "sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7",
209 | "sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36",
210 | "sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1",
211 | "sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e",
212 | "sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1",
213 | "sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c",
214 | "sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856",
215 | "sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550",
216 | "sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492",
217 | "sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672",
218 | "sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401",
219 | "sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6",
220 | "sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6",
221 | "sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c",
222 | "sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd",
223 | "sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"
224 | ],
225 | "version": "==1.1.0"
226 | },
227 | "paramiko": {
228 | "hashes": [
229 | "sha256:3c16b2bfb4c0d810b24c40155dbfd113c0521e7e6ee593d704e84b4c658a1f3b",
230 | "sha256:a8975a7df3560c9f1e2b43dc54ebd40fd00a7017392ca5445ce7df409f900fcb"
231 | ],
232 | "version": "==2.4.2"
233 | },
234 | "pyasn1": {
235 | "hashes": [
236 | "sha256:061442c60842f6d11051d4fdae9bc197b64bd41573a12234a753a0cb80b4f30b",
237 | "sha256:0ee2449bf4c4e535823acc25624c45a8b454f328d59d3f3eeb82d3567100b9bd",
238 | "sha256:5f9fb05c33e53b9a6ee3b1ed1d292043f83df465852bec876e93b47fd2df7eed",
239 | "sha256:65201d28e081f690a32401e6253cca4449ccacc8f3988e811fae66bd822910ee",
240 | "sha256:79b336b073a52fa3c3d8728e78fa56b7d03138ef59f44084de5f39650265b5ff",
241 | "sha256:8ec20f61483764de281e0b4aba7d12716189700debcfa9e7935780850bf527f3",
242 | "sha256:9458d0273f95d035de4c0d5e0643f25daba330582cc71bb554fe6969c015042a",
243 | "sha256:98d97a1833a29ca61cd04a60414def8f02f406d732f9f0bcb49f769faff1b699",
244 | "sha256:b00d7bfb6603517e189d1ad76967c7e805139f63e43096e5f871d1277f50aea5",
245 | "sha256:b06c0cfd708b806ea025426aace45551f91ea7f557e0c2d4fbd9a4b346873ce0",
246 | "sha256:d14d05984581770333731690f5453efd4b82e1e5d824a1d7976b868a2e5c38e8",
247 | "sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7",
248 | "sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e"
249 | ],
250 | "version": "==0.4.5"
251 | },
252 | "pycparser": {
253 | "hashes": [
254 | "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
255 | ],
256 | "version": "==2.19"
257 | },
258 | "pynacl": {
259 | "hashes": [
260 | "sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255",
261 | "sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c",
262 | "sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e",
263 | "sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae",
264 | "sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621",
265 | "sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56",
266 | "sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39",
267 | "sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310",
268 | "sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1",
269 | "sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a",
270 | "sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786",
271 | "sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b",
272 | "sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b",
273 | "sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f",
274 | "sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20",
275 | "sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415",
276 | "sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715",
277 | "sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1",
278 | "sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0"
279 | ],
280 | "version": "==1.3.0"
281 | },
282 | "pyyaml": {
283 | "hashes": [
284 | "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b",
285 | "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf",
286 | "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a",
287 | "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3",
288 | "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1",
289 | "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1",
290 | "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613",
291 | "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04",
292 | "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f",
293 | "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537",
294 | "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531"
295 | ],
296 | "version": "==3.13"
297 | },
298 | "requests": {
299 | "hashes": [
300 | "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
301 | "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
302 | ],
303 | "version": "==2.21.0"
304 | },
305 | "six": {
306 | "hashes": [
307 | "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
308 | "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
309 | ],
310 | "version": "==1.12.0"
311 | },
312 | "urllib3": {
313 | "hashes": [
314 | "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
315 | "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
316 | ],
317 | "version": "==1.24.1"
318 | },
319 | "websocket-client": {
320 | "hashes": [
321 | "sha256:8c8bf2d4f800c3ed952df206b18c28f7070d9e3dcbd6ca6291127574f57ee786",
322 | "sha256:e51562c91ddb8148e791f0155fdb01325d99bb52c4cdbb291aee7a3563fd0849"
323 | ],
324 | "version": "==0.54.0"
325 | }
326 | },
327 | "develop": {}
328 | }
329 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Infrastructure Setup Using Ansible and SmartOS
2 |
3 | ### Create new Virtual Machine called "shell"
4 | ```
5 | ansible-playbook provision_vm.yml --limit=shell
6 | ```
7 | ### Run playbook to setup shell server
8 | ```
9 | ansible-playbook site.yml --limit=shell
10 | ```
11 |
12 | ### Updating Packages on shell
13 | ```
14 | ansible-playbook update_packages.yml --limit=shell
15 | ```
16 |
17 | ### Update packages on all servers
18 | ```
19 | ansible-playbook site.yml --tags=update_packages
20 | ```
21 |
--------------------------------------------------------------------------------
/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | ;inventory=smartos.py
3 | inventory=production
4 | retry_files_enabled = False
5 | vault_password_file = ./.vault.key
6 |
7 | [ssh_connection]
8 | pipelining=True
9 |
--------------------------------------------------------------------------------
/bootstrap_consul.yml:
--------------------------------------------------------------------------------
1 | - hosts: consul
2 | vars:
3 | consul_bootstrap: true
4 | roles:
5 | - consul
6 |
7 |
--------------------------------------------------------------------------------
/group_vars/all/vars:
--------------------------------------------------------------------------------
1 | admin_groups: adm
2 |
3 | autoboot: "true"
4 | hypervisor_host: smartos.signet
5 | user_script: "/usr/sbin/mdata-get root_authorized_keys > ~root/.ssh/authorized_keys ; /usr/sbin/mdata-get root_authorized_keys > ~admin/.ssh/authorized_keys"
6 | root_authorized_keys: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCX5NmP23FhXZ+YiV3unu/Bz6h5oaeJyx3J5EaJOi4de0im3MV1aXZlpYnF0MfpmRxYl2S2pUEJXjW/toA48A+zYjHI7xReKZ9MpCsDBlW4Vfl6EjaoZqN3Hc4P5wK/BiMkSIgURFRJukus1ajRvV+YZiAaRyTwgkhmF20ZdOOIAPiugaoEYg+6iQ5CJZURw1VLJ+UViCC7cBcC4AOjKcbEaLf9RzjISzAs78fN7G60+P5fyAsIinDhKC2VJE/AkxjFtQAdBlt3HNhWnLfd2jmClRNA24Ob/gL3i3OWecWdEsERSypDiOFZI/sRHDKih1mkESbiZiHHMiZRCO34Fqpx piranha@laptop"
7 |
8 | domain: signet
9 | resolvers:
10 | - 10.0.3.1
11 |
12 | ssmtp_dest_email: "{{ vault_ssmtp_dest_email }}"
13 | ssmtp_usestarttls: true
14 | ssmtp_hostname: "{{ ansible_hostname }}"
15 | ssmtp_mailhub_host: smtp.gmail.com
16 | ssmtp_mailhub_port: 587
17 | ssmtp_auth_user: "{{ vault_ssmtp_auth_user }}"
18 | ssmtp_auth_pass: "{{ vault_ssmtp_auth_pass }}"
19 |
20 | kim_acl_ips: "{{ vault_kim_acl_ips }}"
21 | kim_minio_access_key: "{{ vault_kim_minio_access_key }}"
22 | kim_minio_secret_key: "{{ vault_kim_minio_secret_key }}"
23 |
24 | kim_haproxy_crt: "{{ vault_kim_haproxy_crt }}"
25 | kim_haproxy_key: "{{ vault_kim_haproxy_key }}"
26 | ca_pem: "{{ vault_ca_pem }}"
27 |
--------------------------------------------------------------------------------
/group_vars/all/vault:
--------------------------------------------------------------------------------
1 | $ANSIBLE_VAULT;1.1;AES256
2 | 63303266306434343562623536666365343761353432333535386133393766613132666665336639
3 | 6132396562323231666263646436363432366632613037640a343666643732343662663339323636
4 | 36343532646333646632333961653233396435383934636430303238396132323631313866336634
5 | 3134363433633664660a333833396130323731316261643861383634656136313337366262666230
6 | 31306366303536326463383165363861373336343131393533656565323664643037373135303136
7 | 30663534396365386665386631616263353061393166306235663631323332663332626131313865
8 | 64643733353361363063333864356235363866326137343530383630643062373265363461613934
9 | 65346239386630616238623032303166653237363435653732326336653830323765626438663065
10 | 64393265616461373464346366616331353333326637343465333036356264663362363330303163
11 | 37626233353930336364333061323463306432366338613362646164663662323335373261363133
12 | 32353832643837656237663236613861623838303364376563393065346164653062633337663132
13 | 38396363396261376465613065623934646135653737383738333932633466333637373734386437
14 | 38623764386465383536613862333562333437626361336632613464336664306666383933613165
15 | 30306439333262653835396665326130633338623464313463336233626664373863383635343131
16 | 66646262396666623538306237626238323761636566643336633632643861353532316430633233
17 | 38346533336633316335646137303434366234393766663431656463303133366363646336316430
18 | 35376161333139333061666434373235333162636465633562653436623265616365326534363665
19 | 61663564636461646565623936383039633139323139633938306430363832346665326339353737
20 | 66316265653630313764633665636138313230353838626230346238613933356162383166363166
21 | 30313231613432376630636463346230626131383538303664356539613033646266613434653731
22 | 34313234333434383033656263313737653137326566353863653465663961613838623263633130
23 | 36393062366338336364363332343239643630623265346437366661366430323233393861613038
24 | 37663463616164333135666363653235333831343536323437666137646363303162363564353834
25 | 35616536306166663162666331633137393039663165656163303736326631653166303734643832
26 | 66636239356137313738643732373938663639386465666631613634343164643035323262333335
27 | 61316365366561306239626637666138323730636230356133356661663237396566663361663331
28 | 61646236363163636165626562346131383834323837343930346237626430646437333861373739
29 | 36373733666133396262373534616530366432623534366261386633643461363238383261326532
30 | 38666462653337343333333735663762656533353137333938633465333061663361316537303865
31 | 36363335336138383930306661656663633337316461623039373162376539326334396631383261
32 | 62373164333537353436356666653863613162333466303962386239656337326161323539396136
33 | 32316637333465616665643461623933323838666431663732323663383937636332306561333232
34 | 36366334363030366230393934663466313730356332656562323835383263633966613130343263
35 | 37393537386636623364373263343439623938326566636537383133376635333262663739306535
36 | 61623239383432343936303135303732303235323039356261623563333566363163343839366238
37 | 64666332393236386562363436386234636563326564363539356366356236346466346535623234
38 | 63333034663166366331613033336666393536616261343434383534363233613735333535363031
39 | 65386531643631663934333965313936653061663763303833353761373965376139366532356138
40 | 34643861366332333763663937633334643864363134386637363835663831353133383232656433
41 | 38323461656339353236623631623931626337316137633533346131653533373435653763613930
42 | 66373833333636383132666562346137656563303664353865666164663165303761386238333564
43 | 37363531323736323264336331316439306235633136626263393138303335316162633664663736
44 | 30303734353932353530363465633532333765366232636266613861616336326531373661623439
45 | 37623161336637636465616130316632336537633366323864323430366338363563656231666335
46 | 34316430663830353261306532623036323839613737616362323935356439383035653635346161
47 | 31643462386665383837313738623965623335396435313237343133653237343437393334326632
48 | 30376463383431346433643038376431633163623864303530353161653262656363366234353436
49 | 38373437303937633238386664656439343666666638303537326138353264373030393264353636
50 | 66313434333861383061383161303035373238396534626132383633613138376232366634323738
51 | 66653561616464666333623332623366613238326261383966393366383662626639336330393562
52 | 35363264303631393464356464393439636635653065333436623333633961666638333632393564
53 | 64353438666164333063323331343837623538316265313333616232386131643961656333643739
54 | 63333766333835383533643031393063663038666337626438383436353238306331316336613731
55 | 39623932323130346666376462653034643134646664623630616137663165303764383462396663
56 | 36666232643564623731313338363637613764373531373838623730623061386637643336616663
57 | 35326536373262663430613639316234316566383437656264623630353261333662333933363064
58 | 31316462316135333763376263383530396532363035363938313366656530333137313930313766
59 | 31346535316638646131613936346637346138623637633764666566326138306265313364333165
60 | 37323731653863653261343635633036383636303265656664656635346539633163386534626161
61 | 65663161613332643439333434363532383461393664376166316433343936323635313036633137
62 | 34646465663934643665373135666162396662383864326162343262343231303465326132346238
63 | 32343931636466643364613938366365363962313139376465303431613330633866636238303938
64 | 62313330346331343464616362656565396330383735366432383331613665626464653362393635
65 | 61336430656531393961333662633337393562643963616534613739623765623034333462366165
66 | 63323364333162633631346237653737366239613664643831633437343661613832363530343530
67 | 62366562393164623065376661646539353862346235383435373039346464656634633536666466
68 | 63316138613639666234613566313736313136653034393565303662356133333561623337653163
69 | 36356163376232646339346533313231393263343966326137373234336566313465323434616462
70 | 38633339383134343036663762393639393439313232623165633762356263646661646263333164
71 | 33663731653636326533666232343637313865396466363230366633346531306636383666336663
72 | 61326330643436313531613536346665643132663337656664376133343032386563303133623863
73 | 63663262613866613035373433306663656166663632326232333132303034636631333262616533
74 | 64383836323638383964646263373761633533653934376164373463383661626238373230653661
75 | 38636365666439666435346535306639666566653939393134376539613238386564383462623364
76 | 38633666396131393664356536306365393038646537636633333730343361653865393135643863
77 | 38653635333762636139663638353631303038633963393938336233396232373832616433383930
78 | 66643364393637316665313363646233336231613633633035313739643165363062316138353466
79 | 32636364323435383762356437613464633337663930373532623263633136386335643335626632
80 | 31313933633466386237636336363634343834396162366430633035343265386631356362303832
81 | 63393366653238613766353832346663653430613835643134306637616562656364323234633232
82 | 39636334613863616135623563396133373733366438326562366366323362366562306635326562
83 | 36343362393938366239653661306463643730643235396261333262616563633566363032653135
84 | 65386538653232356238333930343962383965373162383362336238313764643464373730653632
85 | 33326562616163656235316366393635343637386434333039623932323736346364353032623738
86 | 36653337616162393166666564616566653034656262376438326132363730346139346338633737
87 | 34363530393835396430353365373366376330653332363063313130306635663663313030336130
88 | 66633936313935386536396539323366303435356466326130386330356465303863366163316365
89 | 64336165626364656433613438623338623463343163353636646264343530653266363264663264
90 | 62343161356336616166323963643439383639326363616239313264393135373034386635316362
91 | 30643635643965653930346239353866313864653031373032636239373262316437663264623538
92 | 64373436363366353663633634613432623530376138626331663630313332626166353362313765
93 | 31303233383133633864643262353035663837633731366539393637653631333734666166393962
94 | 62313534376430653538623939333264326435383030386333633337653532633339353838306132
95 | 63353262643065623932613130323436396264616563313335313838373538626235386531613765
96 | 35316665623535373033373566613234613936393237626337623463373466303163653233356333
97 | 37643765623064623230646438663235323537326235333230346336633362376665396532663665
98 | 31636366333062643734303061633565373935346133386566383739303362363864616539626666
99 | 66626664366664353938303366303664383131343830663037636535396565633264353462636630
100 | 65393363306265373537393462393364323666343436643666376634386435643438303639393430
101 | 64656363363861376366313530666663303466326365353964373331326130383366313139373039
102 | 65623361366166626166333564663138316531656233343932393066326162376437386564313135
103 | 39616362303132383761613839666463333737313866376535363935656233656633313761613861
104 | 64306635353564366133663862393165666335663739363664333538346464636638636335313964
105 | 32336434393063323630336363613635663466386636303265343938333636303930323062636566
106 | 32643366346535346333643264663939633834643431613130626563313231366231366362393065
107 | 63333437663038353434633535343732346230333264343663656364623665653138663663643365
108 | 39613763636162646232343631333662386436363731633238653930363438323434626264396465
109 | 32373964663838356131613335376536333635666537393661313630306561316362633161353635
110 | 33653631653638353661336338623434323566343432633739393462306536313936653764633431
111 | 34376530613862303766616363616134656532323361333130383265636630313739623831626639
112 | 31396235616631366335346539376630383266353262336133373661623136633933663065623234
113 | 66313034636464656630613734633464346463643635636266326334336238313237623466393565
114 | 35663531333136303136663438353164303838666539376462376431366238633666373938326663
115 | 63613530393534376334363839353137626561353163366337623136623435313332346164663164
116 | 64366435373132373032363131636239303461663334316434386531313938633965643866366433
117 | 30323065306635653261363538373061393232613064333135313537613463393563633962353765
118 | 37636534356663623764346536313462666561323733626137373233333930643133356533386662
119 | 63393132383435366438623965343738306366383965386235393832666531333930323631373036
120 | 31313137333035383931373337326564353031663762356237346630376630623433316135656565
121 | 62313531623866646564376364383839313464313363636332363034363461653731653235333065
122 | 38623938626136623535373039656262326466373565373036326165616435343133626635346464
123 | 35336464326139383230366331326563333438373061626363323436343464613831313764396131
124 | 38313362396230633932656230623431653762313237316639316566383061623266363237343931
125 | 62666530333031343037383734323531653634666134306637393033356630313630393835623338
126 | 65306365653136613533633762303031313534383032373666316235656539333639626665306538
127 | 62623431356664386361356631383761363031376637613730323035623931663330333438313962
128 | 32356564376337353733333631613139626136376238636337613032346534313838393962356636
129 | 64326637353338363563666461343432396531363765393561323761323962343961386463376364
130 | 30353834336637393865643464383738653361663634666533366335386138326330343530306131
131 | 62613835366634626361663136656266633938353162353566623837613062616438366163663364
132 | 36303532353165646364316661666332303962373335326534653730613664663035666334336463
133 | 32633734656661323361643836356466626263323766343232663861343663363039303835316432
134 | 64343966356166363630636661386632626336366437333831386263653235626637396236613464
135 | 39656131613434373263666334633165373130373338646237656531363533636565343361653839
136 | 64636462313039386336663332393365616333323565643266653961363035616164653862666637
137 | 31396130343561313237393431383661626266303963643364613136396433373762623030326162
138 | 62326435306663316331373364343236666334336461373037376565373930333038313265353064
139 | 61623637363864306661353164663564643861333965373362383564363234303639303134353361
140 | 64323136656466663235386331346534646333656239363664663765363365656166383931346238
141 | 35383236356333633665393461343335376230393138306334623164656532333536633662633162
142 | 35353662326335343930316633316335356638383634633963343461636331636536636464393063
143 | 30656662653765303733313834633133313535363962363934626538386235376237386533393231
144 | 65653030343837333939623361383563356166313333313334666130653139373039373864396232
145 | 36396130623835396361363639363335643863616431363266333763363732666434336330643537
146 | 34616637646135346331373733336164346266623237356161323139366538393834326136393232
147 | 31306338643930313665353061636162316238383237636563376332356433366265323666613239
148 | 34663834316466326166643533646438633533336461633334323431353632303436343330623636
149 | 30656462376561663965306663383339636330346239376265373962356431656563633335376539
150 | 33633332326432333262666439643765613731383661343662383564306466303730316234393465
151 | 37613763316265363463363565356264626537333234356430616366383666616261393564343261
152 | 33663764313762313133613733633432653337393334656365323863363032306534363235316165
153 | 32336565383564386532643635316130366163633137366164363764323164326364323865306166
154 | 33646563333738383662636262333435616465666566613533343736643038643062643663386362
155 | 37366464636131373638366631393636646264393132303435663638666639323336363566356233
156 | 31373837353131346433626638633166326132653632306335333662663638396330666137643236
157 | 35343636396636366361643730333166643066306161373034306431623839346563643363396439
158 | 66386632613133646336646136636231653862636137616264326135333965356430393234326631
159 | 38393639356135666238323436393336613035383334366439643630646165336431643036376165
160 | 35323161386339633237333130383233656235663437623635383932333965636136346466316164
161 | 32383365393562373066336266313234666131633863616164303166366664323337323930393332
162 | 66663639303930373466636135303534303939313333636331376466656235353834393236373931
163 | 34633033616530663864626434323538343265346366306530636262613531376631616165373066
164 | 64383732623934626466386133373036343933396466646631636436663362656438666231666236
165 | 36373166313463383866306263663835386531343730336137613339333335633734643736623963
166 | 33636263636261353930656264343735326362343237376637393037333339386131343362323838
167 | 3664343838393865646539653234393734303639356565386366
168 |
--------------------------------------------------------------------------------
/group_vars/cfssl:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: centos-7
4 | cpu_cap: 100
5 | max_phy_mem: 512
6 | quota: 10
7 | brand: lx
8 | alias: cfssl
9 |
--------------------------------------------------------------------------------
/group_vars/dbserver:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /opt/local/bin/python2
2 | ansible_user: root
3 |
4 | image_name: base-64-lts
5 | cpu_cap: 100
6 | max_phy_mem: 4096
7 | quota: 100
8 | brand: joyent
9 | alias: dbserver
10 | nics:
11 | - {interface: "net0", nic_tag: "external", ip: "10.0.3.13", netmask: "255.255.255.0", gateway: "10.0.3.1"}
12 | - {interface: "net1", nic_tag: "stub0", ip: "10.0.1.2", netmask: "255.255.255.0"}
13 | filesystems:
14 | - {source: "/zones/db/pgsql", target: "/var/pgsql/data", read_only: false}
15 |
--------------------------------------------------------------------------------
/group_vars/dmz:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 | domain: dmz
3 | resolvers:
4 | - 10.0.4.254
5 |
6 | nics:
7 | - {interface: "net0", nic_tag: "external", vlan_id: "4", ip: "{{ lookup('dig', alias + '.' + domain + '.') }}", netmask: "255.255.255.0", gateway: "10.0.4.254"}
8 |
--------------------------------------------------------------------------------
/group_vars/flowbat:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 | silk_autostart: false
3 | silk_binary_only: true
4 |
5 |
6 | image_name: centos-7
7 | quota: 300
8 | max_phy_mem: 1536
9 | brand: lx
10 | alias: flowbat
11 | domain: signet
12 |
13 | filesystems:
14 | - {source: "/zones/db/netflow", target: "/data", read_only: true}
15 |
--------------------------------------------------------------------------------
/group_vars/git:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /opt/local/bin/python2
2 | ansible_user: root
3 |
4 | image_name: minimal-64-lts
5 | cpu_cap: 100
6 | max_phy_mem: 512
7 | quota: 5
8 | brand: joyent
9 | alias: git
10 |
11 | filesystems:
12 | - {source: "/zones/data/git", target: "/export/git", read_only: false}
13 |
--------------------------------------------------------------------------------
/group_vars/haproxy:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: centos-7
4 | cpu_cap: 200
5 | max_phy_mem: 512
6 | quota: 5
7 | brand: lx
8 | alias: haproxy
9 |
10 | haproxy_cb_email: andrewklaus@gmail.com
11 | haproxy_cb_domains:
12 | - home.aklaus.ca
13 | - www.aklaus.ca
14 | - subsonic.aklaus.ca
15 | - owncloud.aklaus.ca
16 |
17 |
--------------------------------------------------------------------------------
/group_vars/joyent:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /opt/local/bin/python
2 | admin_group: adm
3 | user_script: "/usr/sbin/mdata-get root_authorized_keys > ~root/.ssh/authorized_keys ; /usr/sbin/mdata-get root_authorized_keys > ~admin/.ssh/authorized_keys"
4 | root_authorized_keys: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCX5NmP23FhXZ+YiV3unu/Bz6h5oaeJyx3J5EaJOi4de0im3MV1aXZlpYnF0MfpmRxYl2S2pUEJXjW/toA48A+zYjHI7xReKZ9MpCsDBlW4Vfl6EjaoZqN3Hc4P5wK/BiMkSIgURFRJukus1ajRvV+YZiAaRyTwgkhmF20ZdOOIAPiugaoEYg+6iQ5CJZURw1VLJ+UViCC7cBcC4AOjKcbEaLf9RzjISzAs78fN7G60+P5fyAsIinDhKC2VJE/AkxjFtQAdBlt3HNhWnLfd2jmClRNA24Ob/gL3i3OWecWdEsERSypDiOFZI/sRHDKih1mkESbiZiHHMiZRCO34Fqpx piranha@laptop"
5 |
--------------------------------------------------------------------------------
/group_vars/kim:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/precurse/playbooks/ccc3ec6d3b0b30a33d59afc7a48ab7b682cba19e/group_vars/kim
--------------------------------------------------------------------------------
/group_vars/lan:
--------------------------------------------------------------------------------
1 | domain: signet
2 | resolvers:
3 | - 10.0.3.254
4 |
5 | nics:
6 | - {interface: "net0", nic_tag: "external", ip: "{{ lookup('dig', alias + '.' + domain + '.') }}", netmask: "255.255.255.0", gateway: "10.0.3.254"}
7 |
--------------------------------------------------------------------------------
/group_vars/ldap:
--------------------------------------------------------------------------------
1 | image_name: centos-6
2 | cpu_cap: 100
3 | max_phy_mem: 4096
4 | quota: 100
5 | brand: lx
6 | alias: ldap
7 |
8 | filesystems:
9 | - {source: "/zones/db/ldap", target: "/var/lib/ldap", read_only: false}
10 |
--------------------------------------------------------------------------------
/group_vars/lx_zones:
--------------------------------------------------------------------------------
1 | admin_group: adm
2 | user_script: "/usr/sbin/mdata-get root_authorized_keys > ~root/.ssh/authorized_keys ; /usr/sbin/mdata-get root_authorized_keys > ~admin/.ssh/authorized_keys"
3 | root_authorized_keys: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCX5NmP23FhXZ+YiV3unu/Bz6h5oaeJyx3J5EaJOi4de0im3MV1aXZlpYnF0MfpmRxYl2S2pUEJXjW/toA48A+zYjHI7xReKZ9MpCsDBlW4Vfl6EjaoZqN3Hc4P5wK/BiMkSIgURFRJukus1ajRvV+YZiAaRyTwgkhmF20ZdOOIAPiugaoEYg+6iQ5CJZURw1VLJ+UViCC7cBcC4AOjKcbEaLf9RzjISzAs78fN7G60+P5fyAsIinDhKC2VJE/AkxjFtQAdBlt3HNhWnLfd2jmClRNA24Ob/gL3i3OWecWdEsERSypDiOFZI/sRHDKih1mkESbiZiHHMiZRCO34Fqpx piranha@laptop"
4 |
--------------------------------------------------------------------------------
/group_vars/netflow:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: centos-7
4 | cpu_cap: 100
5 | max_phy_mem: 512
6 | quota: 100
7 | brand: lx
8 | alias: netflow
9 |
10 | filesystems:
11 | - {source: "/zones/db/netflow", target: "/data", read_only: false}
12 |
13 |
--------------------------------------------------------------------------------
/group_vars/owncloud:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: centos-6
4 | quota: 40
5 | brand: lx
6 | alias: owncloud
7 | nics:
8 | - {interface: "eth0", nic_tag: "external", vlan_id: "4", ip: "10.0.4.14", netmask: "255.255.255.0", gateway: "10.0.4.1"}
9 | - {interface: "eth1", nic_tag: "stub0", ip: "10.0.1.3", netmask: "255.255.255.0"}
10 | filesystems:
11 | - {source: "/zones/data/owncloud_data", target: "/media/owncloud_data", read_only: false}
12 |
--------------------------------------------------------------------------------
/group_vars/pimon:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /usr/bin/python2
2 | ansible_user: root
3 |
4 | st_user: piranha
5 | st_group: piranha
6 |
--------------------------------------------------------------------------------
/group_vars/plex:
--------------------------------------------------------------------------------
1 | image_name: centos-6
2 | cpu_cap: 400
3 | max_phy_mem: 1024
4 | quota: 20
5 | brand: lx
6 | alias: plex
7 | filesystems:
8 | - {source: "/zones/db/plex", target: "/var/lib/plexmediaserver", read_only: false}
9 | - {source: "/zones/data/media", target: "/media", read_only: true}
10 |
--------------------------------------------------------------------------------
/group_vars/prometheus:
--------------------------------------------------------------------------------
1 | image_name: centos-6
2 | cpu_cap: 100
3 | max_phy_mem: 1024
4 | quota: 20
5 | brand: lx
6 | alias: prometheus
7 | domain: signet
8 |
--------------------------------------------------------------------------------
/group_vars/radius:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: centos-7
4 | cpu_cap: 100
5 | max_phy_mem: 512
6 | quota: 100
7 | brand: lx
8 | alias: radius
9 |
--------------------------------------------------------------------------------
/group_vars/samba/vars:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 | ansible_python_interpreter: /opt/local/bin/python2
3 |
4 | image_name: base-64-lts
5 | cpu_cap: 100
6 | max_phy_mem: 1024
7 | quota: 100
8 | brand: joyent
9 | alias: samba
10 |
11 | filesystems:
12 | - {source: "/zones/data/personal", target: "/media/personal", read_only: false}
13 | - {source: "/zones/data/media/movies", target: "/media/movies", read_only: false}
14 | - {source: "/zones/data/media/tv", target: "/media/tv", read_only: false}
15 | - {source: "/zones/data/media/incoming", target: "/media/incoming", read_only: false}
16 | - {source: "/zones/data/media/training", target: "/media/training", read_only: false}
17 | - {source: "/zones/data/media/music", target: "/media/music", read_only: false}
18 | - {source: "/zones/db/syncthing", target: "/media/syncthing", read_only: false}
19 |
20 | samba_username: "{{ vault_samba_username }}"
21 | samba_password: "{{ vault_samba_password }}"
22 | samba_cron: "{{ vault_samba_cron }}"
23 | os_username: "{{ vault_os_username }}"
24 | os_password: "{{ vault_os_password }}"
25 | os_auth_url: "{{ vault_os_auth_url }}"
26 | os_domain: "Default"
27 | os_tenant_name: "{{ vault_os_tenant_name }}"
28 | os_region_name: "{{ vault_os_region_name }}"
29 | os_endpoint_type: "{{ vault_os_endpoint_type }}"
30 | os_auth_ver: "{{ vault_os_auth_ver }}"
31 | os_enc_key: "{{ vault_os_enc_key }}"
32 | os_enc_salt: "{{ vault_os_enc_salt }}"
33 | pcloud_token: "{{ vault_pcloud_token }}"
34 | pcloud_enc_key: "{{ vault_pcloud_enc_key }}"
35 | pcloud_enc_salt: "{{ vault_pcloud_enc_salt }}"
36 |
--------------------------------------------------------------------------------
/group_vars/samba/vault:
--------------------------------------------------------------------------------
1 | $ANSIBLE_VAULT;1.1;AES256
2 | 30323932643032363538363336353062326564323839656662323063613265373736306333623962
3 | 3037373036636635303961376135646431343564646265340a383835613438383236613634633563
4 | 61623462303138663135346634373231373033633435346230633565306663643631333930343239
5 | 3034626533636634350a613237333038343064616633646131393034353937613537363966666434
6 | 30666133373664613662633138333038623264643735396366656264333663326130303238346434
7 | 39626539323430343732316663376461313533323030373137363133633136343134663136626565
8 | 61343037653235343635633962386433303163393235366134333531396465356534613534353132
9 | 63613664653064633264383039353639363366636238386136313962316537396666663630646533
10 | 31633838326566306336666131373266333832346463393634646461346631363163336332663462
11 | 32636364666333313238353630333738623434313436643635616630396464323361613565383235
12 | 66393033396337366665623938633162366337306566396137313438363839383964363331613332
13 | 65313261643064623066363435663435633464653737366134393230383031616633386661653831
14 | 63313439636239323236636434363037636433343864323866393338336562316632313130303431
15 | 64356530323737343935393630343335313334306139623264653932333437646164613634346131
16 | 30376566656465353866643366313864626136376161636563383663346339376337333032633232
17 | 64643739346266616136366663643332653132366631336565343537333537356134373164343538
18 | 38353862376331306237663837646437386432633833336461303338666138356566336633383638
19 | 65306638356235393664393764646338633664663235313066353634323661303765356533653137
20 | 32336536626239666539633761313636326261663139363763336666623731343232326437613639
21 | 31356661616439623266343161643565353337613936643432313531313731313465666632396564
22 | 66656431343561343634303562323830333937363932623565653830383934383834636563396633
23 | 34316533346463393835303261303539316562386235366438366163373933653239303564323166
24 | 66323334636339363130666666333937633531333032633061373166663636316262646561303337
25 | 35323766633764313734653333393366613133643833363931623437383931656338646366613130
26 | 34616662636537643562313038623735356164383133633164326239393235346534653064356561
27 | 37656233303965303435383264383938653337663061623330313633646262336432613034383065
28 | 30663763353733613935343433643436653764373838303737353061323061323766333237383665
29 | 35613530373363616665333835666663663732383363396638666131366437326431613165623966
30 | 65633061326562316634303435313064623435613463303539346363366633306433343963386330
31 | 62366666393833623035643333393134663964323637653465396637313733303833326164656434
32 | 62346331623631366362346335646338326332653333393735353733636139356435323832316562
33 | 39613739396639306263303537373836613933316335323433363039366463366161323633323662
34 | 34393064323332303433616534656236343166383139353464366665376461663364656633666130
35 | 38333866303866623662336366373164333661313539336165656461386261386636303861316232
36 | 33386335656139363461343838393739303665346462376663356633666535323263316538363339
37 | 36613763356666626530363038346364303862386333653435356130383836323865393166343533
38 | 38316631663363386436656435656561333365303432343739363162333565343438636163613064
39 | 62663464663239666631323738356636313733303834663836626562323335623234343530613935
40 | 65346434646230666232363438303333333462636665373435373831636166663063333639623037
41 | 30303936316330326438386339363938366466396631386430623332643730646634663263333938
42 | 31363864343861396464626335326563363631616363303438363033363366663533336365613433
43 | 65336664343335393933653132396262343534376363633530396663623233393665346664366361
44 | 31636633336465613531623364636562383061306264333461393534336536366135346634616536
45 | 30333163363439396465326637343962336664346665616232363966306663363530326165633862
46 | 33353837653937653764376635353531646239336466333234396363623230643863366333303630
47 | 62663731646266313436393334643631643661393033393063333834646166643035383564343232
48 | 33663065346132316363623937663639343232373939353834623234323966366335626662666134
49 | 31616364626533333736633763613462336134306432386234656366346161366261616666623962
50 | 30646234633762626462363866353966626432343565336638323662613062363030393535313762
51 | 65623635653335636435393639366634386235313232323531623361663732396636326139363266
52 | 34356264383565626666373139333063303262303962613734623036396539306663623735663565
53 | 32313934663062333835626237333035393664323266656238633839336339633734393332653831
54 | 62626632346361626630326135343662663330343535663462663635393530376438653066626332
55 | 31313261353463343335393431343362626162666536356462656631663438376335383435623034
56 | 39386264663130646331313465616138346139643430396461663164613634316339333032656336
57 | 34386137613266653937326631643164303331366239353531616234366534653164313564393936
58 | 34636137373862616664306561386666636465666534376438353462373034353966336536663536
59 | 61626331373532626166306461623537393432626232613535633365373039363939386430313263
60 | 35353564633339613861346162383731323131306538356234383461663231343566323533306264
61 | 63666166303532643661613634386566663839613266663434316334623332613836336236323261
62 | 30313634626566363433373862376530313333623265363636356338623836313734353033313261
63 | 63343561613939313035366238313938326639626638653065353261386336623637316435303062
64 | 39313732336166323261306433633535323632663163633238616437393830366166366634643763
65 | 61356534646537633433313663393332646335306135663465366134333532636165383637663534
66 | 33396439376163333765306665613265333337613131356638623162636463316131303531386136
67 | 3831303833316637623161393039336462613666383063323865
68 |
--------------------------------------------------------------------------------
/group_vars/shell:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /opt/local/bin/python2
2 | ansible_user: root
3 |
4 | image_name: minimal-64
5 | cpu_cap: 100
6 | max_phy_mem: 256
7 | quota: 5
8 | brand: joyent
9 | alias: shell
10 |
--------------------------------------------------------------------------------
/group_vars/smartos_hypervisor:
--------------------------------------------------------------------------------
1 | smartos_hypervisor: true
2 | ansible_user: root
3 | ansible_python_interpreter: /opt/tools/bin/python2
4 | hypervisor_install_python: true
5 |
--------------------------------------------------------------------------------
/group_vars/subsonic:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: debian-8
4 | cpu_cap: 100
5 | max_phy_mem: 1024
6 | quota: 40
7 | brand: lx
8 | alias: subsonic
9 | filesystems:
10 | - {source: "/zones/data/media/music", target: "/var/music", read_only: true}
11 | - {source: "/zones/db/subsonic", target: "/var/subsonic", read_only: false }
12 |
--------------------------------------------------------------------------------
/group_vars/syncthing:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | image_name: debian-7
4 | cpu_cap: 100
5 | max_phy_mem: 512
6 | quota: 20
7 | brand: lx
8 | alias: syncthing
9 | filesystems:
10 | - {source: "/zones/db/syncthing", target: "/export/syncthing_data", read_only: false}
11 |
--------------------------------------------------------------------------------
/group_vars/transmission:
--------------------------------------------------------------------------------
1 | ansible_user: root
2 |
3 | autoboot: "true"
4 | image_name: centos-6
5 | cpu_cap: 100
6 | max_phy_mem: 1024
7 | quota: 20
8 | brand: lx
9 | alias: transmission
10 | filesystems:
11 | - {source: "/zones/db/transmission", target: "/var/lib/transmission-daemon", read_only: false }
12 | - {source: "/zones/data/media", target: "/media", read_only: false}
13 |
--------------------------------------------------------------------------------
/install_osquery.yml:
--------------------------------------------------------------------------------
1 | - hosts: all
2 | gather_facts: true
3 | tasks:
4 | - group_by: key=os_{{ ansible_distribution }}
5 |
6 | - hosts: os_Ubuntu
7 | gather_facts: False
8 | tasks:
9 | - apt_key:
10 | keyserver=keyserver.ubuntu.com
11 | id=1484120AC4E9F8A1A577AEEE97A80C63C9D8B80B
12 | become: yes
13 | - apt_repository:
14 | repo="deb [arch=amd64] https://osquery-packages.s3.amazonaws.com/trusty trusty main" state=present
15 | become: yes
16 | - apt:
17 | name=osquery
18 | update_cache=yes cache_valid_time=3600
19 | become: yes
20 |
21 | - hosts: os_Debian
22 | gather_facts: False
23 | tasks:
24 | - apt_key:
25 | keyserver=keyserver.ubuntu.com
26 | id=1484120AC4E9F8A1A577AEEE97A80C63C9D8B80B
27 | become: yes
28 | - apt_repository:
29 | repo="deb [arch=amd64] https://osquery-packages.s3.amazonaws.com/trusty trusty main" state=present
30 | become: yes
31 | - apt:
32 | name=osquery
33 | update_cache=yes cache_valid_time=3600
34 | become: yes
35 |
36 | - hosts: os_CentOS
37 | gather_facts: False
38 | tasks:
39 | - name: Install repo
40 | yum:
41 | name=https://osquery-packages.s3.amazonaws.com/centos7/noarch/osquery-s3-centos7-repo-1-0.0.noarch.rpm
42 | state=present
43 | become: yes
44 |
45 | - name: Install package
46 | yum:
47 | name=osquery
48 | state=present
49 | become: yes
50 |
51 |
--------------------------------------------------------------------------------
/production:
--------------------------------------------------------------------------------
1 | # file: production
2 | [smartos_hypervisor]
3 | smartos.signet
4 |
5 | ## DMZ
6 | [shell]
7 | shell.dmz
8 |
9 | [git]
10 | git.dmz
11 |
12 | [plex]
13 | plex.dmz
14 |
15 | [subsonic]
16 | subsonic.dmz
17 |
18 | [haproxy]
19 | haproxy.dmz
20 |
21 | [owncloud]
22 | owncloud.dmz
23 |
24 | [transmission]
25 | transmission.dmz
26 |
27 | [syncthing]
28 | syncthing.dmz
29 |
30 | [dmz:children]
31 | shell
32 | git
33 | plex
34 | subsonic
35 | haproxy
36 | owncloud
37 | transmission
38 | syncthing
39 |
40 | ## LAN
41 | [samba]
42 | smb.signet
43 |
44 | [radius]
45 | 10.0.3.10
46 |
47 | [switch]
48 | switch.signet
49 |
50 | [dbserver]
51 | dbserver.signet
52 |
53 | [objectstore]
54 | objectstore.signet
55 |
56 | [cfssl]
57 | cfssl.signet
58 |
59 | [logging]
60 | logging.signet
61 |
62 | [pimon]
63 | alarmpi.signet
64 |
65 | [kim]
66 | kim.signet
67 |
68 | [syncthing_relay]
69 | stsrv.signet ansible_user=ubuntu
70 |
71 | [prometheus]
72 | prometheus.signet
73 |
74 | [netflow]
75 | netflow.signet
76 |
77 | [flowbat]
78 | flowbat.signet
79 |
80 | [consul]
81 | alarmpi.signet
82 |
83 |
84 | [lan:children]
85 | samba
86 | radius
87 | netflow
88 | switch
89 | dbserver
90 | objectstore
91 | cfssl
92 | logging
93 | pimon
94 | kim
95 | prometheus
96 | flowbat
97 |
98 | [consul:children]
99 | pimon
100 |
--------------------------------------------------------------------------------
/provision_vm.yml:
--------------------------------------------------------------------------------
1 | # file: provision_vm.yml
2 |
3 | - hosts: all
4 | gather_facts: false
5 | roles:
6 | - role: smartos_provision
7 | vars:
8 | - provision_mode: true
9 | - smartos_python_path: /opt/local/bin/python2
10 | post_tasks:
11 | # Install python on joyent zones
12 | - block:
13 | - name: Upgrading pkgin packages
14 | raw: /opt/local/bin/pkgin -y update; /opt/local/bin/pkgin -y upgrade
15 | - name: Installing python
16 | raw: /opt/local/bin/pkgin -y install python27
17 | - set_fact: ansible_python_interpreter={{ smartos_python_path }}
18 | - name: Test python install
19 | setup:
20 | rescue:
21 | - debug: msg="Unable to install python on joyent zone"
22 |
23 | when: brand == 'joyent'
24 | become: yes
25 |
26 |
--------------------------------------------------------------------------------
/roles/cfssl/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Install golang
2 | yum:
3 | name: golang
4 | update_cache: yes
5 | become: yes
6 |
--------------------------------------------------------------------------------
/roles/common/tasks/auto_updates.yml:
--------------------------------------------------------------------------------
1 | - name: Enable debian auto-updates
2 | apt:
3 | name: unattended-upgrades
4 | update_cache: yes
5 | become: yes
6 | when: ansible_os_family == "Debian"
7 |
8 | - name: Copy auto-upgrade template (apt)
9 | template:
10 | src: apt-20auto-upgrades.j2
11 | dest: /etc/apt/apt.conf.d/20auto-upgrades
12 | become: yes
13 | when: ansible_os_family == "Debian"
14 |
15 | - name: Ensure yum fully updates
16 | yum:
17 | name: yum
18 | state: latest
19 | update_cache: yes
20 | become: yes
21 | when: ansible_distribution == "CentOS"
22 |
23 | - name: Install yum-cron
24 | yum:
25 | name: yum-cron
26 | become: yes
27 | when: ansible_distribution == "CentOS"
28 |
29 | - name: Copy yum-cron template
30 | template:
31 | src: yum-cron.conf.j2
32 | dest: /etc/yum/yum-cron.conf
33 | become: yes
34 | when: ansible_distribution == "CentOS"
35 |
36 | - name: Enable yum-cron service
37 | service:
38 | name: yum-cron
39 | state: started
40 | enabled: yes
41 | become: yes
42 | when: ansible_distribution == "CentOS"
43 |
--------------------------------------------------------------------------------
/roles/common/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: gather facts
2 |   setup:
3 |
4 | - block:
5 |     - name: Setup Users
6 |       user:
7 |         name: piranha
8 |         shell: /bin/bash
9 |         state: present
10 |         append: yes
11 |         groups: "{{ admin_groups }}"
12 |
13 |     - name: Adding github ssh keys
14 |       authorized_key: user={{ item }} key=https://github.com/precurse.keys
15 |       with_items:
16 |         - piranha
17 |         - root
18 |
19 |     - name: Enable account
20 |       command: passwd -N piranha
21 |       when: ansible_distribution == "SmartOS"
22 |
23 | - name: Enable EPEL Repo for CentOS
24 |   yum:
25 |     name: epel-release
26 |     state: present
27 |   when: ansible_distribution == "CentOS"
28 |
29 | - name: Enable SCL Repo for CentOS
30 |   yum:
31 |     name: centos-release-scl
32 |     state: present
33 |   when: ansible_distribution == "CentOS"
34 |
35 | - name: Disable Debian auto-start daemons on install
36 |   shell: echo "exit 101" > /usr/sbin/policy-rc.d creates=/usr/sbin/policy-rc.d
37 |   when: ansible_os_family == "Debian"
38 |
39 | - import_tasks: auto_updates.yml
40 |
41 | - name: Install common packages
42 |   package:
43 |     name: "{{ item }}"
44 |     state: present
45 |   with_items:
46 |     - sudo
47 |     - tmux
48 |     - git
49 |   ignore_errors: yes
50 |
51 | - name: Clone dotfiles
52 |   git:
53 |     repo: "https://github.com/precurse/dotfiles"
54 |     dest: "/home/piranha/dotfiles"
55 |     clone: yes
56 |   # NOTE(review): become_user without `become: true` is a no-op unless become is enabled globally (ansible.cfg) — confirm
57 |   become_user: piranha
58 |
59 | - name: Link dotfiles
60 |   file:
61 |     state: link
62 |     force: yes
63 |     src: "/home/piranha/dotfiles/{{ item }}"
64 |     dest: "/home/piranha/.{{ item }}"
65 |   with_items:
66 |     - profile
67 |     - bashrc
68 |     - bash_profile
69 |   # NOTE(review): same become_user/become caveat as "Clone dotfiles" above — confirm
70 |   become_user: piranha
71 |
72 | - name: Setting hostname on SmartOS
73 |   command: sm-set-hostname {{ alias }}
74 |   when: alias is defined and ansible_distribution == "SmartOS"
75 |   changed_when: no
76 |
77 | - name: Setting hostname on Linux
78 |   hostname: name={{ alias }}
79 |   when: alias is defined and ansible_system == "Linux"
80 |   changed_when: no
81 |   become: yes
--------------------------------------------------------------------------------
/roles/common/templates/apt-20auto-upgrades.j2:
--------------------------------------------------------------------------------
1 | APT::Periodic::Update-Package-Lists "1";
2 | APT::Periodic::Unattended-Upgrade "1";
3 |
--------------------------------------------------------------------------------
/roles/common/templates/yum-cron.conf.j2:
--------------------------------------------------------------------------------
1 | [commands]
2 | update_cmd = security
3 | apply_updates = yes
4 | random_sleep = 360
5 | [emitters]
6 | system_name = None
7 | emit_via=stdio
8 | output_width=80
9 | [base]
10 | debuglevel = 2
11 | mdpolicy = group:main
--------------------------------------------------------------------------------
/roles/consul_server/defaults/main.yml:
--------------------------------------------------------------------------------
1 | consul_ver: "0.8.0"
2 | consul_shasum_amd64: "f4051c2cab9220be3c0ca22054ee4233f1396c7138ffd97a38ffbcea44377f47"
3 | consul_shasum_arm: "a6efaabb11990672df0aaf6c70a272484d2f6d654f393c58ec1a41fd30629f0c"
4 | consul_bootstrap: false
5 |
6 | consul_install_path: /usr/local/sbin
7 |
--------------------------------------------------------------------------------
/roles/consul_server/handlers/main.yml:
--------------------------------------------------------------------------------
1 | - name: restart consul
2 |   service:
3 |     name: consul
4 |     state: restarted
5 |   become: true  # service management needs root; matches the role's other tasks and the cups handler
--------------------------------------------------------------------------------
/roles/consul_server/tasks/download_consul.yml:
--------------------------------------------------------------------------------
1 | - name: Download consul arm binary
2 |   get_url:
3 |     url: https://releases.hashicorp.com/consul/{{ consul_ver }}/consul_{{ consul_ver }}_linux_arm.zip
4 |     dest: /tmp/consul_{{ consul_ver }}.zip
5 |     mode: "0644"
6 |     checksum: sha256:{{ consul_shasum_arm }}
7 |   when: ansible_machine == "armv7l"
8 |   become_user: consul
9 |   become: true
10 |
11 | - name: Download consul amd64 binary
12 |   get_url:
13 |     url: https://releases.hashicorp.com/consul/{{ consul_ver }}/consul_{{ consul_ver }}_linux_amd64.zip
14 |     dest: /tmp/consul_{{ consul_ver }}.zip
15 |     mode: "0644"
16 |     checksum: sha256:{{ consul_shasum_amd64 }}
17 |   when: ansible_machine == "x86_64"
18 |   become_user: consul
19 |   become: true
20 |
--------------------------------------------------------------------------------
/roles/consul_server/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Ensure consul user exists
2 |   user:
3 |     name: consul
4 |     comment: "Consul user"
5 |     shell: /bin/false
6 |     state: present
7 |   become: true  # user creation needs root; consistent with the rest of this role
8 |
9 | - name: Ensure unzip binary exists
10 |   package:
11 |     name: unzip
12 |     state: present
13 |   # was become_user: consul — package installation must run as root, not the consul user
14 |   become: true
15 |
16 | - name: Check if consul already installed
17 |   stat:
18 |     path: /usr/local/sbin/consul
19 |   register: st_c
20 |
21 | - block:
22 |     - import_tasks: download_consul.yml
23 |
24 |     - name: Extract consul
25 |       unarchive:
26 |         src: /tmp/consul_{{ consul_ver }}.zip
27 |         dest: /usr/local/sbin
28 |         remote_src: true
29 |       become: true
30 |       notify: restart consul
31 |   when: st_c.stat.executable is not defined or not st_c.stat.executable
32 |
33 | - name: Ensure consul.d directory exists
34 |   file:
35 |     path: /etc/consul.d
36 |     state: directory
37 |     mode: "0755"
38 |   notify: restart consul
39 |   become: true
40 |
41 | - name: Ensure consul data directory exists and proper permissions
42 |   file:
43 |     path: /var/lib/consul
44 |     state: directory
45 |     mode: "0755"
46 |     recurse: yes
47 |     owner: consul
48 |     group: consul
49 |   become: true
50 |
51 | ## Systemd
52 | - block:
53 |     - name: setup systemd service file
54 |       template:
55 |         src: templates/consul.service.j2
56 |         dest: /etc/systemd/system/consul.service
57 |       become: true
58 |
59 |     - name: Setup consul systemd service
60 |       systemd:
61 |         name: consul
62 |         daemon_reload: yes
63 |         state: started
64 |         enabled: yes
65 |       become: true
66 |   when: ansible_service_mgr == "systemd"
67 | ## /Systemd
68 |
69 | ## SysVInit
70 | #- block:
71 | #  - name: setup sysvinit consul file
72 | #
73 | #  when: ansible_service_mgr == "sysvinit"
--------------------------------------------------------------------------------
/roles/consul_server/templates/consul.service.j2:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=consul agent
3 | Requires=network-online.target
4 | After=network-online.target
5 |
6 | [Service]
7 | User=consul
8 | Group=consul
9 | EnvironmentFile=-/etc/conf.d/consul
10 | Environment=GOMAXPROCS=2
11 | Restart=on-failure
12 | ExecStart=/usr/local/sbin/consul agent $OPTIONS -config-dir=/etc/consul.d -data-dir=/var/lib/consul
13 | ExecReload=/bin/kill -HUP $MAINPID
14 | KillSignal=SIGINT
15 |
16 | [Install]
17 | WantedBy=multi-user.target
18 |
--------------------------------------------------------------------------------
/roles/consul_server/templates/etc_initd/consul.Debian.j2:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | ### BEGIN INIT INFO
3 | # Provides: consul
4 | # Required-Start: $local_fs $remote_fs
5 | # Required-Stop: $local_fs $remote_fs
6 | # Default-Start: 2 3 4 5
7 | # Default-Stop: S 0 1 6
8 | # Short-Description: Consul service discovery framework
9 | # Description: Healthchecks local services and registers
10 | # them in a central consul database.
11 | ### END INIT INFO
12 |
13 | # Do NOT "set -e"
14 |
15 | # PATH should only include /usr/* if it runs after the mountnfs.sh script
16 | PATH=/usr/sbin:/usr/bin:/sbin:/bin
17 | DESC="Consul service discovery framework"
18 | NAME=consul
19 | DAEMON={{consul_bin}}
20 | DAEMON_ARGS="agent -config-dir={{consul_config_dir}}"
21 | USER={{consul_user}}
22 | PIDFILE=/var/run/$NAME.pid
23 | SCRIPTNAME=/etc/init.d/$NAME
24 |
25 | # Exit if the package is not installed
26 | [ -x "$DAEMON" ] || exit 0
27 |
28 | # Read configuration variable file if it is present
29 | [ -r /etc/default/$NAME ] && . /etc/default/$NAME
30 |
31 | # Load the VERBOSE setting and other rcS variables
32 | [ -f /etc/default/rcS ] && . /etc/default/rcS
33 |
34 | # Define LSB log_* functions.
35 | # Depend on lsb-base (>= 3.0-6) to ensure that this file is present.
36 | . /lib/lsb/init-functions
37 |
38 | #
39 | # Function that starts the daemon/service
40 | #
41 | do_start()
42 | {
43 | # Return
44 | # 0 if daemon has been started
45 | # 1 if daemon was already running
46 | # 2 if daemon could not be started
47 | echo "Starting consul and backgrounding"
48 | start-stop-daemon --start \
49 | --quiet \
50 | --pidfile $PIDFILE \
51 | --exec $DAEMON \
52 | --chuid $USER \
53 | --background \
54 | --make-pidfile --test > /dev/null \
55 | || return 1
56 | start-stop-daemon --start \
57 | --quiet \
58 | --pidfile $PIDFILE \
59 | --exec $DAEMON \
60 | --chuid $USER \
61 | --background \
62 | --make-pidfile -- \
63 | $DAEMON_ARGS ${CONSUL_OPTS} \
64 | || return 2
65 |
66 | echo -n "Waiting for consul daemon to be listening..."
67 | for i in `seq 1 30`; do
68 | if ! start-stop-daemon --quiet --stop --test --pidfile $PIDFILE --exec $DAEMON --user $USER; then
69 | echo " FAIL: consul process died"
70 | return 2
71 | fi
72 | if "$DAEMON" info >/dev/null; then
73 | echo " OK"
74 | return 0
75 | fi
76 | echo -n .
77 | sleep 1
78 | done
79 | echo " FAIL: consul process is alive, but is not listening."
80 | return 2
81 | }
82 |
83 | #
84 | # Function that stops the daemon/service
85 | #
86 | do_stop()
87 | {
88 | # first try doing this gracefully
89 | "$DAEMON" leave
90 | # Return
91 | # 0 if daemon has been stopped
92 | # 1 if daemon was already stopped
93 | # 2 if daemon could not be stopped
94 | # other if a failure occurred
95 | start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE --name $NAME
96 | RETVAL="$?"
97 | [ "$RETVAL" = 2 ] && return 2
98 | # Wait for children to finish too if this is a daemon that forks
99 | # and if the daemon is only ever run from this initscript.
100 | # If the above conditions are not satisfied then add some other code
101 | # that waits for the process to drop all resources that could be
102 | # needed by services started subsequently. A last resort is to
103 | # sleep for some time.
104 | start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON
105 | [ "$?" = 2 ] && return 2
106 | # Many daemons don't delete their pidfiles when they exit.
107 | rm -f $PIDFILE
108 | return "$RETVAL"
109 | }
110 |
111 | #
112 | # Function that sends a SIGHUP to the daemon/service
113 | #
114 | do_reload() {
115 | #
116 | # If the daemon can reload its configuration without
117 | # restarting (for example, when it is sent a SIGHUP),
118 | # then implement that here.
119 | #
120 | start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE --name $NAME
121 | return 0
122 | }
123 |
124 | case "$1" in
125 | start)
126 | [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
127 | do_start
128 | case "$?" in
129 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
130 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
131 | esac
132 | ;;
133 | stop)
134 | [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
135 | do_stop
136 | case "$?" in
137 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
138 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
139 | esac
140 | ;;
141 | #reload|force-reload)
142 | #
143 | # If do_reload() is not implemented then leave this commented out
144 | # and leave 'force-reload' as an alias for 'restart'.
145 | #
146 | #log_daemon_msg "Reloading $DESC" "$NAME"
147 | #do_reload
148 | #log_end_msg $?
149 | #;;
150 | restart|force-reload)
151 | #
152 | # If the "reload" option is implemented then remove the
153 | # 'force-reload' alias
154 | #
155 | log_daemon_msg "Restarting $DESC" "$NAME"
156 | do_stop
157 | case "$?" in
158 | 0|1)
159 | do_start
160 | case "$?" in
161 | 0) log_end_msg 0 ;;
162 | 1) log_end_msg 1 ;; # Old process is still running
163 | *) log_end_msg 1 ;; # Failed to start
164 | esac
165 | ;;
166 | *)
167 | # Failed to stop
168 | log_end_msg 1
169 | ;;
170 | esac
171 | ;;
172 | *)
173 | #echo "Usage: $SCRIPTNAME {start|stop|restart|reload|force-reload}" >&2
174 | echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload}" >&2
175 | exit 3
176 | ;;
177 | esac
178 |
179 | :
180 |
--------------------------------------------------------------------------------
/roles/consul_server/templates/etc_initd/consul.RedHat.j2:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # consul Manage the consul agent
4 | #
5 | # chkconfig: 2345 95 95
6 | # description: Consul is a tool for service discovery and configuration
7 | # processname: consul
8 | # config: /etc/consul.d
9 | # pidfile: /var/run/consul.pid
10 |
11 | ### BEGIN INIT INFO
12 | # Provides: consul
13 | # Required-Start: $local_fs $network
14 | # Required-Stop:
15 | # Should-Start:
16 | # Should-Stop:
17 | # Default-Start: 2 3 4 5
18 | # Default-Stop: 0 1 6
19 | # Short-Description: Manage the consul agent
20 | # Description: Consul is a tool for service discovery and configuration
21 | ### END INIT INFO
22 |
23 | # source function library
24 | . /etc/rc.d/init.d/functions
25 |
26 | prog="consul"
27 | user="{{consul_user}}"
28 | exec="{{consul_bin}}"
29 | pidfile="/var/run/$prog.pid"
30 | lockfile="/var/lock/subsys/$prog"
31 | logfile="/var/log/$prog"
32 | confdir="{{consul_config_dir}}"
33 | DAEMON_ARGS="agent -config-dir=$confdir "
34 |
35 | # pull in sysconfig settings
36 | [ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog
37 |
38 | export GOMAXPROCS=${GOMAXPROCS:-2}
39 |
40 | start() {
41 | [ -x $exec ] || exit 5
42 |
43 | [ -d $confdir ] || exit 6
44 |
45 | umask 077
46 |
47 | touch $logfile $pidfile
48 | chown $user:$user $logfile $pidfile
49 |
50 | echo -n $"Starting $prog: "
51 |
52 | ## holy shell shenanigans, batman!
53 | ## daemon can't be backgrounded. we need the pid of the spawned process,
54 | ## which is actually done via runuser thanks to --user. you can't do "cmd
55 | ## &; action" but you can do "{cmd &}; action".
56 | daemon \
57 | --pidfile=$pidfile \
58 | --user=consul \
59 | " { $exec ${DAEMON_ARGS} ${CONSUL_OPTS} &>> $logfile & } ; echo \$! >| $pidfile "
60 |
61 | RETVAL=$?
62 | echo
63 |
64 | [ $RETVAL -eq 0 ] && touch $lockfile
65 |
66 | return $RETVAL
67 | }
68 |
69 | stop() {
70 | echo -n $"Shutting down $prog: "
71 | ## graceful shutdown with SIGINT
72 | #killproc -p $pidfile $exec -INT
73 | $exec leave
74 | RETVAL=$?
75 | echo
76 | [ $RETVAL -eq 0 ] && rm -f $lockfile
77 | return $RETVAL
78 | }
79 |
80 | restart() {
81 | stop
82 | start
83 | }
84 |
85 | reload() {
86 | echo -n $"Reloading $prog: "
87 | #killproc -p $pidfile $exec -HUP
88 | $exec reload
89 | echo
90 | }
91 |
92 | force_reload() {
93 | restart
94 | }
95 |
96 | rh_status() {
97 | status -p "$pidfile" -l $prog $exec
98 | }
99 |
100 | rh_status_q() {
101 | rh_status >/dev/null 2>&1
102 | }
103 |
104 | case "$1" in
105 | start)
106 | rh_status_q && exit 0
107 | $1
108 | ;;
109 | stop)
110 | rh_status_q || exit 0
111 | $1
112 | ;;
113 | restart)
114 | $1
115 | ;;
116 | reload)
117 | rh_status_q || exit 7
118 | $1
119 | ;;
120 | force-reload)
121 | force_reload
122 | ;;
123 | status)
124 | rh_status
125 | ;;
126 | condrestart|try-restart)
127 | rh_status_q || exit 0
128 | restart
129 | ;;
130 | *)
131 | echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}"
132 | exit 2
133 | esac
134 |
135 | exit $?
136 |
--------------------------------------------------------------------------------
/roles/cups_server/handlers/main.yml:
--------------------------------------------------------------------------------
1 | - name: restart cups
2 | service:
3 | name: cups.service
4 | state: restarted
5 | become: yes
6 |
--------------------------------------------------------------------------------
/roles/cups_server/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Install cups
2 |   apt:
3 |     name: ['cups', 'google-cloud-print-connector']
4 |     cache_valid_time: 3600
5 |     state: present
6 |   become: yes
7 |
8 | - name: Configure cupsd.conf
9 |   template:
10 |     src: templates/cupsd.conf.j2
11 |     dest: /etc/cups/cupsd.conf
12 |   become: yes
13 |   notify: restart cups
14 |
15 | - name: Ensure username part of sys group
16 |   user:
17 |     name: "{{ ansible_user }}"
18 |     groups: lpadmin
19 |     append: yes  # without append, `groups` REPLACES all of the user's supplementary groups
20 |   become: yes
21 |
22 | - name: Start services
23 |   service:
24 |     name: "{{ item }}"
25 |     state: started
26 |     enabled: yes
27 |   with_items:
28 |     - cups.service
29 |     - cups-browsed.service
30 |   become: yes
--------------------------------------------------------------------------------
/roles/cups_server/templates/cupsd.conf.j2:
--------------------------------------------------------------------------------
1 | # Disable cups internal logging - use logrotate instead
2 | MaxLogSize 0
3 | LogLevel warn
4 | #PageLogFormat
5 | Listen 631
6 | ServerAlias *
7 | BrowseAllow All
8 | Browsing On
9 | BrowseLocalProtocols dnssd
10 | # Default authentication type, when authentication is required...
11 | DefaultAuthType Basic
12 | WebInterface Yes
13 |
14 |
15 | # Restrict access to the server...
16 | # By default only localhost connections are possible
17 |
18 | Order allow,deny
19 | Allow from @LOCAL
20 |
21 |
22 | # Restrict access to the admin pages...
23 |
24 | Order allow,deny
25 | Allow from @LOCAL
26 |
27 |
28 | # Restrict access to configuration files...
29 |
30 | AuthType Basic
31 | Require user @SYSTEM
32 | Order allow,deny
33 | Allow from @LOCAL
34 |
35 |
36 |
37 | AuthType Default
38 | Require user @SYSTEM
39 | Order allow,deny
40 |
41 |
42 | # Job/subscription privacy...
43 | JobPrivateAccess default
44 | JobPrivateValues default
45 | SubscriptionPrivateAccess default
46 | SubscriptionPrivateValues default
47 |
48 | Order deny,allow
49 |
50 |
51 |
52 | Require user @OWNER @SYSTEM
53 | Order deny,allow
54 |
55 |
56 | # All administration operations require an administrator to authenticate...
57 |
58 | AuthType Default
59 | Require user @SYSTEM
60 | Order deny,allow
61 |
62 |
63 | # All printer operations require a printer operator to authenticate...
64 |
65 | AuthType Default
66 | Require user @SYSTEM
67 | Order deny,allow
68 |
69 |
70 | # Only the owner or an administrator can cancel or authenticate a job...
71 |
72 | Require user @OWNER @SYSTEM
73 | Order deny,allow
74 |
75 |
76 |
77 | Order deny,allow
78 |
79 |
80 |
81 | # Set the authenticated printer/job policies...
82 |
83 | # Job/subscription privacy...
84 | JobPrivateAccess default
85 | JobPrivateValues default
86 | SubscriptionPrivateAccess default
87 | SubscriptionPrivateValues default
88 |
89 | # Job-related operations must be done by the owner or an administrator...
90 |
91 | AuthType Default
92 | Order deny,allow
93 |
94 |
95 |
96 | AuthType Default
97 | Require user @OWNER @SYSTEM
98 | Order deny,allow
99 |
100 |
101 | # All administration operations require an administrator to authenticate...
102 |
103 | AuthType Default
104 | Require user @SYSTEM
105 | Order deny,allow
106 |
107 |
108 | # All printer operations require a printer operator to authenticate...
109 |
110 | AuthType Default
111 | Require user @SYSTEM
112 | Order deny,allow
113 |
114 |
115 | # Only the owner or an administrator can cancel or authenticate a job...
116 |
117 | AuthType Default
118 | Require user @OWNER @SYSTEM
119 | Order deny,allow
120 |
121 |
122 |
123 | Order deny,allow
124 |
125 |
126 |
--------------------------------------------------------------------------------
/roles/docker/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Ensure pre-requesite packages installed
2 |   apt:
3 |     # list form installs in one transaction; with_items package loops are deprecated
4 |     name:
5 |       - apt-transport-https
6 |       - ca-certificates
7 |       - curl
8 |       - software-properties-common
9 |     state: present
10 |   become: yes
11 |
12 | - name: Ensure Docker apt-key setup
13 |   apt_key:
14 |     id: 0EBFCD88
15 |     url: https://download.docker.com/linux/ubuntu/gpg
16 |     state: present
17 |   become: yes
18 |
19 | - name: Enable docker apt repo
20 |   apt_repository:
21 |     repo: deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename }} stable
22 |     state: present
23 |   become: yes
24 |
25 | - name: Ensure docker installed
26 |   apt:
27 |     name: docker-ce
28 |     cache_valid_time: 3600
29 |     state: present
30 |   become: yes
31 |
32 | - name: Ensure docker enabled at boot
33 |   service:
34 |     name: docker
35 |     state: started
36 |     enabled: yes
37 |   become: yes
38 |
--------------------------------------------------------------------------------
/roles/flowbat/defaults/main.yml:
--------------------------------------------------------------------------------
1 | flowbat_user: flowbat
2 | flowbat_group: flowbat
3 |
--------------------------------------------------------------------------------
/roles/flowbat/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Create flowbat group
2 | group:
3 | name: flowbat
4 | state: present
5 | become: yes
6 |
7 | - name: Create flowbat user
8 | user:
9 | name: flowbat
10 | group: flowbat
11 | become: yes
12 |
13 | - name: Install packages
14 | package:
15 | name: "{{ item }}"
16 | state: present
17 | with_items:
18 | - git-core
19 | - curl
20 | - nodejs
21 | - npm
22 | - gcc-c++
23 | become: yes
24 |
25 | - name: Install npm packages
26 | npm:
27 | name: "{{ item }}"
28 | global: yes
29 | state: present
30 | become: yes
31 | with_items:
32 | - meteorite
33 |
34 | # Run block as flowbat user
35 | - block:
36 | - name: git clone flowbat repo
37 | git:
38 | repo: https://github.com/chrissanders/FlowBAT.git
39 | dest: /home/flowbat/FlowBAT
40 | clone: yes
41 | update: yes
42 | force: yes
43 |
44 | - name: download meteor install
45 | get_url:
46 | url: https://install.meteor.com/
47 | dest: /home/flowbat/meteor.sh
48 | mode: 0755
49 |
50 | - name: run meteor install
51 | command: /home/flowbat/meteor.sh
52 |
53 | - name: Install local packages
54 | npm:
55 | name: "{{ item }}"
56 | state: present
57 | path: /home/flowbat/FlowBAT
58 | with_items:
59 | - babel-runtime
60 | - bcrypt
61 |
62 | # - name: copy flowbat settings template
63 | # copy:
64 | # remote_src: true
65 | # src: /home/flowbat/flowbat/settings/prod.sample.json
66 | # dest: /home/flowbat/flowbat/settings/ansible.json
67 |
68 | # - name: update flowbat url settings
69 | # lineinfile:
70 | # dest: /home/flowbat/flowbat/settings/ansible.json
71 | # regexp: '^\s*"baseUrl": '
72 | # line: ' "baseUrl": "http://127.0.0.1:1800",'
73 | # state: present
74 |
75 | # - name: update flowbat email settings
76 | # lineinfile:
77 | # dest: /home/flowbat/flowbat/settings/ansible.json
78 | # regexp: '^\s*"mailUrl": '
79 | # line: ' "mailUrl": "",'
80 | # state: present
81 |
82 | - name: mrt install
83 | command: mrt install
84 | args:
85 | chdir: /home/flowbat/FlowBAT/
86 | become_user: flowbat
87 | become: yes
88 |
89 | # Finish up as root
90 | - block:
91 | - name: Copy systemd flowbat file
92 | template:
93 | src: templates/flowbat.service.j2
94 | dest: /etc/systemd/system/flowbat.service
95 |
96 | - name: Enable flowbat service
97 | service:
98 | name: flowbat
99 | state: restarted
100 | enabled: yes
101 | become: yes
102 |
--------------------------------------------------------------------------------
/roles/flowbat/templates/flowbat.service.j2:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=flowbat
3 | Requires=network-online.target
4 | After=network-online.target
5 |
6 | [Service]
7 | User={{ flowbat_user }}
8 | Group={{ flowbat_group }}
9 | Restart=on-failure
10 | ExecStart=/home/flowbat/.meteor/meteor --port 1800 run --settings settings/dev.json "$@"
11 | ExecReload=/bin/kill -HUP $MAINPID
12 | KillSignal=SIGINT
13 | WorkingDirectory=/home/flowbat/FlowBAT
14 |
15 | [Install]
16 | WantedBy=multi-user.target
17 |
--------------------------------------------------------------------------------
/roles/git_server/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Install scmgit
2 |   package:
3 |     name: scmgit
4 |     state: present
5 |   become: yes
6 |
7 | - name: Create git group
8 |   group:
9 |     name: git
10 |     gid: 104
11 |     state: present
12 |   become: yes
13 |
14 | - name: Create git user
15 |   user:
16 |     name: git
17 |     uid: 104
18 |     groups: git
19 |     state: present
20 |   become: yes
21 |
22 | - name: Adding git authorized keys
23 |   authorized_key: user=git key=https://github.com/precurse.keys
24 |
25 | - name: Change git repo permissions
26 |   # NOTE(review): no become here — recursively chowning /export/git likely requires root; confirm
27 |   file:
28 |     path: /export/git
29 |     state: directory
30 |     recurse: yes
31 |     owner: git
32 |     group: git
--------------------------------------------------------------------------------
/roles/grafana/files/grafana.yum:
--------------------------------------------------------------------------------
1 | [grafana]
2 | name=grafana
3 | baseurl=https://packagecloud.io/grafana/stable/el/6/$basearch
4 | repo_gpgcheck=1
5 | enabled=1
6 | gpgcheck=1
7 | gpgkey=https://packagecloud.io/gpg.key https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana
8 | sslverify=1
9 | sslcacert=/etc/pki/tls/certs/ca-bundle.crt
10 |
--------------------------------------------------------------------------------
/roles/grafana/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Copy Grafana yum repo
2 |   copy:
3 |     src: grafana.yum
4 |     dest: /etc/yum.repos.d/grafana.repo
5 |   become: yes
6 |
7 | - name: Copy PGP keys
8 |   rpm_key:
9 |     state: present
10 |     key: "{{ item }}"
11 |   become: yes
12 |   with_items:
13 |     - "https://packagecloud.io/gpg.key"
14 |     - "https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana"
15 |
16 | - name: Ensure Grafana installed
17 |   yum:
18 |     name: grafana
19 |     state: present
20 |     update_cache: yes
21 |   become: yes
22 |
23 | - name: Enable grafana service
24 |   service:
25 |     name: grafana-server
26 |     state: started
27 |     enabled: yes
28 |   become: yes  # was missing: service management needs root like every other task here
29 |
--------------------------------------------------------------------------------
/roles/grafana/templates/grafana.repo.j2:
--------------------------------------------------------------------------------
1 | [grafana]
2 | name=grafana
3 | baseurl=https://packagecloud.io/grafana/stable/el/6/$basearch
4 | repo_gpgcheck=1
5 | enabled=1
6 | gpgcheck=1
7 | gpgkey=https://packagecloud.io/gpg.key https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana
8 | sslverify=1
9 | sslcacert=/etc/pki/tls/certs/ca-bundle.crt
10 |
--------------------------------------------------------------------------------
/roles/haproxy/defaults/main.yml:
--------------------------------------------------------------------------------
1 | haproxy_bind_addr: 0.0.0.0
2 | haproxy_bind_port: 443
3 |
4 | haproxy_maxconn: 2048
5 | haproxy_dhparam: 2048
6 |
7 | haproxy_conf: /etc/haproxy/haproxy.cfg
8 | haproxy_chroot: true
9 | haproxy_chroot_dir: /var/lib/haproxy
10 | haproxy_user: haproxy
11 | haproxy_group: haproxy
12 |
13 | haproxy_stats_user: stats
14 | haproxy_stats_pass: stats
15 |
16 | # Certbot vars
17 | haproxy_cb_test: false
18 | haproxy_cb_keysize: 2048
19 | # NOTE(review): tls-sni-01 validation has been retired by Let's Encrypt — confirm certbot still accepts it
20 | haproxy_cb_pref_challenge: tls-sni-01
21 | haproxy_cb_tls_bind_addr: 127.0.0.1
22 | haproxy_cb_tls_bind_port: 18443
23 |
24 | haproxy_cb_http_bind_addr: 127.0.0.1
25 | haproxy_cb_http_bind_port: 18080
26 |
27 | haproxy_cb_email: user@example.com
28 | haproxy_cb_domains:
29 |   - www.example.com
30 |   - example.com
31 |
32 | haproxy_cb_dest_bundle: /etc/haproxy/haproxy.pem
33 | haproxy_cb_prehook: ""
34 | haproxy_cb_posthook: "cat /etc/letsencrypt/live/{{ haproxy_cb_domains[0] }}/{fullchain,privkey}.pem > {{ haproxy_cb_dest_bundle }}; systemctl reload haproxy"
35 |
36 | haproxy_cb_all_flags: '{% if haproxy_cb_test %}--test-cert {% endif %} --standalone --keep --agree-tos --preferred-challenges {{ haproxy_cb_pref_challenge }} --tls-sni-01-port {{ haproxy_cb_tls_bind_port }} --http-01-port {{ haproxy_cb_http_bind_port }} --rsa-key-size {{ haproxy_cb_keysize }} --agree-tos --email {{ haproxy_cb_email }} -q -n --expand --pre-hook "{{ haproxy_cb_prehook }}" --post-hook "{{ haproxy_cb_posthook }}"'
37 | haproxy_cb_certonly_flags: "certonly {{ haproxy_cb_all_flags }} {% for i in haproxy_cb_domains %} -d {{ i }} {% endfor %}"
38 | haproxy_cb_renew_flags: "renew {{ haproxy_cb_all_flags }}"
39 |
40 | haproxy_exporter_version: "0.8.0"
41 |
--------------------------------------------------------------------------------
/roles/haproxy/handlers/main.yml:
--------------------------------------------------------------------------------
1 | - name: reload haproxy
2 | service:
3 | name: haproxy
4 | state: reloaded
5 | become: yes
6 |
--------------------------------------------------------------------------------
/roles/haproxy/tasks/install_certbot.yml:
--------------------------------------------------------------------------------
1 | - name: Download certbot-auto
2 | get_url:
3 | url: https://raw.githubusercontent.com/certbot/certbot/master/certbot-auto
4 | dest: /root/certbot-auto
5 | mode: 0755
6 | become: yes
7 |
8 | - name: Create invalid tmp cert/key to start haproxy (to bootstrap new haproxy install)
9 | shell: "{{ item }}"
10 | args:
11 | chdir: /tmp
12 | creates: "{{ haproxy_cb_dest_bundle }}"
13 | with_items:
14 | - "openssl genrsa -out server.key 2048"
15 | - "openssl req -new -key server.key -out server.csr -subj '/C=GB/ST=London/L=London/O=Global Security/OU=IT Department/CN=example.com'"
16 | - "openssl x509 -req -days 365 -in server.csr -signkey server.key -out server.crt"
17 | - "cat server.crt server.key > {{ haproxy_cb_dest_bundle }}"
18 | become: yes
19 |
20 | - name: Ensure haproxy started
21 | service:
22 | name: haproxy
23 | state: started
24 | become: yes
25 |
26 | - name: Create certbot certs if needed
27 | command: "/root/certbot-auto {{ haproxy_cb_certonly_flags }}"
28 | become: yes
29 |
30 | - name: Setup certbot crontab if needed
31 | cron: name="Autorenew certbot"
32 | special_time="daily"
33 | job="/root/certbot-auto {{ haproxy_cb_renew_flags }}"
34 | cron_file=certbot
35 | user="root"
36 | become: yes
37 |
--------------------------------------------------------------------------------
/roles/haproxy/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Ensure group present
2 |   group:
3 |     name: "{{ haproxy_group }}"
4 |     state: present
5 |   become: yes
6 |
7 | - name: Ensure user present
8 |   user:
9 |     name: "{{ haproxy_user }}"
10 |     group: "{{ haproxy_group }}"
11 |     state: present
12 |   become: yes
13 |
14 | - name: Install haproxy
15 |   yum:
16 |     name: haproxy
17 |     state: present
18 |     update_cache: yes
19 |   become: yes
20 |
21 | - name: Copy haproxy template
22 |   template:
23 |     src: haproxy.conf.j2
24 |     dest: "{{ haproxy_conf }}"
25 |   become: yes
26 |   notify: reload haproxy
27 |   tags: config
28 |
29 | - name: Ensure chroot dir created
30 |   file:
31 |     path: "{{ haproxy_chroot_dir }}"
32 |     owner: "{{ haproxy_user }}"
33 |     group: "{{ haproxy_group }}"
34 |     state: directory
35 |     mode: "0755"  # was 0644: a chroot directory must be traversable (needs the execute bit)
36 |   become: yes
37 |
38 | - name: Setup certbot
39 |   import_tasks: install_certbot.yml
40 |
41 | - name: Enable haproxy
42 |   service:
43 |     name: haproxy
44 |     state: started
45 |     enabled: yes
46 |   become: yes
47 |
48 | - name: Download haproxy exporter
49 |   get_url:
50 |     url: https://github.com/prometheus/haproxy_exporter/releases/download/v{{ haproxy_exporter_version }}/haproxy_exporter-{{ haproxy_exporter_version }}.linux-amd64.tar.gz
51 |     dest: /root/haproxy_exporter-{{ haproxy_exporter_version }}.linux-amd64.tar.gz
52 |   become: yes
53 |
54 | - name: Extract haproxy exporter
55 |   unarchive:
56 |     remote_src: true
57 |     # was a bare relative filename: must match the absolute path the download task used
58 |     src: /root/haproxy_exporter-{{ haproxy_exporter_version }}.linux-amd64.tar.gz
59 |     dest: /root/
60 |   become: yes
61 |
62 | - name: Setup haproxy exporter on startup
63 |   lineinfile:
64 |     path: /etc/rc.local
65 |     regexp: '^/root/haproxy_exporter'
66 |     insertbefore: 'exit 0'
67 |     line: '/root/haproxy_exporter-{{ haproxy_exporter_version }}.linux-amd64/haproxy_exporter --haproxy.scrape-uri="http://{{ haproxy_stats_user }}:{{ haproxy_stats_pass }}@localhost:8082/haproxy?stats;csv" &'
68 |
69 |
70 |
--------------------------------------------------------------------------------
/roles/haproxy/templates/haproxy.conf.j2:
--------------------------------------------------------------------------------
# HAProxy configuration, templated by the haproxy role.
global
    log 127.0.0.1 local0
    log-send-hostname
{% if haproxy_chroot %}
    chroot {{ haproxy_chroot_dir }}
{% endif %}
    pidfile /var/run/haproxy.pid
    maxconn {{ haproxy_maxconn }}
    user {{ haproxy_user }}
    group {{ haproxy_group }}
    nbproc 1
    daemon
    tune.ssl.default-dh-param {{ haproxy_dhparam }}
    ssl-default-bind-options no-tls-tickets ssl-min-ver TLSv1.2
    ssl-default-bind-ciphers EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH


defaults
    log global
    option http-ignore-probes
    option forwardfor except 127.0.0.0/8
    stats enable
    timeout connect 5000
    timeout client 50000
    timeout server 50000

# Stats listener; credentials templated from role vars. Also scraped by the
# haproxy exporter launched from rc.local (see tasks/main.yml).
listen stats
    bind :::8082
    mode http
    stats enable
    stats uri /
    stats realm private
    stats auth {{ haproxy_stats_user }}:{{ haproxy_stats_pass }}

# TCP-level demultiplexer on the public port: inspects the first bytes of a
# connection to route TLS, SSH and OpenVPN traffic sharing the same port.
listen letsencrypt-tls01-detect
    mode tcp
    option tcplog

    bind {{ haproxy_bind_addr }}:{{ haproxy_bind_port }}

    # Wait up to 5s for enough payload to classify the connection;
    # a TLS ClientHello (ssl_hello_type 1) is accepted immediately.
    tcp-request inspect-delay 5s
    tcp-request content accept if { req.ssl_hello_type 1 }

    # ACME tls-sni-01 challenges (SNI ending in .acme.invalid) go to certbot.
    use_backend letsencrypt-tls01 if { req.ssl_sni -m end .acme.invalid }
    # First 7 payload bytes "SSH-2.0" (hex 53 53 48 2d 32 2e 30) -> SSH.
    use_backend ssh if { payload(0,7) -m bin 5353482d322e30 }
    # Non-empty, non-TLS payload -> OpenVPN.
    use_backend openvpn if !{ req.ssl_hello_type 1 } !{ req.len 0 }

    # Real TLS traffic loops back into the https-in frontend below,
    # passing the client address via the PROXY protocol (send-proxy).
    use-server https-server if { req.ssl_hello_type 1 }
    server https-server 127.0.0.1:19443 weight 0 send-proxy

# HTTPS terminator; only reachable via the detector above (accept-proxy).
frontend https-in
    mode http
    option httplog
    option forwardfor
    option http-server-close
    option httpclose

    bind 127.0.0.1:19443 ssl crt {{ haproxy_cb_dest_bundle }} accept-proxy

    rspadd Strict-Transport-Security:\ max-age=31536000;\ includeSubDomains;\ preload
    capture request header User-agent len 100

    # NOTE(review): this bind is TLS-only, so !{ ssl_fc } should never match
    # here and the redirect below looks unreachable — confirm before relying
    # on it for plain-HTTP redirection.
    redirect scheme https code 301 if !{ ssl_fc }
    reqadd X-Forwarded-Proto:\ https if { ssl_fc }

    acl host_subsonic hdr(host) -i subsonic.aklaus.ca
    use_backend host_subsonic_be if host_subsonic

backend letsencrypt-tls01
    mode tcp
    option tcplog
    server letsencrypt {{ haproxy_cb_tls_bind_addr }}:{{ haproxy_cb_tls_bind_port }}

# Raw TCP forwards for BitTorrent and Plex on their native ports.
listen transmission
    mode tcp
    bind {{ haproxy_bind_addr }}:51413

    server transmission transmission.dmz:51413

listen plex
    mode tcp
    option tcplog
    bind {{ haproxy_bind_addr }}:32400

    server plex plex.dmz:32400

backend host_subsonic_be
    mode http
    option httplog
    option forwardfor # This sets X-Forwarded-For
    rspdel ^Strict-Transport-Security:.* #Remove hsts header from backend applications

    server subsonic subsonic.dmz:4040 check

backend ssh
    mode tcp
    option tcplog
    server ssh shell.dmz:22

backend openvpn
    mode tcp
    option tcplog
    server openvpn openvpn.dmz:1194
104 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/defaults/main.yml:
--------------------------------------------------------------------------------
# Where the CA certificate (vault var ca_pem) is written; referenced as
# ca-file in haproxy.cfg.j2 for client-certificate verification.
ca_pem_file: /usr/local/share/ca-certificates/ca.pem

# Combined certificate+key PEM bundle haproxy serves TLS with (crt option).
haproxy_key_file: /etc/haproxy/haproxy-key.pem
4 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/handlers/main.yml:
--------------------------------------------------------------------------------
# Graceful reload, notified whenever config or certificate material changes.
- name: reload haproxy
  become: true
  service:
    name: haproxy
    state: reloaded
6 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/tasks/haproxy-exporter.yml:
--------------------------------------------------------------------------------
# Dedicated unprivileged account the exporter runs as (see the init script).
- name: Create haproxy_exporter user
  user:
    name: haexport
    comment: HAProxy Exporter
  become: yes

- name: Create /opt/haproxy_exporter-0.9.0
  file:
    path: /opt/haproxy_exporter-0.9.0
    state: directory
  become: yes

- name: Download haproxy_exporter
  get_url:
    url: https://github.com/prometheus/haproxy_exporter/releases/download/v0.9.0/haproxy_exporter-0.9.0.linux-amd64.tar.gz
    dest: /tmp/haproxy_exporter-0.9.0.linux-amd64.tar.gz
  become: yes

- name: Extract haproxy_exporter
  unarchive:
    src: /tmp/haproxy_exporter-0.9.0.linux-amd64.tar.gz
    dest: /opt/haproxy_exporter-0.9.0
    remote_src: true
  become: yes

# Octal mode quoted so YAML does not reinterpret it as a decimal integer.
- name: Copy init script
  template:
    src: haproxy-exporter.init
    dest: /etc/init.d/prometheus-haproxy-exporter
    mode: "0755"
  become: yes

- name: Copy init defaults
  template:
    src: haproxy-defaults.init
    dest: /etc/default/prometheus-haproxy-exporter
  become: yes

- name: Enable haproxy-exporter on startup
  service:
    name: prometheus-haproxy-exporter
    state: started
    enabled: yes
  become: yes
45 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/tasks/main.yml:
--------------------------------------------------------------------------------
- name: Install haproxy
  apt:
    name: haproxy
    state: present
    update_cache: yes
  become: yes

# NOTE(review): this only drops the CA file in place; update-ca-certificates
# is never run, so the system trust store is not refreshed. haproxy reads
# the file directly (ca-file in haproxy.cfg.j2), which is why this works.
# Reload so a rotated CA takes effect.
- name: Copy ca certificate
  copy:
    content: "{{ ca_pem }}"
    dest: "{{ ca_pem_file }}"
  become: yes
  notify: reload haproxy

# Certificate followed by key, concatenated into the single PEM bundle
# haproxy's "crt" option expects. Reload so a rotated cert is served
# (previously only config changes triggered the handler).
- name: Copy haproxy key-crt
  copy:
    content: "{{ kim_haproxy_crt + kim_haproxy_key }}"
    dest: "{{ haproxy_key_file }}"
    owner: root
    group: haproxy
    mode: "0640"
  become: yes
  notify: reload haproxy

- name: Setup haproxy.cfg
  template:
    src: haproxy.cfg.j2
    dest: /etc/haproxy/haproxy.cfg
  become: yes
  notify: reload haproxy

- name: Ensure haproxy started and on boot
  service:
    name: haproxy
    state: started
    enabled: yes
  become: yes

- include: haproxy-exporter.yml
38 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/templates/haproxy-defaults.init:
--------------------------------------------------------------------------------
# Arguments passed to haproxy_exporter by /etc/init.d/prometheus-haproxy-exporter.
# Scrapes the local stats listener defined in haproxy.cfg.j2 (port 8082).
ARGS='--haproxy.scrape-uri=http://localhost:8082/;csv'
2 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/templates/haproxy-exporter.init:
--------------------------------------------------------------------------------
#!/bin/sh
# kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing.
if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then
    set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . /lib/init/init-d-script
fi
### BEGIN INIT INFO
# Provides: prometheus-haproxy-exporter
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Prometheus exporter for haproxy metrics
# Description: Prometheus exporter for haproxy metrics, written in Go
# with pluggable metric collectors.
### END INIT INFO

# Author: Martín Ferrari
# Author (haproxy): Andrew Klaus

# Fixed: previously said "machine metrics" — a leftover from the
# node-exporter init script this was adapted from.
DESC="Prometheus exporter for haproxy metrics"
DAEMON=/opt/haproxy_exporter-0.9.0/haproxy_exporter-0.9.0.linux-amd64/haproxy_exporter
NAME=prometheus-haproxy-exporter
USER=haexport
PIDFILE=/var/run/prometheus/prometheus-haproxy-exporter.pid
LOGFILE=/var/log/prometheus/prometheus-haproxy-exporter.log

# daemon(1) supervises the exporter: pidfile handling, log redirection,
# and dropping privileges to $USER.
HELPER=/usr/bin/daemon
HELPER_ARGS="--name=$NAME --output=$LOGFILE --pidfile=$PIDFILE --user=$USER"

# Extra exporter flags come from /etc/default/prometheus-haproxy-exporter.
ARGS=""
[ -r /etc/default/$NAME ] && . /etc/default/$NAME

do_start_prepare()
{
    mkdir -p `dirname $PIDFILE` || true
    chown -R $USER: `dirname $LOGFILE`
    chown -R $USER: `dirname $PIDFILE`
}

do_start_cmd()
{
    # Return
    #   0 if daemon has been started
    #   1 if daemon was already running
    #   2 if daemon could not be started
    $HELPER $HELPER_ARGS --running && return 1
    $HELPER $HELPER_ARGS -- $DAEMON $ARGS || return 2
    return 0
}

do_stop_cmd()
{
    # Return
    #   0 if daemon has been stopped
    #   1 if daemon was already stopped
    #   2 if daemon could not be stopped
    #   other if a failure occurred
    $HELPER $HELPER_ARGS --running || return 1
    $HELPER $HELPER_ARGS --stop || return 2
    # wait for the process to really terminate
    for n in 1 2 3 4 5; do
        sleep 1
        $HELPER $HELPER_ARGS --running || break
    done
    $HELPER $HELPER_ARGS --running || return 0
    return 2
}
69 |
--------------------------------------------------------------------------------
/roles/haproxy_kim/templates/haproxy.cfg.j2:
--------------------------------------------------------------------------------
# HAProxy configuration for the "kim" host (Jinja2-templated).
global
    log /dev/log local0
    log /dev/log local1 notice
    chroot /var/lib/haproxy
    stats socket /run/haproxy/admin.sock mode 660 level admin
    stats timeout 30s
    user haproxy
    group haproxy
    daemon

    ca-base /etc/ssl/certs
    crt-base /etc/ssl/private

    tune.ssl.default-dh-param 2048
    ssl-default-bind-options no-sslv3
    ssl-default-bind-ciphers EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH


defaults
    log global
    mode http
    option httplog
    option dontlognull
    stats enable
    timeout connect 5000
    timeout client 50000
    timeout server 50000
    errorfile 400 /etc/haproxy/errors/400.http
    errorfile 403 /etc/haproxy/errors/403.http
    errorfile 408 /etc/haproxy/errors/408.http
    errorfile 500 /etc/haproxy/errors/500.http
    errorfile 502 /etc/haproxy/errors/502.http
    errorfile 503 /etc/haproxy/errors/503.http
    errorfile 504 /etc/haproxy/errors/504.http


# Loopback-only stats page; scraped by haproxy_exporter
# (see templates/haproxy-defaults.init).
listen stats
    bind 127.0.0.1:8082
    mode http
    stats enable
    stats uri /
    stats realm private

# Public entry point: 443 terminates TLS with optional client-cert
# verification; 80 exists only to be redirected to https.
frontend frontend_ip
    bind 0.0.0.0:443 ssl crt {{ haproxy_key_file }} ca-file {{ ca_pem_file }} verify optional
    bind :::443 ssl crt {{ haproxy_key_file }} ca-file {{ ca_pem_file }} verify optional

    bind 0.0.0.0:80 transparent
    bind :::80 transparent

{% for ip in kim_acl_ips %}
    acl white_list src {{ ip }}
{% endfor %}

    # Allow white-list and client-cert verified
    http-request deny unless white_list || { ssl_c_used }

    # Always redirect to https
    http-request redirect scheme https code 301 if !{ ssl_fc }

    # Path-based routing to the local service backends below.
    acl trans_path url_beg /transmission
    acl sonarr_path url_beg /sonarr
    acl radarr_path url_beg /radarr

    use_backend transmission if trans_path
    use_backend sonarr if sonarr_path
    use_backend radarr if radarr_path

    default_backend syncthing

# IP-whitelisted TCP passthroughs to the local exporters / minio.
listen hastats_ip
    bind 0.0.0.0:19101
{% for ip in kim_acl_ips %}
    acl white_list src {{ ip }}
{% endfor %}
    server haexport localhost:9101
    tcp-request content accept if white_list
    tcp-request content reject

listen sys_stats_ip
    bind 0.0.0.0:19100
{% for ip in kim_acl_ips %}
    acl white_list src {{ ip }}
{% endfor %}
    server haexport localhost:9100
    tcp-request content accept if white_list
    tcp-request content reject

listen minio_ip
    bind 0.0.0.0:9001 ssl crt {{ haproxy_key_file }}
{% for ip in kim_acl_ips %}
    acl white_list src {{ ip }}
{% endfor %}
    server minio localhost:9000
    tcp-request content accept if white_list
    tcp-request content reject

backend syncthing
    mode http
    option httplog
    option forwardfor # This sets X-Forwarded-For
    # NOTE(review): server is named "sonarr" but points at the syncthing GUI
    # port (8384) — looks like a copy-paste leftover; renaming would change
    # the label in stats output, so flagging rather than changing it here.
    server sonarr localhost:8384

backend sonarr
    mode http
    option httplog
    option forwardfor # This sets X-Forwarded-For
    server sonarr localhost:8989

backend radarr
    mode http
    option httplog
    option forwardfor # This sets X-Forwarded-For
    server radarr localhost:7878

backend transmission
    mode http
    option httplog
    option forwardfor # This sets X-Forwarded-For
    server transmission localhost:9091

backend ssh
    mode tcp
    option tcplog
    server ssh localhost:22

backend openvpn
    mode tcp
    option tcplog
    server openvpn localhost:1443
131 |
--------------------------------------------------------------------------------
/roles/influxdb/tasks/main.yml:
--------------------------------------------------------------------------------
- name: Update packages
  yum:
    name: "*"
    state: latest
  tags: update_packages

- name: Add influxdb key
  rpm_key:
    state: present
    key: https://repos.influxdata.com/influxdb.key

- name: Add yum repo
  template:
    src: influxdb.repo.j2
    dest: /etc/yum.repos.d/influxdb.repo
    owner: root
    group: root

- name: Install influxdb
  package:
    name: influxdb
    state: present

- name: Influxdb config
  template:
    src: influxdb.conf.j2
    dest: /etc/influxdb/influxdb.conf
    owner: root
    group: root
    mode: "0644"

# "running" is not a valid service state (started/stopped/restarted/reloaded).
- name: Enable influxdb service
  service:
    name: influxdb
    state: started
    enabled: yes
16 |
--------------------------------------------------------------------------------
/roles/influxdb/templates/influxdb.conf.j2:
--------------------------------------------------------------------------------
# InfluxDB (0.x-era) configuration. Template contains no Jinja variables —
# rendered verbatim. Meta, data, WAL and hinted-handoff all live under
# /export/influxdb.
reporting-disabled = false

[meta]
  dir = "/export/influxdb/meta"
  hostname = "localhost"
  bind-address = ":8088"
  retention-autocreate = true
  election-timeout = "1s"
  heartbeat-timeout = "1s"
  leader-lease-timeout = "500ms"
  commit-timeout = "50ms"
  cluster-tracing = false
  raft-promotion-enabled = true

[data]
  dir = "/export/influxdb/data"

  # engine ="bz1"
  max-wal-size = 104857600 # Maximum size the WAL can reach before a flush. Defaults to 100MB.
  wal-flush-interval = "10m" # Maximum time data can sit in WAL before a flush.
  wal-partition-flush-delay = "2s" # The delay time between each WAL partition being flushed.
  wal-dir = "/export/influxdb/wal"
  wal-enable-logging = true

[cluster]
  shard-writer-timeout = "10s" # The time within which a shard must respond to write.
  write-timeout = "5s" # The time within which a write operation must complete on the cluster.

[retention]
  enabled = true
  check-interval = "30m"

[monitor]
  store-enabled = true # Whether to record statistics internally.
  store-database = "_internal" # The destination database for recorded statistics
  store-interval = "10s" # The interval at which to record statistics

# Legacy admin UI; certificate path only used when https-enabled is true.
[admin]
  enabled = true
  bind-address = ":8083"
  https-enabled = false
  https-certificate = "/etc/ssl/influxdb.pem"

[http]
  enabled = true
  bind-address = ":8086"
  auth-enabled = false
  log-enabled = true
  write-tracing = false
  pprof-enabled = false
  https-enabled = false
  https-certificate = "/etc/ssl/influxdb.pem"

[[graphite]]
  enabled = false

[opentsdb]
  enabled = false

[[udp]]
  enabled = false

[continuous_queries]
  log-enabled = true
  enabled = true
  recompute-previous-n = 2
  recompute-no-older-than = "10m"
  compute-runs-per-interval = 10
  compute-no-more-than = "2m"

[hinted-handoff]
  enabled = true
  dir = "/export/influxdb/hh"
  max-size = 1073741824
  max-age = "168h"
  retry-rate-limit = 0
  retry-interval = "1s"

[shard-precreation]
  enabled = true
  check-interval = "10m"
  advance-period = "30m"

[collectd]
  enabled = false
  bind-address = ":8096" # the bind address
  database = "collectd" # Name of the database that will be written to
  retention-policy = ""
  batch-size = 5000 # will flush if this many points get buffered
  batch-pending = 10 # number of batches that may be pending in memory
  batch-timeout = "10s"
  read-buffer = 0 # UDP read buffer size, 0 means to use OS default
  typesdb = "/usr/share/collectd/types.db"
94 |
--------------------------------------------------------------------------------
/roles/influxdb/templates/influxdb.repo.j2:
--------------------------------------------------------------------------------
# Yum repo definition rendered by the influxdb role. $releasever/$basearch
# are yum variables and must reach the rendered file unescaped — Jinja2 does
# NOT strip backslashes, so the previous "\$releasever" ended up literally
# in the file and broke variable expansion (that escaping is only needed in
# shell heredocs, which this was likely copied from).
[influxdb]
name = InfluxDB Repository - RHEL $releasever
baseurl = https://repos.influxdata.com/rhel/$releasever/$basearch/stable
enabled = 1
gpgcheck = 1
gpgkey = https://repos.influxdata.com/influxdb.key
7 |
--------------------------------------------------------------------------------
/roles/joyent_zone_bootstrap/tasks/main.yml:
--------------------------------------------------------------------------------
# The zone may have no python yet, so package steps use raw (module-free).
- name: Upgrading pkgin packages
  raw: /opt/local/bin/pkgin -y update; /opt/local/bin/pkgin -y upgrade
  tags: update_packages

- name: Installing python
  raw: /opt/local/bin/pkgin -y install python27

# Point ansible at the freshly installed interpreter before any real
# modules run. Jinja value quoted so YAML doesn't parse "{{" as flow syntax.
- name: Use pkgsrc python interpreter
  set_fact:
    ansible_python_interpreter: "{{ smartos_python_path }}"

- name: gather facts
  setup:

- name: Set Timezone
  command: sm-set-timezone MST
11 |
--------------------------------------------------------------------------------
/roles/joyent_zone_bootstrap/vars/main.yml:
--------------------------------------------------------------------------------
# Python interpreter path inside a SmartOS/pkgsrc zone (used via set_fact
# for ansible_python_interpreter in tasks/main.yml).
smartos_python_path: /opt/local/bin/python2
2 |
--------------------------------------------------------------------------------
/roles/openldap/tasks/main.yml:
--------------------------------------------------------------------------------
# yum accepts the package list directly; looping one package per task via
# with_items is slower and deprecated for package modules.
- name: Ensure openldap is installed
  yum:
    name:
      - openldap
      - openldap-servers
      - openldap-clients
    state: present

- name: Ensure openldap-server enabled
  service:
    name: slapd
    state: started
    enabled: yes
11 |
--------------------------------------------------------------------------------
/roles/plex/defaults/main.yml:
--------------------------------------------------------------------------------
# RPM download URL; plex_ver is discovered at runtime from plex.tv
# (see tasks/install_yum.yml and files/get_latest_plex_version.py).
plex_yum_url: "https://downloads.plex.tv/plex-media-server/{{ plex_ver }}/plexmediaserver-{{ plex_ver }}.x86_64.rpm"
2 |
--------------------------------------------------------------------------------
/roles/plex/files/get_latest_plex_version.py:
--------------------------------------------------------------------------------
import sys


def find_release(releases, label_substring='CentOS 64-bit'):
    """Return the first release dict whose 'label' contains label_substring.

    :param releases: list of release dicts from the plex.tv downloads API.
    :returns: the matching dict, or None when nothing matches.
    """
    for release in releases:
        if label_substring in release['label']:
            return release
    return None


def extract_version(url):
    """Pull the version segment out of a Plex download URL.

    e.g. https://downloads.plex.tv/plex-media-server/<ver>/plexmediaserver-<ver>.x86_64.rpm
    -> <ver> (the 5th '/'-separated field). No regex needed, and this avoids
    the invalid '\\/' escape the previous re.split() pattern used.
    """
    return url.split('/')[4]


def main():
    # Imported lazily so the pure helpers above stay importable without requests.
    import requests

    r = requests.get("https://plex.tv/api/downloads/1.json")
    releases = r.json()['computer']['Linux']['releases']

    release = find_release(releases)
    if release is None:
        # Previously an unmatched label caused a NameError; fail explicitly.
        sys.stderr.write("No CentOS 64-bit release found\n")
        sys.exit(1)

    print(extract_version(release['url']))


if __name__ == '__main__':
    main()
26 |
--------------------------------------------------------------------------------
/roles/plex/handlers/main.yml:
--------------------------------------------------------------------------------
# Notified by install_yum.yml after a new plexmediaserver RPM is installed.
- name: restart plexmediaserver
  service:
    name: plexmediaserver
    state: restarted
5 |
--------------------------------------------------------------------------------
/roles/plex/tasks/automate_yum_updates.yml:
--------------------------------------------------------------------------------
# Build/runtime dependencies for installing ansible itself via pip.
# Passing the list directly is faster than a with_items loop and avoids the
# deprecated squash behaviour.
- name: Install packages
  yum:
    name:
      - python-pip
      - python-devel
      - openssl-devel
      - gcc
      - git
      - libffi-devel
    state: present
    update_cache: yes
  become: yes

- name: Install ansible
  pip:
    name: ansible
    state: present
  become: yes
20 |
--------------------------------------------------------------------------------
/roles/plex/tasks/install_apt.yml:
--------------------------------------------------------------------------------
- name: Update apt
  apt:
    update_cache: yes

# NOTE(review): both keys are fetched over plain http — consider https or
# vendoring the keys with the role.
- name: Add ninthgate signing key
  apt_key:
    url: http://shell.ninthgate.se/packages/shell-ninthgate-se-keyring.key
    state: present

- name: Add deb-multimedia signing key
  apt_key:
    id: 5C808C2B65558117
    keyserver: pgp.mit.edu
    state: present

- name: Add deb-multimedia repo
  apt_repository:
    repo: deb http://www.deb-multimedia.org jessie main non-free
    state: present

- name: Add ninthgate plex repo
  apt_repository:
    repo: deb http://shell.ninthgate.se/packages/debian wheezy main
    state: present

# Refresh again so the newly added repos are visible (also avoids the
# duplicate "Update apt" task name the file had before).
- name: Update apt after adding repos
  apt:
    update_cache: yes

- name: install packages
  apt:
    name:
      - plexmediaserver
      - deb-multimedia-keyring
    state: present
    update_cache: yes
17 |
--------------------------------------------------------------------------------
/roles/plex/tasks/install_yum.yml:
--------------------------------------------------------------------------------
- name: Install epel
  yum:
    name: epel-release
    state: present

# Needed by get_latest_plex_version.py below.
- name: Install python requests
  yum:
    name: python-requests
    state: present

- name: Copy plex version script
  copy:
    src: files/get_latest_plex_version.py
    dest: /tmp/get_latest_plex_version.py

- name: Get latest plex version from plex.tv
  command: python2 /tmp/get_latest_plex_version.py
  register: plextv_ver
  failed_when: plextv_ver.rc > 0
  changed_when: no

# Previously plextv_ver was registered but never used, so plex_ver (which
# plex_yum_url depends on) stayed undefined unless set elsewhere. Publish it
# as a fact, mirroring the plex_update role.
- name: Set latest plex version var
  set_fact:
    plex_ver: "{{ plextv_ver.stdout }}"
  when: plextv_ver.rc == 0

# rc 1 means "not installed"; only rc > 1 is a real rpm failure.
- name: Check if plex already installed
  command: rpm -q plexmediaserver-*.x86_64
  register: rpm_check
  failed_when: rpm_check.rc > 1
  changed_when: no

- name: Download var version if needed
  get_url:
    url: "{{ plex_yum_url }}"
    dest: /tmp/plex-{{ plex_ver }}.rpm
  when: rpm_check.rc == 1

- name: Install package if needed
  yum:
    name: /tmp/plex-{{ plex_ver }}.rpm
    state: present
  when: rpm_check.rc == 1
  notify: restart plexmediaserver
40 |
--------------------------------------------------------------------------------
/roles/plex/tasks/main.yml:
--------------------------------------------------------------------------------
# OS-family dispatch: exactly one of the two includes runs per host.
# NOTE(review): role-relative includes are usually written without the
# "tasks/" prefix — confirm this path resolves on the Ansible version in use.
- name: Setup plex for apt
  include: tasks/install_apt.yml
  when: ansible_pkg_mgr == "apt"

- name: Setup plex for yum
  include: tasks/install_yum.yml
  when: ansible_pkg_mgr == "yum"

- name: Ensure plex user created
  user:
    name: plex
    state: present

- name: Set owner for plex directory
  file:
    path: /var/lib/plexmediaserver
    owner: plex
    group: plex
    recurse: yes

- name: ensure plexmediaserver is running (and enable it at boot)
  service:
    name: plexmediaserver
    state: started
    enabled: yes
17 |
--------------------------------------------------------------------------------
/roles/plex_update/defaults/main.yml:
--------------------------------------------------------------------------------
# RPM download URL; plex_ver is set from plex.tv at runtime
# (see tasks/install_yum.yml's set_fact).
plex_yum_url: "https://downloads.plex.tv/plex-media-server/{{ plex_ver }}/plexmediaserver-{{ plex_ver }}.x86_64.rpm"
2 |
--------------------------------------------------------------------------------
/roles/plex_update/files/get_latest_plex_version.py:
--------------------------------------------------------------------------------
import sys


def find_release(releases, label_substring='CentOS 64-bit'):
    """Return the first release dict whose 'label' contains label_substring.

    :param releases: list of release dicts from the plex.tv downloads API.
    :returns: the matching dict, or None when nothing matches.
    """
    for release in releases:
        if label_substring in release['label']:
            return release
    return None


def extract_version(url):
    """Pull the version segment out of a Plex download URL.

    e.g. https://downloads.plex.tv/plex-media-server/<ver>/plexmediaserver-<ver>.x86_64.rpm
    -> <ver> (the 5th '/'-separated field). No regex needed, and this avoids
    the invalid '\\/' escape the previous re.split() pattern used.
    """
    return url.split('/')[4]


def main():
    # Imported lazily so the pure helpers above stay importable without requests.
    import requests

    r = requests.get("https://plex.tv/api/downloads/1.json")
    releases = r.json()['computer']['Linux']['releases']

    release = find_release(releases)
    if release is None:
        # Previously an unmatched label caused a NameError; fail explicitly.
        sys.stderr.write("No CentOS 64-bit release found\n")
        sys.exit(1)

    print(extract_version(release['url']))


if __name__ == '__main__':
    main()
26 |
--------------------------------------------------------------------------------
/roles/plex_update/handlers/main.yml:
--------------------------------------------------------------------------------
# Notified by install_yum.yml after a newer plexmediaserver RPM is installed.
- name: restart plexmediaserver
  service:
    name: plexmediaserver
    state: restarted
5 |
--------------------------------------------------------------------------------
/roles/plex_update/tasks/automate_yum_updates.yml:
--------------------------------------------------------------------------------
# Build/runtime dependencies for installing ansible itself via pip.
# Passing the list directly is faster than a with_items loop and avoids the
# deprecated squash behaviour.
- name: Install packages
  yum:
    name:
      - python-pip
      - python-devel
      - openssl-devel
      - gcc
      - git
      - libffi-devel
    state: present
    update_cache: yes
  become: yes

- name: Install ansible
  pip:
    name: ansible
    state: present
  become: yes
20 |
--------------------------------------------------------------------------------
/roles/plex_update/tasks/install_apt.yml:
--------------------------------------------------------------------------------
- name: Update apt
  apt:
    update_cache: yes

# NOTE(review): both keys are fetched over plain http — consider https or
# vendoring the keys with the role.
- name: Add ninthgate signing key
  apt_key:
    url: http://shell.ninthgate.se/packages/shell-ninthgate-se-keyring.key
    state: present

- name: Add deb-multimedia signing key
  apt_key:
    id: 5C808C2B65558117
    keyserver: pgp.mit.edu
    state: present

- name: Add deb-multimedia repo
  apt_repository:
    repo: deb http://www.deb-multimedia.org jessie main non-free
    state: present

- name: Add ninthgate plex repo
  apt_repository:
    repo: deb http://shell.ninthgate.se/packages/debian wheezy main
    state: present

# Refresh again so the newly added repos are visible (also avoids the
# duplicate "Update apt" task name the file had before).
- name: Update apt after adding repos
  apt:
    update_cache: yes

- name: install packages
  apt:
    name:
      - plexmediaserver
      - deb-multimedia-keyring
    state: present
    update_cache: yes
17 |
--------------------------------------------------------------------------------
/roles/plex_update/tasks/install_yum.yml:
--------------------------------------------------------------------------------
- name: Install epel
  yum:
    name: epel-release
    state: present

# Needed by get_latest_plex_version.py below.
- name: Install python requests
  yum:
    name: python-requests
    state: present

- name: Copy plex version script
  copy:
    src: files/get_latest_plex_version.py
    dest: /tmp/get_latest_plex_version.py
  tags: update_packages

# Prints the newest CentOS build version published on plex.tv.
- name: Get latest plex version from plex.tv
  command: python2 /tmp/get_latest_plex_version.py
  register: plextv_ver
  failed_when: plextv_ver.rc > 0
  changed_when: no
  tags: update_packages

- name: Set latest plex version var
  set_fact:
    plex_ver: "{{ plextv_ver.stdout }}"
  when: plextv_ver.rc == 0
  tags: update_packages

# rc 0 means this exact version is already installed, rc 1 means it is not;
# only rc > 1 is treated as a real rpm failure.
- name: Get current installed version
  command: rpm -q plexmediaserver-{{ plex_ver }}.x86_64
  register: rpm_check
  failed_when: rpm_check.rc > 1
  changed_when: no
  tags: update_packages

- name: Download var version if installed version outdated
  get_url:
    url: "{{ plex_yum_url }}"
    dest: /tmp/plex-{{ plex_ver }}.rpm
  when: rpm_check.rc == 1
  tags: update_packages

- name: Install package if installed version outdated
  yum:
    name: /tmp/plex-{{ plex_ver }}.rpm
    state: present
  when: rpm_check.rc == 1
  notify: restart plexmediaserver
  tags: update_packages
51 |
--------------------------------------------------------------------------------
/roles/plex_update/tasks/main.yml:
--------------------------------------------------------------------------------
# OS-family dispatch: exactly one of the two includes runs per host.
# NOTE(review): role-relative includes are usually written without the
# "tasks/" prefix — confirm this path resolves on the Ansible version in use.
- name: Setup plex for apt
  include: tasks/install_apt.yml
  when: ansible_pkg_mgr == "apt"

- name: Setup plex for yum
  include: tasks/install_yum.yml
  when: ansible_pkg_mgr == "yum"

- name: ensure plexmediaserver is running (and enable it at boot)
  service:
    name: plexmediaserver
    state: started
    enabled: yes
14 |
--------------------------------------------------------------------------------
/roles/postgresql/tasks/main.yml:
--------------------------------------------------------------------------------
# Package list passed directly instead of a deprecated with_items loop.
- name: Install postgresql
  package:
    name:
      - postgresql94
    state: present

# NOTE(review): package is "postgresql94" but the service is "postgresql" —
# verify the service name on the target platform.
- name: Enable postgresql
  service:
    name: postgresql
    state: started
    enabled: yes
7 |
--------------------------------------------------------------------------------
/roles/precurse.kim_docker_images/tasks/main.yml:
--------------------------------------------------------------------------------
# The distro python-docker package is removed before installing docker-py
# from pip below, so only one docker SDK is present for the docker_* modules.
- name: Ensure Ubuntu docker-py not installed
  apt:
    name: python-docker
    state: absent
  become: yes

- name: Install python-pip
  apt:
    name: python-pip
    state: present
  become: yes

- name: Install docker-py from pip
  pip:
    name: docker-py
    state: present
  become: yes

- name: Fetch docker images
  docker_image:
    name: "{{ item }}"
    state: present
  with_items:
    - linuxserver/radarr
    - linuxserver/sonarr
    - linuxserver/transmission
    - plexinc/pms-docker
    - precurse/syncthing
    - precurse/syncthing-relay
    - kylemanna/openvpn
    - minio/minio
  become: yes

# Named volumes keep app state across container re-creation.
- name: Create docker volumes
  docker_volume:
    name: "{{ item }}"
    state: present
  with_items:
    - syncthing_config
    - syncthing_relay_config
    - sonarr_config
    - radarr_config
    - transmission_config
    - plex_config
  become: yes

# NOTE(review): newer docker_container releases require env values to be
# strings (e.g. "911"); the bare ints below work with older docker-py-era
# modules — confirm against the installed ansible version.
- name: Ensure plex container created
  docker_container:
    name: plex
    image: plexinc/pms-docker
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "32400:32400"
    volumes:
      - /home/media/movies:/media/movies
      - /home/media/tv:/media/tv
      - plex_config:/config
    env:
      PLEX_UID: 911
      PLEX_GID: 911
  become: yes

# Web UI bound to loopback only; haproxy (haproxy_kim role) fronts it.
- name: Ensure transmission container created
  docker_container:
    name: transmission
    image: linuxserver/transmission
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "127.0.0.1:9091:9091"
      - "51413:51413/tcp"
      - "51413:51413/udp"
    volumes:
      - /home/media/downloads:/media/downloads
      - /home/media/watch:/watch
      - transmission_config:/config
    env:
      TZ: America/Edmonton
      PUID: 911
      PGID: 911
  become: yes

- name: Ensure syncthing container created
  docker_container:
    name: syncthing
    image: precurse/syncthing
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "127.0.0.1:8384:8384"
      - "22000:22000"
      - "21027:21027/udp"
    volumes:
      - syncthing_config:/var/syncthing
      - /home/piranha/syncthing:/data
  become: yes

# NOTE(review): container "links" are a legacy docker feature — consider a
# user-defined network if the docker engine is current.
- name: Ensure radarr container created
  docker_container:
    name: radarr
    image: linuxserver/radarr
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "127.0.0.1:7878:7878"
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /home/media:/media
      - radarr_config:/config
    links:
      - transmission:transmission
      - plex:plexmediaserver
    env:
      TZ: America/Edmonton
      PUID: 911
      PGID: 911
  become: yes

- name: Ensure sonarr container created
  docker_container:
    name: sonarr
    image: linuxserver/sonarr
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "127.0.0.1:8989:8989"
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /home/media:/media
      - sonarr_config:/config
    links:
      - transmission:transmission
      - plex:plexmediaserver
    env:
      TZ: America/Edmonton
      PUID: 911
      PGID: 911
  become: yes

- name: Ensure syncthing-relay container created
  docker_container:
    name: syncthing-relay
    image: precurse/syncthing-relay
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "22067:22067"
      - "22070:22070"
    volumes:
      - syncthing_relay_config:/var/syncthing-relay
      - /home/piranha/syncthing:/data
  become: yes

# Loopback-only; exposed via haproxy's minio_ip listener with TLS + ACL.
- name: Ensure minio container created
  docker_container:
    name: minio
    image: minio/minio
    command: server /data
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "127.0.0.1:9000:9000"
    volumes:
      - /home/media:/data
    env:
      MINIO_ACCESS_KEY: "{{ kim_minio_access_key }}"
      MINIO_SECRET_KEY: "{{ kim_minio_secret_key }}"
  become: yes
--------------------------------------------------------------------------------
/roles/precurse.pi_docker_images/defaults/main.yml:
--------------------------------------------------------------------------------
# Checkout location for the docker-stuff build repo, under the connecting
# user's home directory.
docker_repo_path: /home/{{ ansible_user }}/docker-stuff
2 |
--------------------------------------------------------------------------------
/roles/precurse.pi_docker_images/tasks/main.yml:
--------------------------------------------------------------------------------
# Build local ARM docker images from the docker-stuff repo and run the
# infrastructure containers (tftpd/smartospxe/networkupstools/syncthing).
- name: Clone docker build repo
  git:
    repo: https://github.com/precurse/docker-stuff
    dest: "{{ docker_repo_path }}"
    update: yes
    force: yes

# The distro python-docker conflicts with the pip docker-py used below.
- name: Remove system docker-py
  apt:
    name: python-docker
    state: absent
  become: yes

- name: Install python-pip
  apt:
    name: python-pip
    state: present
    cache_valid_time: 3600
  become: yes

- name: Install docker-py
  pip:
    name: docker-py
    state: present
  become: yes

- name: Create volume(s)
  docker_volume:
    name: syncthing_config
  become: yes

# - name: Download alpine rootfs
#   command: ./fetch.sh
#   args:
#     chdir: "{{ docker_repo_path }}/alpine-armhf"
#     creates: "{{ docker_repo_path }}/alpine-armhf/rootfs.tar.gz"

- name: Build docker images
  docker_image:
    path: "{{ docker_repo_path }}/{{ item }}"
    name: "precurse/{{ item }}"
    buildargs:
      ARCH: arm
  become: yes
  with_items:
    # - alpine-armhf
    - networkupstools
    - smartospxe
    - syncthing
    - tftpd

- name: Create tftpd container
  docker_container:
    name: tftpd
    image: precurse/tftpd
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "69:69/udp"
  become: yes

- name: Create smartospxe container
  docker_container:
    name: smartospxe
    image: precurse/smartospxe
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "80:80/tcp"
  become: yes

- name: Create networkupstools container
  docker_container:
    name: networkupstools
    image: precurse/networkupstools
    state: started
    restart_policy: unless-stopped
    published_ports:
      - "3493:3493/tcp"
    devices:
      # NOTE(review): bus/device numbers change on replug/reboot — this
      # hard-coded USB path is fragile; a udev symlink would be stabler.
      - /dev/bus/usb/001/005
  become: yes

- name: Create syncthing container
  docker_container:
    name: syncthing
    image: precurse/syncthing
    state: started
    restart_policy: unless-stopped
    # host networking so local discovery/broadcast works
    network_mode: host
    volumes:
      - syncthing_config:/var/syncthing
      - /home/piranha/syncthing:/data
  become: yes
96 |
--------------------------------------------------------------------------------
/roles/precurse.stsrv_docker_images/tasks/main.yml:
--------------------------------------------------------------------------------
# Pull the public syncthing-relay image and run it with its config volume.
- name: Ensure Ubuntu docker-py not installed
  apt:
    name: python-docker
    state: absent
  become: yes

- name: Install python-pip
  apt:
    name: python-pip
    state: present
  become: yes

- name: Install docker-py from pip
  pip:
    name: docker-py
    state: present
  become: yes

- name: Fetch docker images
  docker_image:
    name: "{{ item }}"
    state: present
  with_items:
    - precurse/syncthing-relay
  become: yes

- name: Create docker volumes
  docker_volume:
    name: "{{ item }}"
    state: present
  with_items:
    - syncthing_relay_config
  become: yes

- name: Ensure syncthing-relay container created
  docker_container:
    name: syncthing-relay
    image: precurse/syncthing-relay
    state: started
    restart_policy: unless-stopped
    published_ports:
      # 22067 = relay traffic, 22070 = relay status endpoint
      - "22067:22067"
      - "22070:22070"
    volumes:
      - syncthing_relay_config:/var/syncthing-relay
  become: yes
47 |
--------------------------------------------------------------------------------
/roles/pxeserver/tasks/install_syslinux.yml:
--------------------------------------------------------------------------------
# Fetch and unpack syslinux, then stage the c32 modules and pxelinux loader
# into the tftp/web roots. Only included when pxelinux.0 is absent (see main.yml).
- name: Download syslinux
  get_url: url="{{ syslinux_url }}" dest="{{ tftpdir }}/syslinux-{{ syslinux_ver }}.tar.xz"

- name: Extract syslinux
  unarchive: src="{{ tftpdir }}/syslinux-{{ syslinux_ver }}.tar.xz" dest="/srv/tftp/" copy=no

- name: Copy c32 files
  copy: remote_src=True src="{{ tftpdir }}/syslinux-{{ syslinux_ver }}/{{ item }}" dest="/srv/tftp/"
  with_items:
    - bios/com32/lib/libcom32.c32
    - bios/com32/elflink/ldlinux/ldlinux.c32

- name: Copy mboot.c32 file
  copy: remote_src=True
        src="{{ tftpdir }}/syslinux-{{ syslinux_ver }}/bios/com32/mboot/mboot.c32"
        dest="{{ webdir }}/"

# lpxelinux.0 (HTTP-capable loader) is installed under the plain pxelinux.0 name.
# NOTE(review): shell cp re-runs on every play; a copy task with remote_src
# would be idempotent.
- name: Copy pxelinux.0 file
  shell: "cp {{ tftpdir }}/syslinux-{{ syslinux_ver }}/bios/core/lpxelinux.0 {{ tftpdir }}/pxelinux.0"

- name: Create pxelinux conf directory
  file: path="{{ tftpdir }}/pxelinux.cfg" state=directory mode=0755
23 |
24 |
--------------------------------------------------------------------------------
/roles/pxeserver/tasks/main.yml:
--------------------------------------------------------------------------------
# PXE server for SmartOS: tftp-hpa serves the bootloader, nginx serves the
# platform image over HTTP. Target is an Arch host (pacman).
- name: Install packages
  pacman: name={{ item }} state=present
  with_items:
    - tftp-hpa
    - nginx
  become: yes

- name: Create webdir if needed
  file: dest={{ webdir }} state=directory mode=0755
  become: yes

# force=yes so "platform-latest" is re-fetched even if a file already exists.
- name: Download smartos platform
  get_url: url={{ smartos_url }}
           dest="{{ webdir }}/platform.tgz"
           force=yes
  tags:
    - update_packages

- name: Check for existing platform directory
  stat: path="{{ webdir }}/platform"
  register: platform_stat
  tags:
    - update_packages

- name: Extract new platform directory
  unarchive: src="{{ webdir }}/platform.tgz" dest="{{ webdir }}/" copy=no
  tags:
    - update_packages

# Keep the previous platform as platform_<date> before swapping in the new one.
- name: Move old platform directory
  command: mv {{ webdir }}/platform {{ webdir }}/platform_{{ ansible_date_time.date }}
           creates="{{ webdir }}/platform_{{ ansible_date_time.date }}"
  when: platform_stat.stat.isdir is defined and platform_stat.stat.isdir == True
  tags:
    - update_packages

- name: Create platform directory
  file: path="{{ webdir }}/platform" state=directory mode=0755
  tags:
    - update_packages

- name: Rename to platform directory
  shell: "mv {{ webdir }}/platform-*/* {{ webdir }}/platform"
  tags:
    - update_packages

## Only download syslinux if not already

- name: Check if syslinux exists
  stat: path="{{ tftpdir }}/pxelinux.0"
  register: pxelinux_stat
  tags:
    - syslinux

# NOTE(review): "include" is deprecated in newer Ansible; include_tasks is the
# replacement when upgrading.
- name: Install syslinux
  include: tasks/install_syslinux.yml
  when: pxelinux_stat.stat.exists is not defined or pxelinux_stat.stat.exists == False
  tags:
    - syslinux

- name: Setup pxelinux template
  template: src=pxelinux.cfg.j2 dest={{ tftpdir }}/pxelinux.cfg/default
  tags:
    - syslinux

- name: Enable nginx
  service: name=nginx state=started enabled=yes
  tags:
    - services

- name: Enable tftpd
  service: name=tftpd state=started enabled=yes
  tags:
    - services
76 |
--------------------------------------------------------------------------------
/roles/pxeserver/templates/pxelinux.cfg.j2:
--------------------------------------------------------------------------------
# PXE menu: boot SmartOS via mboot.c32, fetching kernel and boot_archive over
# HTTP from {{ pxe_host }} (lpxelinux.0 supports http:// paths).
default smartos
prompt 1
timeout 50
label smartos
kernel http://{{ pxe_host }}/smartos/mboot.c32
append http://{{ pxe_host }}/smartos/platform/i86pc/kernel/amd64/unix -B smartos=true --- http://{{ pxe_host }}/smartos/platform/i86pc/amd64/boot_archive
7 |
8 |
--------------------------------------------------------------------------------
/roles/pxeserver/vars/main.yml:
--------------------------------------------------------------------------------
# Paths served by nginx (HTTP) and tftp-hpa respectively.
webdir: "/usr/share/nginx/html/smartos"
tftpdir: "/srv/tftp"

smartos_url: https://us-east.manta.joyent.com/Joyent_Dev/public/SmartOS/platform-latest.tgz

# Quoted: a bare 6.03 would be parsed by YAML as a float (6.03 != "6.03").
syslinux_ver: "6.03"
syslinux_url: https://www.kernel.org/pub/linux/utils/boot/syslinux/syslinux-{{ syslinux_ver }}.tar.xz

# Address PXE clients use to reach the HTTP platform files.
pxe_host: 10.0.3.4
admin_group: wheel
11 |
--------------------------------------------------------------------------------
/roles/radius_server/handlers/main.yml:
--------------------------------------------------------------------------------
# Handler: restart radiusd when notified. The previous "state: started" was a
# no-op on an already-running service, so config changes never took effect.
- name: restart radius
  service:
    name: radiusd
    state: restarted
    enabled: yes
  become: yes
7 |
--------------------------------------------------------------------------------
/roles/radius_server/tasks/main.yml:
--------------------------------------------------------------------------------
# Install FreeRADIUS, enable the "files" authorization module, and start the
# service. Paths (/etc/raddb) match the RedHat-family packaging.
- name: Install packages
  package:
    name: "{{ item }}"
    state: present
  with_items:
    - freeradius
    - freeradius-utils
  become: yes

# Modules are enabled by symlinking mods-available -> mods-enabled.
- name: Setup radius files module
  file:
    src: /etc/raddb/mods-available/files
    dest: /etc/raddb/mods-enabled/files
    state: link
  notify: restart radius
  become: yes

- name: Enable radius service
  service:
    name: radiusd
    state: started
    enabled: yes
  become: yes
24 |
25 |
--------------------------------------------------------------------------------
/roles/rsyslog_server/handlers/main.yml:
--------------------------------------------------------------------------------
# Handler: reload (not restart) rsyslog so it re-reads its config without
# dropping in-flight messages.
- name: reload rsyslog
  service:
    name: rsyslog
    state: reloaded
  become: yes
6 |
--------------------------------------------------------------------------------
/roles/rsyslog_server/tasks/main.yml:
--------------------------------------------------------------------------------
- name: Install rsyslog
  package: name=rsyslog state=present
  become: yes

# Only the SmartOS template/path is managed here; other distros keep their
# packaged default config.
- name: Apply rsyslog template
  template:
    src: templates/rsyslog.smartos.conf.j2
    dest: /opt/local/etc/rsyslog.conf
  become: yes
  notify: reload rsyslog
  when: ansible_distribution == "SmartOS"
12 |
--------------------------------------------------------------------------------
/roles/rsyslog_server/templates/rsyslog.smartos.conf.j2:
--------------------------------------------------------------------------------
# rsyslog config for SmartOS: Solaris kernel input plus a loopback-only UDP
# listener on 514 (used e.g. by haproxy logging to local0).
$ModLoad immark
$ModLoad imsolaris
# $ModLoad imtcp
$ModLoad imudp
$UDPServerRun 514
$UDPServerAddress 127.0.0.1

local0.* /var/log/haproxy.log

*.err;kern.notice;auth.notice /dev/sysmsg
*.err;kern.debug;daemon.notice;mail.crit /var/adm/messages
*.alert;kern.err;daemon.err :omusrmsg:operator
*.alert :omusrmsg:root
*.emerg :omusrmsg:*

mail.debug /var/log/syslog
mail.info /var/log/maillog
auth.info /var/log/authlog
19 |
20 |
--------------------------------------------------------------------------------
/roles/samba/tasks/main.yml:
--------------------------------------------------------------------------------
# Install and configure Samba on SmartOS (pkgin packages, SMF service names).
# The whole block is gated on SmartOS and runs privileged.
- block:
    - name: ensure samba installed
      pkgin:
        name: samba
        state: present

    - name: ensure samba is running (and enable it at boot)
      service:
        name: "{{ item }}"
        state: started
        enabled: yes
      with_items:
        - samba:smbd
        - samba:nmbd

    - name: install smb.conf
      template:
        src: smb.conf.j2
        dest: /opt/local/etc/samba/smb.conf
        owner: root
        group: root
        mode: "0644"

    # Feed the password twice to smbpasswd's silent (-s) stdin prompt.
    # ">-" folds this to a single shell line; the previous plain-scalar "\"
    # continuation left a literal backslash between ")" and "|", which is a
    # shell syntax error.
    - name: Setup primary samba user/pass
      shell: >-
        (echo {{ samba_password }}; echo {{ samba_password }})
        | smbpasswd -a -s {{ samba_username }}
      tags:
        - setup

    # Unconditional restart to pick up the templated config.
    - name: restart samba services
      service:
        name: "{{ item }}"
        state: restarted
      with_items:
        - samba:smbd
        - samba:nmbd
  when: ansible_distribution == 'SmartOS'
  become: yes
28 |
--------------------------------------------------------------------------------
/roles/samba/templates/smb.conf.j2:
--------------------------------------------------------------------------------
# Samba configuration for the SIGNET workgroup. Per-user auth via smbpasswd
# accounts ("security = user"); shares below are a mix of private (piranha)
# and guest-readable media shares.
[global]
   workgroup = SIGNET
   server string = Samba %v (%h)
   security = user
   load printers = yes
   log level = 3
   syslog = 1

#============================ Share Definitions ==============================
[homes]
   comment = Home Directories
   browseable = no
   writable = yes

[tmp]
   comment = Temporary file space
   path = /tmp
   read only = no
   public = yes

[personal]
   path = /media/personal
   valid users = piranha
   public = no
   writeable = yes
   browsable = yes

[syncthing]
   path = /media/syncthing
   valid users = piranha
   public = no
   writeable = yes
   browsable = yes

[music]
   path = /media/music
   valid users = piranha
   public = no
   writeable = yes
   browseable = yes

[tv]
   path = /media/tv
   valid users = piranha
   guest ok = yes
   public = yes
   browsable = yes

[movies]
   path = /media/movies
   valid users = piranha
   # normalized "guest ok =yes" -> "guest ok = yes" for consistency
   guest ok = yes
   public = yes
   browsable = yes

[incoming]
   path = /media/incoming
   valid users = piranha
   guest ok = no
   public = yes
   browsable = yes
62 |
--------------------------------------------------------------------------------
/roles/samba_swift_backup/defaults/main.yml:
--------------------------------------------------------------------------------
# "current" release for Solaris/illumos (SmartOS) — always the latest build.
rclone_url: https://downloads.rclone.org/rclone-current-solaris-amd64.zip
2 |
--------------------------------------------------------------------------------
/roles/samba_swift_backup/tasks/main.yml:
--------------------------------------------------------------------------------
# Install rclone + swift clients on the samba host and schedule backup cron
# jobs for user piranha.
- name: Install dependencies
  pkgin:
    name: "{{ item }}"
    state: present
  with_items:
    - build-essential
    - unzip
    - py27-pip
  become: yes

# Fixed gid so volume permissions match the syncthing containers elsewhere.
- name: Create syncthing group
  group:
    name: syncthing
    gid: 1001
    state: present
  become: yes

- name: Install Openstack Swift
  pip:
    name: "{{ item }}"
  with_items:
    - python-keystoneclient
    - python-swiftclient
  become: yes

# The download/extract/move trio below only runs on first install.
- name: Check if rclone already exists
  stat:
    path: /home/piranha/rclone/rclone
  register: rclone_stat

- name: Download rclone
  get_url:
    url: "{{ rclone_url }}"
    dest: /home/piranha/rclone.zip
  become_user: piranha
  become: yes
  when: not rclone_stat.stat.exists

- name: Extract rclone
  unarchive:
    src: /home/piranha/rclone.zip
    dest: /home/piranha/
    remote_src: yes
  become_user: piranha
  become: yes
  when: not rclone_stat.stat.exists

- name: Move rclone
  shell: mv /home/piranha/rclone-*-amd64 /home/piranha/rclone
  become_user: piranha
  become: yes
  when: not rclone_stat.stat.exists

- name: Ensure rclone config directory exists
  file:
    path: /home/piranha/.config/rclone
    state: directory
  become_user: piranha
  become: yes

- name: Copy openrc
  template:
    src: openrc.j2
    dest: /home/piranha/openrc
  become_user: piranha
  become: yes

- name: Copy rclone.conf
  template:
    src: rclone.conf.j2
    dest: /home/piranha/.config/rclone/rclone.conf
  become_user: piranha
  become: yes

# hour/minute are derived deterministically from a hash of the job string, so
# each job gets a stable pseudo-random slot and jobs spread across the day.
- name: Setup crontabs
  cron:
    name: "{{ item.name }}"
    user: piranha
    hour: "{{ (item.job | hash | list | map('int',0,16) | sum ) % 24 }}"
    minute: "{{ (item.job | hash | list | map('int',0,16) | sum ) % 60 }}"
    job: "{{ item.job }}"
  with_items: "{{ samba_cron }}"
  tags: cron
84 |
--------------------------------------------------------------------------------
/roles/samba_swift_backup/templates/openrc.j2:
--------------------------------------------------------------------------------
# OpenStack credentials, sourced by the backup cron jobs before running
# swift/keystone clients.
export OS_AUTH_URL={{ os_auth_url }}
export OS_TENANT_NAME="{{ os_tenant_name }}"

# unsetting v3 items in case set
unset OS_PROJECT_ID
unset OS_PROJECT_NAME
unset OS_USER_DOMAIN_NAME
unset OS_INTERFACE

export OS_USERNAME="{{ os_username }}"
export OS_PASSWORD="{{ os_password }}"
# Fixed: a stray trailing double-quote here opened an unterminated string,
# which broke sourcing this file.
export OS_REGION_NAME="{{ os_region_name }}"
export OS_ENDPOINT_TYPE={{ os_endpoint_type }}
export OS_IDENTITY_API_VERSION={{ os_auth_ver }}
15 |
16 |
--------------------------------------------------------------------------------
/roles/samba_swift_backup/templates/rclone.conf.j2:
--------------------------------------------------------------------------------
# rclone remotes: "swifty" (OpenStack Swift) and "pcloud" are the raw backends;
# the *-encrypted remotes wrap them with rclone crypt.
[swifty]
type = swift
user = {{ os_username }}
key = {{ os_password }}
auth = {{ os_auth_url }}
domain = {{ os_domain }}
# NOTE(review): tenant_domain is filled from os_username here — looks like it
# should be the tenant/domain variable instead; confirm against the provider.
tenant_domain = {{ os_username }}
region = {{ os_region_name }}
auth_version = {{ os_auth_ver }}
endpoint_type = {{ os_endpoint_type }}

[photos-encrypted]
type = crypt
remote = swifty:photos
filename_encryption = standard
password = {{ os_enc_key }}
password2 = {{ os_enc_salt }}

[syncthing-encrypted]
type = crypt
remote = swifty:syncthing
filename_encryption = standard
password = {{ os_enc_key }}
password2 = {{ os_enc_salt }}

[backup-encrypted]
type = crypt
remote = swifty:backup
filename_encryption = standard
password = {{ os_enc_key }}
password2 = {{ os_enc_salt }}

[pcloud]
type = pcloud
client_id =
client_secret =
token = {{ pcloud_token }}

[pcloud-encrypted]
type = crypt
remote = pcloud:crypt_auto
filename_encryption = standard
directory_name_encryption = true
password = {{ pcloud_enc_key }}
password2 = {{ pcloud_enc_salt }}
46 |
47 |
--------------------------------------------------------------------------------
/roles/silk/defaults/main.yml:
--------------------------------------------------------------------------------
# Defaults for the SiLK netflow collector role (libfixbuf + yaf + silk built
# from source).
silk_user: silk
silk_group: silk

silk_autostart: true
# binary_only: build/install binaries but skip data dir, sensors and geoip.
silk_binary_only: false
# yaf (pcap-based flow generation) is optional; disabled by default.
silk_no_yaf: true
silk_yaf_interface: eth0

silk_tmp_dir: /home/{{ silk_user }}/tmp
silk_data_dir: /data
# Latest version/hashes from:
# http://tools.netsa.cert.org/fixbuf/download.html
# http://tools.netsa.cert.org/yaf/download.html
# http://tools.netsa.cert.org/silk/download.html
silk_lfb_ver: 1.8.0
silk_lfb_sha256: c0a3b4f99916c7124b1964b273fc3a1969e34228633f68a1a9615f2b420236ce
silk_lfb_fn: libfixbuf-{{ silk_lfb_ver }}.tar.gz
silk_lfb_url: http://tools.netsa.cert.org/releases/{{ silk_lfb_fn }}

silk_yaf_ver: 2.9.2
silk_yaf_sha256: c6246dc64d9311a098b239a313c75f793ece02bac61daf2c83c26ac868bc0def
silk_yaf_fn: yaf-{{ silk_yaf_ver }}.tar.gz
silk_yaf_url: http://tools.netsa.cert.org/releases/{{ silk_yaf_fn }}

silk_silk_ver: 3.16.0
silk_silk_sha256: 152054cc717eea23543fb6c8b18270fb040c7b0df87a802038f6f1d4b37ece5d
silk_silk_fn: silk-{{ silk_silk_ver }}.tar.gz
silk_silk_url: http://tools.netsa.cert.org/releases/{{ silk_silk_fn }}

# NOTE(review): MaxMind discontinued the legacy GeoLite downloads — verify
# this URL still resolves before relying on the geoip tasks.
silk_geoip_url: http://geolite.maxmind.com/download/geoip/database/GeoIPCountryCSV.zip
silk_geoip_cron: True

# Listeners rwflowpack binds for incoming flow records.
silk_probes:
  - name: S0
    protocol: udp
    address: "{{ ansible_default_ipv4.address }}"
    port: 9988
    type: netflow-v9
  - name: S1
    protocol: udp
    address: "{{ ansible_default_ipv4.address }}"
    port: 5600
    type: sflow

# Address blocks classified as "internal" in sensors.conf.
silk_networks:
  - name: my_net
    ipblocks:
      - 192.168.0.0/16
      - 10.0.0.0/8

silk_sensors:
  - name: S0
    type: netflow-v9
    probe: S0
    internal_network: my_net
  - name: S1
    type: sflow
    probe: S1
    internal_network: my_net
60 |
61 |
--------------------------------------------------------------------------------
/roles/silk/files/silk.conf:
--------------------------------------------------------------------------------
# ld.so.conf.d entry: make the source-installed SiLK shared libraries visible
# to the dynamic linker (role runs ldconfig after installing this).
/usr/local/lib
/usr/local/lib/silk
3 |
--------------------------------------------------------------------------------
/roles/silk/handlers/main.yml:
--------------------------------------------------------------------------------
# Handler: restart the flow packer after sensors.conf / silk.conf changes.
- name: restart rwflowpack
  service:
    name: rwflowpack
    state: restarted
  become: yes
6 |
--------------------------------------------------------------------------------
/roles/silk/tasks/main.yml:
--------------------------------------------------------------------------------
# Create the unprivileged build/run account and install the toolchain and
# library headers needed to compile libfixbuf, yaf and silk from source.
- name: Setup silk group
  group:
    name: "{{ silk_group }}"
    state: present
  become: yes

- name: Setup silk user
  user:
    name: "{{ silk_user }}"
    group: "{{ silk_group }}"
    state: present
  become: yes

- name: Install packages
  yum:
    name: "{{ item }}"
    state: present
    update_cache: true
  with_items:
    - gcc
    - make
    - c-ares
    - c-ares-devel
    - doxygen
    - git-core
    - glib2
    - glib2-devel
    - gnutls
    - gnutls-devel
    - openssl-devel
    - lzo
    - lzo-devel
    - libpcap
    - libpcap-devel
    - net-tools
    - pcre-devel
    - python
    - python-devel
    - wget
    - zlib
    - zlib-devel
  become: yes
43 |
## TODO: Check if installed already

- name: Ensure tmp dir exist
  file:
    state: directory
    path: "{{ silk_tmp_dir }}"
    owner: "{{ silk_user }}"
    group: "{{ silk_group }}"
    recurse: yes
  become: yes

- name: Ensure data dir exist
  file:
    state: directory
    path: "{{ silk_data_dir }}"
    owner: "{{ silk_user }}"
    group: "{{ silk_group }}"
    recurse: yes
  become: yes
  when: not silk_binary_only

## YAF Setup
# Download/extract/compile run as the silk user; install (below) runs as root.
- block:
    - name: download yaf
      get_url:
        url: "{{ silk_yaf_url }}"
        dest: "{{ silk_tmp_dir }}"
        checksum: sha256:{{ silk_yaf_sha256 }}

    - name: Extract yaf
      unarchive:
        src: "{{ silk_tmp_dir }}/{{ silk_yaf_fn }}"
        dest: "{{ silk_tmp_dir }}"
        remote_src: true

    # fn.split('.tar')[0] yields the extracted directory name, e.g. yaf-2.9.2.
    - name: compile yaf
      environment:
        PKG_CONFIG_PATH: /usr/local/lib/pkgconfig
      command: "{{ item }} chdir={{ silk_tmp_dir }}/{{ silk_yaf_fn.split('.tar')[0] }}"
      with_items:
        - ./configure --enable-applabel
        - make
  become_user: "{{ silk_user }}"
  become: yes
  when: not silk_no_yaf

- block:
    - name: install yaf
      command: "make install chdir={{ silk_tmp_dir }}/{{ silk_yaf_fn.split('.tar')[0] }}"

    - name: copy yaf systemd service file
      template:
        src: templates/yaf.service.j2
        dest: "/etc/systemd/system/yaf.service"

    - name: Enable yaf service at boot
      service:
        name: yaf
        state: started
        enabled: yes
  become: yes
  when: not silk_no_yaf
106 |
## SILK SETUP
# Order matters: libfixbuf must be compiled and installed before silk's
# configure can find it via --with-libfixbuf.
- block:
    - name: download libfixbuf
      get_url:
        url: "{{ silk_lfb_url }}"
        dest: "{{ silk_tmp_dir }}"
        checksum: sha256:{{ silk_lfb_sha256 }}

    - name: download silk
      get_url:
        url: "{{ silk_silk_url }}"
        dest: "{{ silk_tmp_dir }}"
        checksum: sha256:{{ silk_silk_sha256 }}

    - name: Extract libfixbuf
      unarchive:
        src: "{{ silk_tmp_dir }}/{{ silk_lfb_fn }}"
        dest: "{{ silk_tmp_dir }}"
        remote_src: true

    - name: Extract silk
      unarchive:
        src: "{{ silk_tmp_dir }}/{{ silk_silk_fn }}"
        dest: "{{ silk_tmp_dir }}"
        remote_src: true

    # CFLAGS exported via make -e so the suppress-logs define reaches the build.
    - name: compile libfixbuf
      environment:
        CFLAGS: "-DFB_SUPPRESS_LOGS=1"
      command: "{{ item }} chdir={{ silk_tmp_dir }}/{{ silk_lfb_fn.split('.tar')[0] }}"
      with_items:
        - ./configure
        - make -e
      become_user: "{{ silk_user }}"
      become: yes

    - name: install libfixbuf
      command: "make install chdir={{ silk_tmp_dir }}/{{ silk_lfb_fn.split('.tar')[0] }}"
      become: yes

    - name: compile silk
      command: "{{ item }} chdir={{ silk_tmp_dir }}/{{ silk_silk_fn.split('.tar')[0] }}"
      with_items:
        - ./configure --with-libfixbuf=/usr/local/lib/pkgconfig/ --with-python --enable-ipv6
        - make
      become_user: "{{ silk_user }}"
      become: yes

    - name: install silk
      command: "make install chdir={{ silk_tmp_dir }}/{{ silk_silk_fn.split('.tar')[0] }}"
      become: yes

    # Register /usr/local/lib{,/silk} with the dynamic linker.
    - name: Copy silk.conf to ld.so
      copy:
        src: files/silk.conf
        dest: /etc/ld.so.conf.d/silk.conf
      become: yes

    - name: ldconfig
      command: ldconfig
      become: yes

# Site config shipped with the source tree (twoway layout) becomes the live one.
- name: copy silk.conf
  copy:
    remote_src: true
    src: "{{ silk_tmp_dir }}/{{ silk_silk_fn.split('.tar')[0] }}/site/twoway/silk.conf"
    dest: "{{ silk_data_dir }}/silk.conf"
  tags: config
  notify: restart rwflowpack
  become_user: "{{ silk_user }}"
  become: yes
  when: not silk_binary_only

- name: copy sensors.conf template
  template:
    src: templates/sensors.conf.j2
    dest: "{{ silk_data_dir }}/sensors.conf"
  tags: config
  notify: restart rwflowpack
  become_user: "{{ silk_user }}"
  become: yes
  when: not silk_binary_only

- name: copy rwflowpack.conf template
  template:
    src: templates/rwflowpack.conf.j2
    dest: /usr/local/etc/rwflowpack.conf
  become: yes
196 |
197 |
# One-shot geoip bootstrap: fetch MaxMind country CSV and convert it into
# SiLK's pmap format (rwgeoip2ccmap). A weekly cron repeats the refresh.
- block:
    - name: get geoip zip
      get_url:
        url: "{{ silk_geoip_url }}"
        dest: "{{ silk_tmp_dir }}/geoip.zip"

    - name: unzip geoip data
      unarchive:
        src: "{{ silk_tmp_dir }}/geoip.zip"
        dest: "{{ silk_tmp_dir }}/"
        remote_src: True

    - name: create geoip data file
      shell: cat {{ silk_tmp_dir }}/GeoIPCountryWhois.csv | /usr/local/bin/rwgeoip2ccmap --csv-input > {{ silk_tmp_dir }}/country_codes.pmap

    - name: import geoip data file
      shell: mv {{ silk_tmp_dir }}/country_codes.pmap {{ silk_data_dir }}/
  tags: geoip
  become_user: "{{ silk_user }}"
  become: yes
  when: not silk_binary_only

# SiLK tools look for the pmap in their share dir; point it at the data copy.
- name: symlink geoip file
  file:
    src: "{{ silk_data_dir }}/country_codes.pmap"
    dest: /usr/local/share/silk/country_codes.pmap
    state: link
    force: yes
  tags: geoip
  become: yes

- name: Create cron for geoip
  cron:
    name: "Update geoip data"
    cron_file: update_geoip
    special_time: weekly
    user: "{{ silk_user }}"
    job: "wget {{ silk_geoip_url }} -O {{ silk_tmp_dir }}/geoip.zip && unzip -f {{ silk_tmp_dir }}/geoip.zip && cat {{ silk_tmp_dir }}/GeoIPCountryWhois.csv | /usr/local/bin/rwgeoip2ccmap --csv-input > {{ silk_tmp_dir }}/country_codes.pmap && mv {{ silk_tmp_dir }}/country_codes.pmap {{ silk_data_dir }}/country_codes.pmap"
    state: present
  tags:
    - geoip
    - config
  become: yes
  when: silk_geoip_cron and not silk_binary_only

- name: copy rwflowpack systemd service file
  template:
    src: templates/rwflowpack.service.j2
    dest: "/etc/systemd/system/rwflowpack.service"
  become: yes
  when: silk_autostart

- name: Enable rwflowpack service at boot
  service:
    name: rwflowpack
    state: started
    enabled: yes
  become: yes
  when: silk_autostart
257 |
258 |
--------------------------------------------------------------------------------
/roles/silk/templates/rwflowpack.conf.j2:
--------------------------------------------------------------------------------
# rwflowpack init-script configuration. NOTE(review): the role starts
# rwflowpack via its own systemd unit, which may be why ENABLED is left
# empty here — confirm this file is still consumed.
# Set to non-empty value to enable rwflowpack
ENABLED=
statedirectory=/usr/local/var/lib/rwflowpack
CREATE_DIRECTORIES=yes
BIN_DIR=/usr/local/sbin
SENSOR_CONFIG={{ silk_data_dir }}/sensors.conf
DATA_ROOTDIR={{ silk_data_dir }}
SITE_CONFIG={{ silk_data_dir }}/silk.conf
PACKING_LOGIC=
INPUT_MODE=stream
INCOMING_DIR=${statedirectory}/incoming
ARCHIVE_DIR=${statedirectory}/archive
FLAT_ARCHIVE=0
ERROR_DIR= #${statedirectory}/error
OUTPUT_MODE=local-storage
SENDER_DIR=${statedirectory}/sender-incoming
# NOTE(review): INCREMENTAL_DIR duplicates SENDER_DIR above — verify both are
# meant to share sender-incoming.
INCREMENTAL_DIR=${statedirectory}/sender-incoming
COMPRESSION_TYPE=
POLLING_INTERVAL=
FLUSH_TIMEOUT=
FILE_CACHE_SIZE=
FILE_LOCKING=1
PACK_INTERFACES=0
SILK_IPFIX_PRINT_TEMPLATES=
LOG_TYPE=syslog
LOG_LEVEL=info
LOG_DIR=${statedirectory}/log
PID_DIR=${LOG_DIR}
USER=`whoami`
EXTRA_OPTIONS=
EXTRA_ENVVAR=
32 |
--------------------------------------------------------------------------------
/roles/silk/templates/rwflowpack.service.j2:
--------------------------------------------------------------------------------
# systemd unit: run rwflowpack in the foreground (--no-daemon) as the silk
# user, storing packed flows under the role's data dir.
[Unit]
Description=rwflowpack
Requires=network-online.target
After=network-online.target

[Service]
User={{ silk_user }}
Group={{ silk_group }}
Restart=on-failure
ExecStart=/usr/local/sbin/rwflowpack --sensor-configuration={{ silk_data_dir }}/sensors.conf --site-config-file={{ silk_data_dir }}/silk.conf --output-mode=local-storage --log-destination=syslog --root-directory={{ silk_data_dir }}/ --pidfile=/var/log/rwflowpack.pid --log-level=info --no-daemon
ExecReload=/bin/kill -HUP $MAINPID
KillSignal=SIGINT

[Install]
WantedBy=multi-user.target
16 |
17 |
--------------------------------------------------------------------------------
/roles/silk/templates/sensors.conf.j2:
--------------------------------------------------------------------------------
# SiLK sensors.conf generated from role vars: one probe block per listener,
# one group per internal network, one sensor tying a probe to that group.
{% for p in silk_probes %}
probe {{ p.name }} {{ p.type }}
listen-on-port {{ p.port }}
protocol {{ p.protocol }}
listen-as-host {{ p.address }}
end probe
{% endfor %}
{% for n in silk_networks %}
group {{ n.name }}
{% for b in n.ipblocks %}
ipblocks {{ b }}
{% endfor %}
end group
{% endfor %}
{% for s in silk_sensors %}
sensor {{ s.name }}
{{ s.type }}-probes {{ s.probe }}
internal-ipblocks @{{ s.internal_network }}
external-ipblocks remainder
end sensor
{% endfor %}
22 |
--------------------------------------------------------------------------------
/roles/silk/templates/yaf.service.j2:
--------------------------------------------------------------------------------
# systemd unit: yaf captures on {{ silk_yaf_interface }} and exports IPFIX to
# the local rwflowpack listener on 127.0.0.1:18001.
[Unit]
Description=yaf
Requires=network-online.target
After=network-online.target

[Service]
Restart=on-failure
ExecStart=/usr/local/bin/yaf --silk --ipfix=tcp --live=pcap --out=127.0.0.1 --ipfix-port=18001 --in={{ silk_yaf_interface }} --applabel --max-payload=384 --become-user={{ silk_user }} --become-group={{ silk_group }}
ExecReload=/bin/kill -HUP $MAINPID
KillSignal=SIGINT

[Install]
WantedBy=multi-user.target
14 |
15 |
--------------------------------------------------------------------------------
/roles/ssmtp/tasks/main.yml:
--------------------------------------------------------------------------------
# Install ssmtp (apt default state is "present") and render its config.
- name: Ensure ssmtp installed
  apt:
    name: ssmtp
    update_cache: yes
    cache_valid_time: 3600
  become: yes

- name: Setup ssmtp template
  template:
    src: ssmtp.conf.j2
    dest: /etc/ssmtp/ssmtp.conf
  become: yes
13 |
--------------------------------------------------------------------------------
/roles/ssmtp/templates/ssmtp.conf.j2:
--------------------------------------------------------------------------------
# ssmtp relay config: all local mail (including root's) is forwarded to the
# authenticated mailhub below.
root={{ ssmtp_dest_email }}
rewriteDomain=
UseSTARTTLS={% if ssmtp_usestarttls %}YES{% else %}NO{% endif %}

hostname={{ ssmtp_hostname }}
mailhub={{ ssmtp_mailhub_host }}:{{ ssmtp_mailhub_port }}
AuthUser={{ ssmtp_auth_user }}
AuthPass={{ ssmtp_auth_pass }}
FromLineOverride=YES
10 |
--------------------------------------------------------------------------------
/roles/subsonic/handlers/main.yml:
--------------------------------------------------------------------------------
# Handlers for the subsonic service.
# Fix: the restart handler used the invalid state "retarted", which made
# any task notifying "restart subsonic" fail at runtime.
- name: stop subsonic
  service:
    name: subsonic
    state: stopped

- name: restart subsonic
  service:
    name: subsonic
    state: restarted

- name: start subsonic
  service:
    name: subsonic
    state: started
9 |
--------------------------------------------------------------------------------
/roles/subsonic/tasks/main.yml:
--------------------------------------------------------------------------------
# Install and configure Subsonic on a Debian-family host.
- name: Update packages
  apt:
    upgrade: dist
    update_cache: yes
  tags: update_packages

- name: Installing openjdk
  apt:
    name: openjdk-7-jre
    state: present
    update_cache: yes
    cache_valid_time: 3600

# rc == 0 only when the exact target version is already installed;
# ignore_errors lets a non-zero rc through for the conditional below.
- name: Check if Subsonic installed
  shell: dpkg-query -s subsonic |grep Version | grep "{{ subsonic_ver }}"
  register: deb_installed
  ignore_errors: yes
  changed_when: no

- name: Install subsonic if not present
  apt:
    deb: "{{ subsonic_url }}"
  when: deb_installed is defined and deb_installed.rc > 0

- name: Installing lame
  package:
    name: lame
    state: present

- name: Creating Subsonic group
  group:
    name: subsonic
    state: present
    gid: 80

- name: Creating Subsonic user
  user:
    name: subsonic
    uid: 80
    group: subsonic

# Config edits notify a stop; flush_handlers below guarantees the
# service is restarted with the new settings by the final task.
- name: Update subsonic config context
  lineinfile:
    dest: /etc/default/subsonic
    regexp: '^SUBSONIC_ARGS='
    line: 'SUBSONIC_ARGS="--max-memory=150 --context-path=/"'
  notify:
    - stop subsonic

- name: Update subsonic config user
  lineinfile:
    dest: /etc/default/subsonic
    regexp: '^SUBSONIC_USER='
    line: SUBSONIC_USER=subsonic
  notify:
    - stop subsonic

- meta: flush_handlers

- name: Enable subsonic
  service:
    name: subsonic
    state: started
    enabled: yes
44 |
--------------------------------------------------------------------------------
/roles/subsonic/vars/main.yml:
--------------------------------------------------------------------------------
1 | subsonic_ver: '6.1.3'
2 | subsonic_url: 'http://subsonic.org/download/subsonic-{{ subsonic_ver }}.deb'
3 | subsonic_sha256: '143e5608219271eaafc0fc444eb0110df7e6390c7cc2767d25f2d65d8b0dc0ab'
4 |
5 |
--------------------------------------------------------------------------------
/roles/syncthing/defaults/main.yml:
--------------------------------------------------------------------------------
1 | st_user: syncthing
2 | st_def_folder: /home/syncthing/Default
3 | st_def_folder_name: Default Folder
4 | st_gui_listen_address: 0.0.0.0
5 | st_gui_listen_port: 8384
6 | st_theme: dark
7 | st_binary: /usr/bin/syncthing
8 |
--------------------------------------------------------------------------------
/roles/syncthing/tasks/main.yml:
--------------------------------------------------------------------------------
# Install syncthing from the vendor apt repo (Debian/Ubuntu) or pacman
# (Arch), create its user, lay down config, and start the service under
# whichever service manager the host runs.

# Debian/Ubuntu specifics
- block:
    - name: Ensure apt-transport-https package installed
      apt:
        name: apt-transport-https
        state: present
        # NOTE(review): cache_valid_time without update_cache only implies a
        # cache refresh on newer Ansible releases — confirm target version.
        cache_valid_time: 3600
      become: yes

    - name: Add syncthing apt key
      apt_key:
        url: https://syncthing.net/release-key.txt
        state: present
      become: yes

    - name: Add syncthing apt repo
      apt_repository:
        repo: deb https://apt.syncthing.net/ syncthing release
        state: present
      become: yes

    - name: Install syncthing
      apt:
        name: syncthing
        state: present
        update_cache: yes
      become: yes

  when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu'

# Arch specifics
- name: Install syncthing (Arch)
  pacman:
    name: syncthing
    state: present
    update_cache: yes
  when: ansible_distribution == 'Archlinux'


- name: Ensure syncthing group exists
  group:
    name: "{{ st_user }}"
    state: present
  become: yes

- name: Ensure syncthing user exists
  user:
    name: "{{ st_user }}"
    group: "{{ st_user }}"
    state: present
  become: yes

# sysvinit hosts get a hand-rolled multi-user init script; systemd hosts
# use the packaged syncthing@.service template unit instead (see below).
# NOTE(review): the "templates/" prefix resolves via the role-root search
# path; plain "syncthing.init.j2" would be the conventional form.
- name: Copy syncthing init script
  template:
    src: templates/syncthing.init.j2
    dest: /etc/init.d/syncthing
    mode: 0755
  become: yes
  when: ansible_service_mgr == 'sysvinit'

- name: Ensure default store folder writable
  file:
    state: directory
    recurse: true
    path: "{{ st_def_folder }}"
    owner: "{{ st_user }}"
    mode: 0750
  become: yes

# Only the first entry of `filesystems` is handled here — presumably
# hosts define at most one external mount; verify against inventory.
- name: Ensure external storage folder writable
  file:
    state: directory
    recurse: true
    path: "{{ filesystems[0].target }}"
    owner: "{{ st_user }}"
  become: yes
  when: filesystems is defined and filesystems

# Do stuff as syncthing user
- block:
    - name: Ensure syncthing config dir exists
      file:
        state: directory
        path: /home/{{ st_user }}/.config/syncthing/

    # force: no — never clobber a config syncthing has since rewritten.
    - name: Setup syncthing config template
      template:
        src: config.xml.j2
        dest: /home/{{ st_user }}/.config/syncthing/config.xml
        force: no
  become_user: "{{ st_user }}"
  become: yes

- name: Enable syncthing service (init)
  service:
    name: syncthing
    state: started
    enabled: yes
  become: yes
  when: ansible_service_mgr == 'sysvinit'

- name: Enable syncthing service (systemd)
  service:
    name: syncthing@{{ st_user }}
    state: started
    enabled: yes
  become: yes
  when: ansible_service_mgr == 'systemd'
109 |
--------------------------------------------------------------------------------
/roles/syncthing/templates/config.xml.j2:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {{ st_gui_listen_address }}:{{ st_gui_listen_port }}
6 | {{ st_theme }}
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/roles/syncthing/templates/syncthing.init.j2:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | ### BEGIN INIT INFO
3 | # Provides: syncthing
4 | # Required-Start: $local_fs $remote_fs
5 | # Required-Stop: $local_fs $remote_fs
6 | # Should-Start: $network
7 | # Should-Stop: $network
8 | # Default-Start: 2 3 4 5
9 | # Default-Stop: 0 1 6
10 | # Short-Description: Multi-user daemonized version of syncthing.
11 | # Description: Starts the syncthing daemon for all registered users.
12 | ### END INIT INFO
13 |
14 | # Replace with users you want to run syncthing clients for
15 | syncthing_USERS="{{ st_user }}"
16 | DAEMON={{ st_binary }}
17 |
# Start one daemonized syncthing instance per configured user.
startd() {
  for stuser in $syncthing_USERS; do
    HOMEDIR=$(getent passwd "$stuser" | awk -F: '{print $6}')
    # Fix: $config was never assigned, so "[ -f $config ]" collapsed to
    # "[ -f ]" (a non-empty-string test) and always succeeded.
    config="$HOMEDIR/.config/syncthing/config.xml"
    if [ -f "$config" ]; then
      # Fix: message previously said "syncthiing".
      echo "Starting syncthing for $stuser"
      start-stop-daemon -b -o -c "$stuser" -S -u "$stuser" -x "$DAEMON"
    else
      echo "Couldn't start syncthing for $stuser (no $config found)"
    fi
  done
}
29 |
# Stop the syncthing instance of each configured user, if running.
stopd() {
  for stuser in $syncthing_USERS; do
    # pgrep -fu: match full command line, restricted to this user.
    dbpid=$(pgrep -fu $stuser $DAEMON)
    if [ ! -z "$dbpid" ]; then
      echo "Stopping syncthing for $stuser"
      start-stop-daemon -o -c $stuser -K -u $stuser -x $DAEMON
    fi
  done
}

# Report running/not-running (with pid) for each configured user.
status() {
  for stuser in $syncthing_USERS; do
    dbpid=$(pgrep -fu $stuser $DAEMON)
    if [ -z "$dbpid" ]; then
      echo "syncthing for USER $stuser: not running."
    else
      echo "syncthing for USER $stuser: running (pid $dbpid)"
    fi
  done
}

# Standard sysvinit action dispatch.
case "$1" in
  start) startd
    ;;
  stop) stopd
    ;;
  restart|reload|force-reload) stopd && startd
    ;;
  status) status
    ;;
  *) echo "Usage: /etc/init.d/syncthing {start|stop|reload|force-reload|restart|status}"
    exit 1
    ;;
esac

exit 0
66 |
--------------------------------------------------------------------------------
/roles/syncthing_relay/defaults/main.yml:
--------------------------------------------------------------------------------
1 | strelay_ver: strelaysrv-linux-amd64-v0.14.20+2-g22a4d49
2 |
3 | strelay_url: https://build.syncthing.net/job/strelaysrv/lastStableBuild/artifact/{{ strelay_ver }}.tar.gz
4 |
--------------------------------------------------------------------------------
/roles/syncthing_relay/tasks/main.yml:
--------------------------------------------------------------------------------
## Arch
- block:
    - name: Install syncthing-relay
      pacman:
        name: "syncthing-relaysrv"
        state: present
        update_cache: yes
  become: yes
  # Fix: Ansible reports Arch as "Archlinux" (exactly as the syncthing role
  # checks); the old comparison against "Arch" never matched, so this block
  # was silently skipped on Arch hosts.
  when: ansible_distribution == "Archlinux"
10 |
11 | ## Debian/Ubuntu
- block:
    # gid/uid 992 are hard-coded — presumably to match an existing host
    # layout; verify they are free on new hosts.
    - name: Ensure syncthing-relay group present
      group:
        name: syncthing-relaysrv
        gid: 992
        state: present

    - name: Ensure syncthing-relay user present
      user:
        name: syncthing-relaysrv
        uid: 992
        group: syncthing-relaysrv
        state: present

    - name: Ensure working directory exists & correct permission
      file:
        state: directory
        path: /var/lib/syncthing-relaysrv
        owner: syncthing-relaysrv
        group: syncthing-relaysrv

    - name: Copy syncthing-relay systemd service
      template:
        src: syncthing-relaysrv.service.j2
        dest: /etc/systemd/system/syncthing-relaysrv.service

    # Download/extract as the service user so /tmp artifacts are not
    # root-owned; the block-level become provides the privilege switch.
    - name: Download syncthing-relay
      get_url:
        url: "{{ strelay_url }}"
        dest: /tmp/syncthing-relay.tar.gz
      become_user: syncthing-relaysrv

    - name: Extract syncthing-relay
      unarchive:
        src: /tmp/syncthing-relay.tar.gz
        dest: /tmp/
        remote_src: True
      become_user: syncthing-relaysrv

    - name: Copy syncthing-relay binary to usr/bin
      copy:
        src: /tmp/{{ strelay_ver }}/strelaysrv
        dest: /usr/local/bin/syncthing-relaysrv
        mode: 0755
        remote_src: True

  when: ansible_distribution == "Ubuntu"
  become: yes

# Runs unconditionally (both Arch and Ubuntu paths); daemon_reload picks
# up the freshly templated unit file.
- name: Enable/Start syncthing-relay
  systemd:
    name: syncthing-relaysrv
    state: started
    enabled: true
    daemon_reload: yes
  become: yes
68 |
--------------------------------------------------------------------------------
/roles/syncthing_relay/templates/syncthing-relaysrv.service.j2:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Syncthing relay server
3 | After=network.target
4 |
5 | [Service]
6 | User=syncthing-relaysrv
7 | Group=syncthing-relaysrv
8 | ExecStart=/usr/local/bin/syncthing-relaysrv -listen=":443"
9 | WorkingDirectory=/var/lib/syncthing-relaysrv
10 | ExecStartPre=/sbin/setcap 'cap_net_bind_service=+ep' '/usr/local/bin/syncthing-relaysrv'
11 |
12 | #PrivateTmp=true
13 | #ProtectSystem=full
14 | #ProtectHome=true
15 | #NoNewPrivileges=true
16 | PermissionsStartOnly=true
17 |
18 | [Install]
19 | WantedBy=multi-user.target
20 |
--------------------------------------------------------------------------------
/roles/transmission/tasks/main.yml:
--------------------------------------------------------------------------------
# Install the transmission BitTorrent daemon, CLI and curl via the
# distro-agnostic package module. (Converted from k=v shorthand to block
# YAML and given a task name.)
- name: Install transmission packages
  package:
    name: "{{ item }}"
    state: present
  with_items:
    - transmission-daemon
    - transmission
    - curl
8 |
--------------------------------------------------------------------------------
/roles/ups/handlers/main.yml:
--------------------------------------------------------------------------------
# Handlers for the Network UPS Tools services.
- name: restart nut-server
  service:
    name: nut-server
    state: restarted

- name: restart nut-monitor
  service:
    name: nut-monitor
    state: restarted
6 |
7 |
--------------------------------------------------------------------------------
/roles/ups/tasks/build_nut.yml:
--------------------------------------------------------------------------------
# Build Network UPS Tools from the AUR as the unprivileged "alarm" user.
- name: checkout network ups tools
  git:
    repo: https://aur.archlinux.org/network-ups-tools.git
    dest: /home/alarm/nut-build
    force: yes
  become: yes
  become_user: alarm

# The AUR PKGBUILD does not list ARM; patch the arch= line so makepkg
# will build on Raspberry Pi class hardware.
- name: Adding arm compile support
  lineinfile:
    dest: /home/alarm/nut-build/PKGBUILD
    regexp: '^arch='
    line: "arch=('armv6h' 'armv7h')"

- name: compile network ups tools
  shell: makepkg -s
  args:
    chdir: /home/alarm/nut-build
  become: yes
  become_user: alarm
11 |
--------------------------------------------------------------------------------
/roles/ups/tasks/main.yml:
--------------------------------------------------------------------------------
# Install/build NUT on Arch, template its configuration and start the
# server + monitor services.
- name: Install needed packages
  pacman:
    name: "{{ item }}"
    update_cache: yes
    state: present
  with_items:
    - base-devel
    - git
    - neon
    - net-snmp
    - libusb-compat
    - docbook-xml
    - docbook-xsl
    - libxml2
    - libxslt
    - asciidoc

# pacman -Q exits 0 when installed, 1 when not; anything else is a
# genuine failure.
- name: Check if NUT installed
  command: pacman -Q network-ups-tools
  register: is_installed
  failed_when: is_installed.rc > 1
  changed_when: no

- name: Build NUT if not installed
  include: tasks/build_nut.yml
  when: is_installed.rc == 1

- name: install package
  shell: pacman -U /home/alarm/nut-build/network-ups-tools-*.pkg.tar.xz --noconfirm
  when: is_installed.rc == 1

- name: Configure templates
  template:
    src: "{{ item }}.j2"
    dest: /etc/ups/{{ item }}
  with_items:
    - nut.conf
    - ups.conf
    - upsd.conf
    - upsd.users
    - upsmon.conf
    - upssched.conf
  notify:
    - restart nut-server
    - restart nut-monitor

- name: Ensure nut-server started and enabled
  service:
    name: nut-server
    state: started
    enabled: yes

- name: Ensure nut-monitor started and enabled
  service:
    name: nut-monitor
    state: started
    enabled: yes
44 |
--------------------------------------------------------------------------------
/roles/ups/templates/nut.conf.j2:
--------------------------------------------------------------------------------
1 | MODE=netserver
2 |
--------------------------------------------------------------------------------
/roles/ups/templates/ups.conf.j2:
--------------------------------------------------------------------------------
1 | [Cyberpower]
2 | driver = "usbhid-ups"
3 | port = "auto"
4 | vendorid = "0764"
5 | productid = "0501"
6 | product = "CP1500PFCLCD"
7 | serial = "000000000000"
8 | vendor = "CPS"
9 | bus = "001"
10 |
11 |
--------------------------------------------------------------------------------
/roles/ups/templates/upsd.conf.j2:
--------------------------------------------------------------------------------
1 | LISTEN 0.0.0.0
2 |
--------------------------------------------------------------------------------
/roles/ups/templates/upsd.users.j2:
--------------------------------------------------------------------------------
# NUT user definitions for upsd.
# NOTE(review): both passwords are hard-coded in the template; consider
# templating them from vaulted variables instead of committing them here.
[admin]
password = admin
actions = SET
instcmds = ALL

[upsmon]
password = pass8412
upsmon master
9 |
10 |
--------------------------------------------------------------------------------
/roles/ups/templates/upsmon.conf.j2:
--------------------------------------------------------------------------------
1 | MONITOR Cyberpower@localhost 1 upsmon pass8412 master
2 |
3 | NOTIFYFLAG ONBATT EXEC+WALL+SYSLOG
4 | NOTIFYFLAG ONLINE EXEC+WALL+SYSLOG
5 | MINSUPPLIES 1
6 | SHUTDOWNCMD "/sbin/shutdown -h +0"
7 | # NOTIFYCMD /usr/bin/notifyme
8 | POLLFREQ 5
9 | POLLFREQALERT 5
10 | HOSTSYNC 15
11 | DEADTIME 15
12 | POWERDOWNFLAG /etc/killpower
13 |
14 | RBWARNTIME 43200
15 | NOCOMMWARNTIME 300
16 | FINALDELAY 5
17 |
18 |
--------------------------------------------------------------------------------
/roles/ups/templates/upssched.conf.j2:
--------------------------------------------------------------------------------
1 | CMDSCRIPT /usr/bin/upssched-cmd
2 |
--------------------------------------------------------------------------------
/roles/ups/vars/main.yml:
--------------------------------------------------------------------------------
1 | ansible_python_interpreter: /usr/bin/python2
2 | admin_group: wheel
3 |
4 |
--------------------------------------------------------------------------------
/setup_cfssl.yml:
--------------------------------------------------------------------------------
1 | - hosts: cfssl
2 | roles:
3 | - { role: common, tags: common }
4 | - cfssl
5 |
--------------------------------------------------------------------------------
/setup_consul.yml:
--------------------------------------------------------------------------------
1 | - hosts: consul
2 | roles:
3 | - { role: common, tags: common }
4 | - { role: consul_server, tags: consul }
5 |
--------------------------------------------------------------------------------
/setup_dbserver.yml:
--------------------------------------------------------------------------------
1 | - hosts: dbserver
2 | roles:
3 | - { role: common, tags: common }
4 | - postgresql
5 |
--------------------------------------------------------------------------------
/setup_gitserver.yml:
--------------------------------------------------------------------------------
1 | - hosts: git
2 | roles:
3 | - { role: common, tags: common }
4 | - git_server
5 |
--------------------------------------------------------------------------------
/setup_haproxy.yml:
--------------------------------------------------------------------------------
1 | - hosts: haproxy
2 | roles:
3 | - { role: common, tags: common }
4 | - rsyslog_server
5 | - haproxy
6 |
--------------------------------------------------------------------------------
/setup_kim.yml:
--------------------------------------------------------------------------------
1 | - hosts: kim
2 | become: true
3 | roles:
4 | - { role: common, tags: common }
5 | - { role: haproxy_kim, tags: haproxy }
6 | - { role: ssmtp, tags: ssmtp }
7 | - { role: docker, tags: docker }
8 | - { role: precurse.kim_docker_images, tags: docker }
9 |
--------------------------------------------------------------------------------
/setup_netflow.yml:
--------------------------------------------------------------------------------
1 | - hosts: netflow
2 | roles:
3 | - { role: common, tags: common }
4 | - silk
5 |
6 | - hosts: flowbat
7 | roles:
8 | - { role: common, tags: common }
9 | - flowbat
10 | - silk
11 |
--------------------------------------------------------------------------------
/setup_owncloud.yml:
--------------------------------------------------------------------------------
1 | - hosts: owncloud
2 | roles:
3 | - { role: common, tags: common }
4 | tasks:
5 | - name: Adding https apt support
6 | package: name=apt-transport-https state=present
7 | - name: Add owncloud repos
8 | apt_repository: repo='deb https://download.owncloud.org/download/repositories/stable/Debian_8.0/ /' state=present
9 |
--------------------------------------------------------------------------------
/setup_pimon.yml:
--------------------------------------------------------------------------------
1 | - hosts: pimon
2 | roles:
3 | - { role: common, tags: common }
4 | - { role: cups_server, tags: cups }
5 | - { role: precurse.pi_docker_images, tags: docker }
6 |
--------------------------------------------------------------------------------
/setup_plex.yml:
--------------------------------------------------------------------------------
1 | - hosts: plex
2 | remote_user: root
3 | roles:
4 | - { role: common, tags: common }
5 | - plex
6 |
--------------------------------------------------------------------------------
/setup_radius.yml:
--------------------------------------------------------------------------------
1 | - hosts: radius
2 | roles:
3 | - { role: common, tags: common }
4 | - radius_server
5 |
--------------------------------------------------------------------------------
/setup_samba.yml:
--------------------------------------------------------------------------------
1 | - hosts: samba
2 | roles:
3 | - { role: common, tags: common }
4 | - samba
5 | - samba_swift_backup
6 |
--------------------------------------------------------------------------------
/setup_shell.yml:
--------------------------------------------------------------------------------
1 | - hosts: shell
2 | roles:
3 | - { role: common, tags: common }
4 |
--------------------------------------------------------------------------------
/setup_stsrv.yml:
--------------------------------------------------------------------------------
1 | - hosts: syncthing_relay
2 | roles:
3 | - { role: common, tags: common }
4 | - { role: ssmtp, tags: ssmtp }
5 | - { role: docker, tags: docker }
6 | - { role: precurse.stsrv_docker_images, tags: docker }
7 |
--------------------------------------------------------------------------------
/setup_subsonic.yml:
--------------------------------------------------------------------------------
1 | - hosts: subsonic
2 | roles:
3 | - { role: common, tags: common }
4 | - subsonic
5 |
--------------------------------------------------------------------------------
/setup_syncthing.yml:
--------------------------------------------------------------------------------
1 | - hosts: syncthing
2 | roles:
3 | - { role: common, tags: common }
4 | - syncthing
5 |
--------------------------------------------------------------------------------
/setup_transmission.yml:
--------------------------------------------------------------------------------
1 | - hosts: transmission
2 | roles:
3 | - { role: common, tags: common }
4 | - transmission
5 |
--------------------------------------------------------------------------------
/site.yml:
--------------------------------------------------------------------------------
1 | # file: site.yml
2 | - include: update_packages.yml
3 | - include: setup_consul.yml
4 | - include: setup_kim.yml
5 | - include: setup_pimon.yml
6 | - include: setup_haproxy.yml
7 | - include: setup_radius.yml
8 | - include: setup_gitserver.yml
9 | - include: setup_samba.yml
10 | - include: setup_dbserver.yml
11 | - include: setup_owncloud.yml
12 | - include: setup_plex.yml
13 | - include: setup_subsonic.yml
14 | - include: setup_transmission.yml
15 | - include: setup_shell.yml
16 | - include: setup_syncthing.yml
17 | - include: setup_netflow.yml
18 | - include: setup_stsrv.yml
19 |
# Bootstrap ceph-deploy on ceph hosts. (Converted k=v shorthand to block
# YAML; fixed "cep" typo and the misleading "Update yum repos" name on
# what is actually a package-install task.)
- hosts: ceph
  tasks:
    - name: Create ceph group
      group:
        name: ceph
    - name: Create ceph user
      user:
        name: ceph
        state: present
        groups: ceph
    # Requires ceph_release and distro to be defined in inventory vars.
    - name: Add ceph repository
      yum_repository:
        name: ceph-noarch
        description: Ceph noarch packages
        baseurl: "https://download.ceph.com/rpm-{{ ceph_release }}/{{ distro }}/noarch"
        gpgcheck: yes
        gpgkey: https://download.ceph.com/keys/release.asc
    - name: Install ceph-deploy
      yum:
        name: ceph-deploy
        state: present
40 |
41 | - hosts: ldap
42 | remote_user: root
43 | gather_facts: no
44 | roles:
45 | - openldap
46 |
47 | - hosts: influxdb
48 | remote_user: root
49 | gather_facts: no
50 | roles:
51 | - common
52 | - influxdb
53 | - grafana
54 |
--------------------------------------------------------------------------------
/tasks/smartos_zone_bootstrap.yml:
--------------------------------------------------------------------------------
1 | - name: Upgrading pkgin packages
2 | raw: /opt/local/bin/pkgin -y update; /opt/local/bin/pkgin -y upgrade
3 | tags: update_packages
4 | - name: Installing python
5 | raw: /opt/local/bin/pkgin -y install python27
6 | - name: gather facts
7 | setup:
8 | - name: Set Timezone
9 | command: sm-set-timezone MST
10 |
--------------------------------------------------------------------------------
/tasks/smartos_zone_usersetup.yml:
--------------------------------------------------------------------------------
# Create the admin user, install SSH keys from GitHub, and unlock the
# account on a freshly provisioned SmartOS zone.
- name: add users
  user:
    name: "{{ item }}"
    state: present
    # NOTE(review): uid 1000 is hard-coded, so this loop only works with a
    # single user in the list.
    uid: 1000
    groups: adm
  with_items:
    - piranha

- name: Install authorized keys for piranha from GitHub
  authorized_key:
    user: piranha
    key: https://github.com/precurse.keys

- name: Install authorized keys for root from GitHub
  authorized_key:
    user: root
    key: https://github.com/precurse.keys

# SmartOS locks new accounts by default; -N clears the lock without
# setting a password (key-only login).
- name: Unlock Account
  command: passwd -N piranha
9 |
10 |
--------------------------------------------------------------------------------
/update_packages.yml:
--------------------------------------------------------------------------------
# Group hosts by detected distribution, then upgrade each group with its
# native package manager. (Converted k=v shorthand to block YAML.)
- hosts: all
  gather_facts: true
  tasks:
    - group_by:
        key: "os_{{ ansible_distribution }}"
      tags: update_packages

- hosts: os_Archlinux
  gather_facts: False
  tasks:
    - name: Update packages
      pacman:
        update_cache: yes
        upgrade: yes
      tags: update_packages
      become: yes

- hosts: os_CentOS
  gather_facts: False
  tasks:
    - name: Update packages
      yum:
        # '*' must be quoted in block YAML ('*' is the alias indicator).
        name: '*'
        state: latest
      become: yes
      tags: update_packages
  vars:
    ansible_user: root

- hosts: os_Debian
  gather_facts: False
  tasks:
    - name: Update packages
      apt:
        upgrade: dist
        update_cache: yes
      become: yes
      tags: update_packages
  vars:
    ansible_user: root

- hosts: os_Ubuntu
  gather_facts: False
  tasks:
    - name: Update packages
      apt:
        upgrade: dist
        update_cache: yes
      become: yes
      tags: update_packages

- hosts: os_SmartOS
  gather_facts: False
  tasks:
    - name: Update packages
      pkgin:
        full_upgrade: yes
      # Never auto-upgrade the hypervisor itself, only zones.
      when: smartos_hypervisor is not defined
      tags: update_packages
  vars:
    ansible_user: root

- hosts: os_FreeBSD
  gather_facts: False
  tasks:
    - name: Update packages
      # NOTE(review): the pkgng module may not support full_upgrade (that is
      # a pkgin option) — verify against the installed Ansible version.
      pkgng:
        full_upgrade: yes
      become: yes
      tags: update_packages
60 |
61 |
--------------------------------------------------------------------------------
/update_plex.yml:
--------------------------------------------------------------------------------
1 | - hosts: plex
2 | remote_user: root
3 | roles:
4 | - plex_update
5 |
--------------------------------------------------------------------------------