├── confs
│   └── logstash
│       └── logstash.conf
├── Vagrantfile
├── example-logs
│   └── testlog
├── ubuntu.sh
├── manifests
│   └── default.pp
├── setup.sh
├── kibana4_init
├── README.md
└── .gitignore

/confs/logstash/logstash.conf:
--------------------------------------------------------------------------------
input {
  file {
    path => ["/vagrant/example-logs/testlog"]
    start_position => "beginning"
    codec => "json"
    type => "json"
  }
}

output {
  elasticsearch {
    document_type => "example"
    host => "127.0.0.1"
    cluster => "vagrant_elasticsearch"
    protocol => "http"
  }
}
--------------------------------------------------------------------------------
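With this config Logstash writes to the default daily logstash-* indices in the vagrant_elasticsearch cluster. Assuming the box is up and port 9200 is forwarded as in the Vagrantfile below, a quick sanity check from the host could look like this (illustrative commands, not part of the repo):

```bash
# count the indexed sample events (logstash-* is Logstash's default index pattern)
curl 'http://localhost:9200/logstash-*/_count?pretty'

# peek at one document of type "example" (set by document_type above)
curl 'http://localhost:9200/logstash-*/_search?q=_type:example&size=1&pretty'
```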
/Vagrantfile:
--------------------------------------------------------------------------------
# -*- mode: ruby -*-
# vi: set ft=ruby :

# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|

  # Set to false if you do NOT want to check the correct VirtualBox Guest Additions version when booting this box
  if defined?(VagrantVbguest::Middleware)
    config.vbguest.auto_update = true
  end

  config.vm.box = "puppetlabs/ubuntu-14.04-64-puppet"
  config.vm.box_version = "1.0.1"
  config.vm.network :forwarded_port, guest: 5601, host: 5601
  config.vm.network :forwarded_port, guest: 9200, host: 9200
  config.vm.network :forwarded_port, guest: 9300, host: 9300

  config.vm.provider :virtualbox do |vb|
    vb.customize ["modifyvm", :id, "--cpus", "2", "--memory", "2048"]
  end

  config.vm.provider "vmware_fusion" do |v, override|
    ## the puppetlabs ubuntu 14.04 image might work on VMware, but this is untested
    override.vm.provision "shell", path: 'ubuntu.sh'
    override.vm.box = "phusion/ubuntu-14.04-amd64"
    v.vmx["numvcpus"] = "2"
    v.vmx["memsize"] = "2048"
  end

  config.vm.provision "shell", path: 'setup.sh'
  config.vm.provision "puppet", manifest_file: "default.pp"
end
--------------------------------------------------------------------------------
/example-logs/testlog:
--------------------------------------------------------------------------------
{"@timestamp":"2015-02-18T02:05:10.2224556+01:00","@fields":{"level":"INFO","logger":"queryLog","properties":{"log4net:HostName":"hostMachine"},"exception":null},"@message":{"documentTitle":"Proving that Android’s, Java’s and Python’s sorting algorithm is broken (and showing how to fix it)"}}
{"@timestamp":"2015-02-18T03:13:24.76802+01:00","@fields":{"level":"INFO","logger":"queryLog","properties":{"log4net:HostName":"hostMachine"},"exception":null},"@message":{"flow":"Maler","documentTitle":"Wuthering heights","queryId":"06728e4a-8e53-40f4-a26d-7d81403732f8","totalHits":452}}
{"@timestamp":"2015-02-18T03:13:24.76802+01:00","@fields":{"level":"INFO","logger":"queryLog","properties":{"log4net:HostName":"hostMachine"},"exception":null},"@message":{"flow":"Flow1","documentTitle":"Optimizing Python in the Real World: NumPy, Numba, and the NUFFT","hitNumber":1,"queryId":"06728e4a-8e53-40f4-a26d-7d81403732f8"}}
{"@timestamp":"2015-02-18T03:13:24.7836248+01:00","@fields":{"level":"INFO","logger":"queryLog","properties":{"log4net:HostName":"hostMachine"},"exception":null},"@message":{"flow":"Flow1","documentTitle":"DON’T PUT SELF-DOCUMENTING CODE ON A PEDESTAL","hitNumber":2,"queryId":"06728e4a-8e53-40f4-a26d-7d81403732f8"}}
{"@timestamp":"2015-02-18T03:13:24.7836248+01:00","@fields":{"level":"INFO","logger":"queryLog","properties":{"log4net:HostName":"hostMachine"},"exception":null},"@message":{"flow":"Flow1","documentTitle":"How to be a Programmer: A Short, Comprehensive, and Personal Summary","hitNumber":3,"queryId":"06728e4a-8e53-40f4-a26d-7d81403732f8"}}
--------------------------------------------------------------------------------
/ubuntu.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# This bootstraps Puppet on Ubuntu (the box used here is 14.04 LTS).
#
set -e

# Load up the release information
. /etc/lsb-release

REPO_DEB_URL="http://apt.puppetlabs.com/puppetlabs-release-${DISTRIB_CODENAME}.deb"

#--------------------------------------------------------------------
# NO TUNABLES BELOW THIS POINT
#--------------------------------------------------------------------
if [ "$(id -u)" != "0" ]; then
  echo "This script must be run as root." >&2
  exit 1
fi

if which puppet > /dev/null 2>&1 && apt-cache policy | grep --quiet apt.puppetlabs.com; then
  echo "Puppet is already installed."
  exit 0
fi

# Do the initial apt-get update
echo "Initial apt-get update..."
apt-get update >/dev/null

# Install wget if we have to (some older Ubuntu versions)
echo "Installing wget..."
apt-get install -y wget >/dev/null

# Install the PuppetLabs repo
echo "Configuring PuppetLabs repo..."
repo_deb_path=$(mktemp)
wget --output-document="${repo_deb_path}" "${REPO_DEB_URL}" 2>/dev/null
dpkg -i "${repo_deb_path}" >/dev/null
apt-get update >/dev/null

# Install Puppet
echo "Installing Puppet..."
DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install puppet >/dev/null

echo "Puppet installed!"

# Install RubyGems for the provider
echo "Installing RubyGems..."
if [ "$DISTRIB_CODENAME" != "trusty" ]; then
  apt-get install -y rubygems >/dev/null
fi
gem install --no-ri --no-rdoc rubygems-update
update_rubygems >/dev/null
--------------------------------------------------------------------------------
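ubuntu.sh is only wired up for the VMware provider in the Vagrantfile above; the VirtualBox box (puppetlabs/ubuntu-14.04-64-puppet) already ships with Puppet. To confirm the bootstrap, a couple of illustrative checks from the host (not part of the repo):

```bash
# Puppet should be on the PATH inside the VM
vagrant ssh -c 'puppet --version'

# and the Puppet Labs apt repo should be configured
vagrant ssh -c 'apt-cache policy | grep apt.puppetlabs.com'
```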
/manifests/default.pp:
--------------------------------------------------------------------------------
# Update APT cache
class { 'apt':
  always_apt_update => true,
}


# Java is required
class { 'java': }

# Elasticsearch
class { 'elasticsearch':
  manage_repo  => true,
  repo_version => '1.7',
}

elasticsearch::instance { 'es-01':
  config => {
    'cluster.name'             => 'vagrant_elasticsearch',
    'index.number_of_replicas' => '0',
    'index.number_of_shards'   => '1',
    'network.host'             => '0.0.0.0',
    'marvel.agent.enabled'     => false  # disable Marvel data collection
  },                          # Configuration hash
  init_defaults => { },       # Init defaults hash
  before        => Exec['start kibana'],
}

elasticsearch::plugin { 'royrusso/elasticsearch-HQ':
  instances => 'es-01',
}

elasticsearch::plugin { 'elasticsearch/marvel/latest':
  instances => 'es-01',
}

# Logstash
class { 'logstash':
  # autoupgrade => true,
  ensure       => 'present',
  manage_repo  => true,
  repo_version => '1.5',
  require      => [ Class['java'], Class['elasticsearch'] ],
}

# remove initial logstash config
#file { '/etc/logstash/conf.d/logstash':
#  ensure  => '/vagrant/confs/logstash/logstash.conf',
#  require => [ Class['logstash'] ],
#}


# Kibana
package { 'curl':
  ensure  => 'present',
  require => [ Class['apt'] ],
}

file { '/opt/kibana':
  ensure => 'directory',
  group  => 'vagrant',
  owner  => 'vagrant',
}

exec { 'download_kibana':
  command => '/usr/bin/curl -L https://download.elastic.co/kibana/kibana/kibana-4.1.1-linux-x64.tar.gz | /bin/tar xvz -C /opt/kibana --strip-components 1',
  require => [ Package['curl'], File['/opt/kibana'], Class['elasticsearch'] ],
  timeout => 1800,
}

exec { 'start kibana':
  command => '/etc/init.d/kibana start',
}
--------------------------------------------------------------------------------
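default.pp pins the cluster name, shard and replica counts shown above. Assuming the VM is running and the ports are forwarded per the Vagrantfile, these illustrative checks from the host show the node and cluster state:

```bash
# node info: cluster_name should be "vagrant_elasticsearch"
curl 'http://localhost:9200/?pretty'

# cluster health: a single node with 1 shard and 0 replicas should report green
curl 'http://localhost:9200/_cluster/health?pretty'
```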
/setup.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# This script installs the Puppet modules we need,
# and tries some tricks to set a local/fast mirror for Ubuntu updates.

# Install wget
sudo apt-get install -qy wget;

sed -e '/templatedir/ s/^#*/#/' -i.back /etc/puppet/puppet.conf

## set local/fastest mirror and local timezone
mv /etc/apt/sources.list /etc/apt/sources.list.orig
cat > /etc/apt/sources.list <<EOF
deb mirror://mirrors.ubuntu.com/mirrors.txt trusty main restricted universe multiverse
deb mirror://mirrors.ubuntu.com/mirrors.txt trusty-updates main restricted universe multiverse
deb mirror://mirrors.ubuntu.com/mirrors.txt trusty-backports main restricted universe multiverse
deb mirror://mirrors.ubuntu.com/mirrors.txt trusty-security main restricted universe multiverse
EOF

tz=`wget -qO - http://geoip.ubuntu.com/lookup | sed -n -e 's/.*<TimeZone>\(.*\)<\/TimeZone>.*/\1/p'` && sudo timedatectl set-timezone $tz

mkdir -p /etc/puppet/modules;
if [ ! -d /etc/puppet/modules/file_concat ]; then
  puppet module install ispavailability/file_concat
fi
if [ ! -d /etc/puppet/modules/apt ]; then
  puppet module install puppetlabs-apt --version 1.8.0
fi
if [ ! -d /etc/puppet/modules/java ]; then
  puppet module install puppetlabs-java
fi
if [ ! -d /etc/puppet/modules/elasticsearch ]; then
  puppet module install elasticsearch-elasticsearch
fi
if [ ! -d /etc/puppet/modules/logstash ]; then
  puppet module install elasticsearch-logstash
fi
if [ ! -f /etc/init.d/kibana ]; then
  sudo cp /vagrant/kibana4_init /etc/init.d/kibana
  sudo sed -i 's/\r//' /etc/init.d/kibana
  sudo chmod +x /etc/init.d/kibana
  sudo update-rc.d kibana defaults
  wget -q http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz http://geolite.maxmind.com/download/geoip/database/GeoIPv6.dat.gz http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz http://download.maxmind.com/download/geoip/database/asnum/GeoIPASNum.dat.gz
fi
--------------------------------------------------------------------------------
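setup.sh runs before the Puppet provisioning (see the Vagrantfile), so the modules and the Kibana init script must be in place by the time default.pp is applied. An illustrative way to verify that from the host:

```bash
# the Puppet modules installed by setup.sh
vagrant ssh -c 'puppet module list --modulepath /etc/puppet/modules'

# the init script copied from kibana4_init
vagrant ssh -c 'sudo service kibana status'
```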
/kibana4_init:
--------------------------------------------------------------------------------
#!/bin/sh
#
# /etc/init.d/kibana4_init -- startup script for kibana4
# bsmith@the408.com 2015-02-20; used elasticsearch init script as template
# https://github.com/akabdog/scripts/edit/master/kibana4_init
#
### BEGIN INIT INFO
# Provides:          kibana4_init
# Required-Start:    $network $remote_fs $named
# Required-Stop:     $network $remote_fs $named
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Starts kibana4_init
# Description:       Starts kibana4_init using start-stop-daemon
### END INIT INFO

# configure this with wherever you unpacked kibana:
KIBANA_BIN=/opt/kibana/bin

NAME=kibana4
PID_FILE=/var/run/$NAME.pid
PATH=/bin:/usr/bin:/sbin:/usr/sbin:$KIBANA_BIN
DAEMON=$KIBANA_BIN/kibana
DESC="Kibana4"

if [ `id -u` -ne 0 ]; then
  echo "You need root privileges to run this script"
  exit 1
fi

. /lib/lsb/init-functions

if [ -r /etc/default/rcS ]; then
  . /etc/default/rcS
fi

case "$1" in
  start)
    log_daemon_msg "Starting $DESC"

    pid=`pidofproc -p $PID_FILE kibana`
    if [ -n "$pid" ] ; then
      log_begin_msg "Already running."
      log_end_msg 0
      exit 0
    fi

    # Start Daemon
    start-stop-daemon --start --pidfile "$PID_FILE" --make-pidfile --background --exec $DAEMON
    log_end_msg $?
    ;;
  stop)
    log_daemon_msg "Stopping $DESC"

    if [ -f "$PID_FILE" ]; then
      start-stop-daemon --stop --pidfile "$PID_FILE" \
        --retry=TERM/20/KILL/5 >/dev/null
      # capture the exit status before the tests below overwrite $?
      RETVAL=$?
      if [ $RETVAL -eq 1 ]; then
        log_progress_msg "$DESC is not running but pid file exists, cleaning up"
      elif [ $RETVAL -eq 3 ]; then
        PID="`cat $PID_FILE`"
        log_failure_msg "Failed to stop $DESC (pid $PID)"
        exit 1
      fi
      rm -f "$PID_FILE"
    else
      log_progress_msg "(not running)"
    fi
    log_end_msg 0
    ;;
  status)
    status_of_proc -p $PID_FILE kibana kibana && exit 0 || exit $?
    ;;
  restart|force-reload)
    if [ -f "$PID_FILE" ]; then
      $0 stop
      sleep 1
    fi
    $0 start
    ;;
  *)
    log_success_msg "Usage: $0 {start|stop|restart|force-reload|status}"
    exit 1
    ;;
esac

exit 0
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
This Vagrant box installs Elasticsearch 1.7, Logstash 1.5.4 and Kibana 4.1.1.

## Prerequisites

[VirtualBox](https://www.virtualbox.org/) and [Vagrant](http://www.vagrantup.com/) (minimum version 1.6).
Other providers, like VMware, may work but have not been tested.


## Up and SSH

To start the vagrant box run:

    vagrant up

To log in to the machine run:

    vagrant ssh

Elasticsearch will be available on the host machine at [http://localhost:9200/](http://localhost:9200/)

Kibana at [http://localhost:5601/](http://localhost:5601/)

Marvel elasticsearch plugin at [http://localhost:9200/_plugin/marvel/](http://localhost:9200/_plugin/marvel/)

HQ elasticsearch plugin at [http://localhost:9200/_plugin/HQ/](http://localhost:9200/_plugin/HQ/)


## Vagrant commands

```
vagrant up        # starts the machine
vagrant ssh       # ssh to the machine
vagrant halt      # shut down the machine
vagrant provision # applies the bash and puppet provisioning
```

## Elasticsearch
Installed via debian package, started on boot.
Controlled by

```bash
sudo service elasticsearch-es-01
```

## Logstash
Installed via debian package, started on boot.
Controlled by

```bash
sudo service logstash
```

If you want some sample Logstash data to play with, run

```bash
/opt/logstash/bin/logstash agent -f /vagrant/confs/logstash/logstash.conf
```

It will index into Elasticsearch, reading from the example log file at [/vagrant/example-logs/testlog](/example-logs/testlog).

## Kibana
Manual install, start-up script provided in this repo.
Controlled by

```bash
sudo service kibana
```

## Configuration details
Elasticsearch and Logstash are installed using Puppet modules. The Kibana tarball is downloaded and extracted, and thanks to @UnrealQuester we even have an init script for Kibana.
The installation can be configured in [/manifests/default.pp](/manifests/default.pp). For details on the Elasticsearch Puppet module, see [https://forge.puppetlabs.com/elasticsearch/elasticsearch](https://forge.puppetlabs.com/elasticsearch/elasticsearch); the Logstash Puppet module is at [https://forge.puppetlabs.com/elasticsearch/logstash](https://forge.puppetlabs.com/elasticsearch/logstash).

Elasticsearch is installed with cluster name 'vagrant_elasticsearch' and instance name es-01, using 1 shard and 0 replicas.
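If you change the Elasticsearch or Logstash settings in [/manifests/default.pp](/manifests/default.pp), one way to apply them to a running box (a sketch, not a required step) is to re-run the provisioning and restart the instance:

```bash
vagrant provision
vagrant ssh -c 'sudo service elasticsearch-es-01 restart'
```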

Read (a bit) more: http://blog.comperiosearch.com/blog/2014/08/14/elk-one-vagrant-box/
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
################
## Eclipse
################

*.pydevproject
.project
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.classpath
.settings/
.loadpath

# External tool builders
.externalToolBuilders/

# Locally stored "Eclipse launch configurations"
*.launch

# CDT-specific
.cproject

# PDT-specific
.buildpath


#################
## Visual Studio
#################

## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.

# User-specific files
*.suo
*.user
*.sln.docstates

# Build results
[Dd]ebug/
[Rr]elease/
*_i.c
*_p.c
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.vspscc
.builds
*.dotCover

## TODO: If you have NuGet Package Restore enabled, uncomment this
#packages/

# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf

# Visual Studio profiler
*.psess
*.vsp

# ReSharper is a .NET coding add-in
_ReSharper*

# Installshield output folder
[Ee]xpress

# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html

# Click-Once directory
publish

# Others
[Bb]in
[Oo]bj
sql
TestResults
*.Cache
ClientBin
stylecop.*
~$*
*.dbmdl
Generated_Code #added for RIA/Silverlight projects

# Backup & report files from converting an old project file to a newer
# Visual Studio version. Backup files are not needed, because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML



############
## Windows
############

# Windows image file caches
Thumbs.db

# Folder config file
Desktop.ini


#############
## Python
#############

*.py[co]

# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox

#Translations
*.mo

#Mr Developer
.mr.developer.cfg

# Mac crap
.DS_Store
.orig

**/App_Data/Log/*
**/App_Data/Assembly/*
**/App_Data/Config/*
**/App_Data/Blob/*

*.log

*.dll
*.orig

.vagrant

elasticsearch
!modules/elasticsearch

*~

kibana/
elasticsearch
logs/
tmp/
project/
esbackups/
--------------------------------------------------------------------------------