├── .gitignore ├── .rubocop.yml ├── .ruby-version ├── 1.8 ├── post-tweets.json ├── tweeter-analytics.json └── tweeter.json ├── 1.9 ├── post-tweets.json ├── tweeter-analytics.json └── tweeter.json ├── Dockerfile ├── Gemfile ├── Gemfile.lock ├── README.md ├── Rakefile ├── app ├── assets │ └── stylesheets │ │ ├── _bootstrap.css │ │ ├── _tweeter.css │ │ └── application.css ├── controllers │ ├── analytics_controller.rb │ ├── application_controller.rb │ └── tweets_controller.rb ├── models │ ├── analytics.rb │ └── tweet.rb └── views │ ├── analytics │ └── index.html.erb │ ├── layouts │ └── application.html.erb │ └── tweets │ ├── index.html.erb │ └── show.html.erb ├── bin ├── bundle ├── rails ├── rake ├── spring └── tweet ├── cassandra-clear.json ├── cli_script.sh ├── config.ru ├── config ├── application.rb ├── boot.rb ├── environment.rb ├── environments │ ├── development.rb │ ├── production.rb │ └── test.rb ├── initializers │ ├── cassandra.rb │ ├── cookies_serializer.rb │ ├── filter_parameter_logging.rb │ ├── kafka.rb │ ├── session_store.rb │ └── wrap_parameters.rb ├── locales │ └── en.yml ├── routes.rb └── secrets.yml ├── cypress.json ├── lib └── tasks │ └── db.rake ├── log └── .keep ├── post-tweets.json ├── public ├── 404.html ├── 422.html ├── 500.html └── favicon.ico ├── router ├── Dockerfile ├── app.lua ├── marathon.json └── nginx.conf ├── shakespeare-data.json ├── shakespeare-tweets.json ├── tests ├── integration │ └── tweeter_spec.js └── support │ └── commands.js ├── tweeter-analytics.json └── tweeter.json /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files for more about ignoring files. 2 | # 3 | # If you find yourself ignoring temporary files generated by your text editor 4 | # or operating system, you probably want to add a global ignore instead: 5 | # git config --global core.excludesfile '~/.gitignore_global' 6 | 7 | # Ignore bundler config. 
8 | /.bundle 9 | 10 | # Ignore the default SQLite database. 11 | /db/*.sqlite3 12 | /db/*.sqlite3-journal 13 | 14 | # Ignore all logfiles and tempfiles. 15 | /log/*.log 16 | /tmp 17 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | Metrics/AbcSize: 2 | Enabled: false 3 | 4 | Style/ClassVars: 5 | Enabled: false 6 | 7 | Performance/ParallelAssignment: 8 | Enabled: false 9 | 10 | Metrics/MethodLength: 11 | Max: 20 12 | -------------------------------------------------------------------------------- /.ruby-version: -------------------------------------------------------------------------------- 1 | 2.2.0 2 | -------------------------------------------------------------------------------- /1.8/post-tweets.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/post-tweets", 3 | "cmd": "bin/tweet shakespeare-tweets.json http://1.1.1.1:30000", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | "instances": 1, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10101, 18 | "protocol": "tcp" 19 | } 20 | ] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /1.8/tweeter-analytics.json: -------------------------------------------------------------------------------- 1 | {"paragraphs":[{"title":"Load Dependencies","text":"%dep\nz.load(\"org.apache.commons:commons-csv:1.2\")\nz.load(\"org.apache.spark:spark-streaming-kafka_2.10:1.5.2\")\nz.load(\"com.typesafe.play:play-json_2.10:2.4.6\")\n","dateUpdated":"Apr 18, 2016 6:43:09 
PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989264_383768947","id":"20160418-184309_508939537","result":{"code":"SUCCESS","type":"TEXT","msg":"res0: org.apache.zeppelin.spark.dep.Dependency = org.apache.zeppelin.spark.dep.Dependency@5ba40793\n"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:20"},{"title":"Spark Streaming","text":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\n\nimport org.apache.spark.{SparkConf}\nimport org.apache.spark.streaming.kafka.{KafkaUtils}\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\n\nimport play.api.libs.json._\n\nval ssc = new StreamingContext(sc, Seconds(2))\nval msgStream = KafkaUtils.createStream(ssc, \"master.mesos:2181/dcos-service-kafka\", \"zeppelin-consumer-group\", Map(\"tweets\" -> 1)).map(_._2)\n\nval tweetStream = msgStream.map(tweetString => {\n val tweet = Json.parse(tweetString)\n ((tweet \\ \"handle\").as[String], (tweet \\ \"content\").as[String], (tweet \\ \"created_at\").as[String])\n })\ntweetStream.print()\n \ntweetStream.window(Seconds(60))\n .foreachRDD(rdd => rdd.toDF(\"handle\", \"content\", \"created_at\").registerTempTable(\"tweets\"))\n \n \n \nssc.start()","dateUpdated":"Apr 18, 2016 6:44:03 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"tableHide":false,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_383384198","id":"20160418-184309_1078829498","result":{"code":"SUCCESS","type":"TEXT","msg":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\nimport org.apache.spark.SparkConf\nimport 
org.apache.spark.streaming.kafka.KafkaUtils\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\nimport play.api.libs.json._\nssc: org.apache.spark.streaming.StreamingContext = org.apache.spark.streaming.StreamingContext@33126f3f\nmsgStream: org.apache.spark.streaming.dstream.DStream[String] = org.apache.spark.streaming.dstream.MappedDStream@1f5376fc\ntweetStream: org.apache.spark.streaming.dstream.DStream[(String, String, String)] = org.apache.spark.streaming.dstream.MappedDStream@20f4af29\n"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:21"},{"title":"Top tweeters","text":"%sql select handle, count(*) as count from tweets\n group by handle\n order by count desc","dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"multiBarChart","height":560,"optionOpen":false,"keys":[{"name":"handle","index":0,"aggr":"sum"}],"values":[{"name":"count","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"handle","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":false,"tableHide":false,"title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_384538445","id":"20160418-184309_1093666837","result":{"code":"SUCCESS","type":"TABLE","msg":"handle\tcount\nrosalind\t236\ntouchstone\t134\njaques\t117\ncelia\t97\norlando\t94\nduke senior\t48\ncorin\t42\nphebe\t30\nduke frederick\t19\namiens\t17\nsilvius\t15\naudrey\t9\nsir oliver martext\t7\nadam\t2\noliver\t2\n","comment":"","msgTable":[[{"key":"count","value":"rosalind"},{"key":"count","value":"236"}],[{"value":"touchstone"},{"value":"134"}],[{"value":"jaques"},{"value":"117"}],[{"value":"celia"},{"value":"97"}],[{"value":"orlando"},{"value":"94"}],[{"value":"duke senior"},{"value":"48"}],[{"value":"corin"},{"value":"42"}],[{"value":"phebe"},{"value":"30"}],[{"value":"duke 
frederick"},{"value":"19"}],[{"value":"amiens"},{"value":"17"}],[{"value":"silvius"},{"value":"15"}],[{"value":"audrey"},{"value":"9"}],[{"value":"sir oliver martext"},{"value":"7"}],[{"value":"adam"},{"value":"2"}],[{"value":"oliver"},{"value":"2"}]],"columnNames":[{"name":"handle","index":0,"aggr":"sum"},{"name":"count","index":1,"aggr":"sum"}],"rows":[["rosalind","236"],["touchstone","134"],["jaques","117"],["celia","97"],["orlando","94"],["duke senior","48"],["corin","42"],["phebe","30"],["duke frederick","19"],["amiens","17"],["silvius","15"],["audrey","9"],["sir oliver martext","7"],["adam","2"],["oliver","2"]]},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:22"},{"dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989266_384538445","id":"20160418-184309_1977340255","result":{"code":"SUCCESS","type":"TEXT"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:23"}],"name":"Tweeter Analytics","id":"2BGEB4WMQ","angularObjects":{"2BFVURX7P":[],"2BJ26CJUP":[],"2BGC7K1CY":[],"2BGG4QMZP":[],"2BHTSMVYX":[],"2BJAXRYGW":[],"2BH1XZMES":[],"2BJ3XKTDP":[],"2BHEWU631":[],"2BJ9J1UR6":[],"2BG18JECV":[],"2BGYEW4KP":[],"2BGTRC1KJ":[],"2BHPKYBCN":[]},"config":{"looknfeel":"default"},"info":{}} 2 | -------------------------------------------------------------------------------- /1.8/tweeter.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/tweeter", 3 | "cmd": "export KAFKA_BROKERS='broker.kafka.l4lb.thisdcos.directory:9092' && until rake cassandra:setup; do sleep 5; done && rails server", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | 
"instances": 3, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10000, 18 | "labels": { 19 | "VIP_0": "1.1.1.1:30000" 20 | } 21 | } 22 | ] 23 | } 24 | }, 25 | "env": { 26 | "CASSANDRA_HOSTS": "node-0.cassandra.mesos", 27 | "RAILS_ENV": "production", 28 | "SECRET_KEY_BASE": "be6ea21bd0e8ddad06accbdfbfcbc6f120815744a8177fb1196442c1670401c86a1d020f1fb62f9b7d6bacc8cf818de277d23d3f3e7dcf704ca88965e5b9ed86" 29 | }, 30 | "healthChecks": [ 31 | { 32 | "path": "/", 33 | "protocol": "HTTP", 34 | "portIndex": 0 35 | } 36 | ], 37 | "labels": { 38 | "HAPROXY_GROUP": "external" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /1.9/post-tweets.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/post-tweets", 3 | "cmd": "bin/tweet shakespeare-tweets.json http://1.1.1.1:30000", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | "instances": 1, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10101, 18 | "protocol": "tcp" 19 | } 20 | ] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /1.9/tweeter-analytics.json: -------------------------------------------------------------------------------- 1 | {"paragraphs":[{"title":"Load Dependencies","text":"%dep\nz.load(\"org.apache.commons:commons-csv:1.2\")\nz.load(\"org.apache.spark:spark-streaming-kafka_2.10:1.5.2\")\nz.load(\"com.typesafe.play:play-json_2.10:2.4.6\")\n","dateUpdated":"Apr 18, 2016 6:43:09 
PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989264_383768947","id":"20160418-184309_508939537","result":{"code":"SUCCESS","type":"TEXT","msg":"res0: org.apache.zeppelin.spark.dep.Dependency = org.apache.zeppelin.spark.dep.Dependency@5ba40793\n"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:20"},{"title":"Spark Streaming","text":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\n\nimport org.apache.spark.{SparkConf}\nimport org.apache.spark.streaming.kafka.{KafkaUtils}\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\n\nimport play.api.libs.json._\n\nval ssc = new StreamingContext(sc, Seconds(2))\nval msgStream = KafkaUtils.createStream(ssc, \"master.mesos:2181/dcos-service-kafka\", \"zeppelin-consumer-group\", Map(\"tweets\" -> 1)).map(_._2)\n\nval tweetStream = msgStream.map(tweetString => {\n val tweet = Json.parse(tweetString)\n ((tweet \\ \"handle\").as[String], (tweet \\ \"content\").as[String], (tweet \\ \"created_at\").as[String])\n })\ntweetStream.print()\n \ntweetStream.window(Seconds(60))\n .foreachRDD(rdd => rdd.toDF(\"handle\", \"content\", \"created_at\").registerTempTable(\"tweets\"))\n \n \n \nssc.start()","dateUpdated":"Apr 18, 2016 6:44:03 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"tableHide":false,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_383384198","id":"20160418-184309_1078829498","result":{"code":"SUCCESS","type":"TEXT","msg":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\nimport org.apache.spark.SparkConf\nimport 
org.apache.spark.streaming.kafka.KafkaUtils\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\nimport play.api.libs.json._\nssc: org.apache.spark.streaming.StreamingContext = org.apache.spark.streaming.StreamingContext@33126f3f\nmsgStream: org.apache.spark.streaming.dstream.DStream[String] = org.apache.spark.streaming.dstream.MappedDStream@1f5376fc\ntweetStream: org.apache.spark.streaming.dstream.DStream[(String, String, String)] = org.apache.spark.streaming.dstream.MappedDStream@20f4af29\n"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:21"},{"title":"Top tweeters","text":"%sql select handle, count(*) as count from tweets\n group by handle\n order by count desc","dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"multiBarChart","height":560,"optionOpen":false,"keys":[{"name":"handle","index":0,"aggr":"sum"}],"values":[{"name":"count","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"handle","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":false,"tableHide":false,"title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_384538445","id":"20160418-184309_1093666837","result":{"code":"SUCCESS","type":"TABLE","msg":"handle\tcount\nrosalind\t236\ntouchstone\t134\njaques\t117\ncelia\t97\norlando\t94\nduke senior\t48\ncorin\t42\nphebe\t30\nduke frederick\t19\namiens\t17\nsilvius\t15\naudrey\t9\nsir oliver martext\t7\nadam\t2\noliver\t2\n","comment":"","msgTable":[[{"key":"count","value":"rosalind"},{"key":"count","value":"236"}],[{"value":"touchstone"},{"value":"134"}],[{"value":"jaques"},{"value":"117"}],[{"value":"celia"},{"value":"97"}],[{"value":"orlando"},{"value":"94"}],[{"value":"duke senior"},{"value":"48"}],[{"value":"corin"},{"value":"42"}],[{"value":"phebe"},{"value":"30"}],[{"value":"duke 
frederick"},{"value":"19"}],[{"value":"amiens"},{"value":"17"}],[{"value":"silvius"},{"value":"15"}],[{"value":"audrey"},{"value":"9"}],[{"value":"sir oliver martext"},{"value":"7"}],[{"value":"adam"},{"value":"2"}],[{"value":"oliver"},{"value":"2"}]],"columnNames":[{"name":"handle","index":0,"aggr":"sum"},{"name":"count","index":1,"aggr":"sum"}],"rows":[["rosalind","236"],["touchstone","134"],["jaques","117"],["celia","97"],["orlando","94"],["duke senior","48"],["corin","42"],["phebe","30"],["duke frederick","19"],["amiens","17"],["silvius","15"],["audrey","9"],["sir oliver martext","7"],["adam","2"],["oliver","2"]]},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:22"},{"dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989266_384538445","id":"20160418-184309_1977340255","result":{"code":"SUCCESS","type":"TEXT"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:23"}],"name":"Tweeter Analytics","id":"2BGEB4WMQ","angularObjects":{"2BFVURX7P":[],"2BJ26CJUP":[],"2BGC7K1CY":[],"2BGG4QMZP":[],"2BHTSMVYX":[],"2BJAXRYGW":[],"2BH1XZMES":[],"2BJ3XKTDP":[],"2BHEWU631":[],"2BJ9J1UR6":[],"2BG18JECV":[],"2BGYEW4KP":[],"2BGTRC1KJ":[],"2BHPKYBCN":[]},"config":{"looknfeel":"default"},"info":{}} 2 | -------------------------------------------------------------------------------- /1.9/tweeter.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/tweeter", 3 | "cmd": "export KAFKA_BROKERS='broker.kafka.l4lb.thisdcos.directory:9092' && until rake cassandra:setup; do sleep 5; done && rails server", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | 
"instances": 3, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10000, 18 | "labels": { 19 | "VIP_0": "1.1.1.1:30000" 20 | } 21 | } 22 | ] 23 | } 24 | }, 25 | "env": { 26 | "CASSANDRA_HOSTS": "node-0-server.cassandra.mesos", 27 | "RAILS_ENV": "production", 28 | "SECRET_KEY_BASE": "be6ea21bd0e8ddad06accbdfbfcbc6f120815744a8177fb1196442c1670401c86a1d020f1fb62f9b7d6bacc8cf818de277d23d3f3e7dcf704ca88965e5b9ed86" 29 | }, 30 | "healthChecks": [ 31 | { 32 | "path": "/", 33 | "protocol": "HTTP", 34 | "portIndex": 0 35 | } 36 | ], 37 | "labels": { 38 | "HAPROXY_GROUP": "external" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rails:onbuild 2 | 3 | RUN bin/rake assets:precompile 4 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | # Bundle edge Rails instead: gem 'rails', github: 'rails/rails' 4 | gem 'rails', '~> 4.2.7.1' 5 | # Use SCSS for stylesheets 6 | gem 'sass-rails', '~> 4.0.3' 7 | # Use Uglifier as compressor for JavaScript assets 8 | gem 'uglifier', '>= 1.3.0' 9 | 10 | # Use Cassandra Driver as primary storage 11 | gem 'cassandra-driver', '~> 3.0' 12 | 13 | # Kafka for logging tweets 14 | gem 'ruby-kafka', '~> 0.2.0' 15 | 16 | # Spring speeds up development by keeping your application running 17 | # in the background. 
Read more: https://github.com/rails/spring 18 | gem 'spring', group: :development 19 | 20 | # Use ActiveModel has_secure_password 21 | # gem 'bcrypt', '~> 3.1.7' 22 | 23 | # Use unicorn as the app server 24 | # gem 'unicorn' 25 | 26 | # Use Capistrano for deployment 27 | # gem 'capistrano-rails', group: :development 28 | 29 | # Use debugger 30 | # gem 'debugger', group: [:development, :test] 31 | 32 | gem 'rubocop', '~> 0.51.0' 33 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | actionmailer (4.2.7.1) 5 | actionpack (= 4.2.7.1) 6 | actionview (= 4.2.7.1) 7 | activejob (= 4.2.7.1) 8 | mail (~> 2.5, >= 2.5.4) 9 | rails-dom-testing (~> 1.0, >= 1.0.5) 10 | actionpack (4.2.7.1) 11 | actionview (= 4.2.7.1) 12 | activesupport (= 4.2.7.1) 13 | rack (~> 1.6) 14 | rack-test (~> 0.6.2) 15 | rails-dom-testing (~> 1.0, >= 1.0.5) 16 | rails-html-sanitizer (~> 1.0, >= 1.0.2) 17 | actionview (4.2.7.1) 18 | activesupport (= 4.2.7.1) 19 | builder (~> 3.1) 20 | erubis (~> 2.7.0) 21 | rails-dom-testing (~> 1.0, >= 1.0.5) 22 | rails-html-sanitizer (~> 1.0, >= 1.0.2) 23 | activejob (4.2.7.1) 24 | activesupport (= 4.2.7.1) 25 | globalid (>= 0.3.0) 26 | activemodel (4.2.7.1) 27 | activesupport (= 4.2.7.1) 28 | builder (~> 3.1) 29 | activerecord (4.2.7.1) 30 | activemodel (= 4.2.7.1) 31 | activesupport (= 4.2.7.1) 32 | arel (~> 6.0) 33 | activesupport (4.2.7.1) 34 | i18n (~> 0.7) 35 | json (~> 1.7, >= 1.7.7) 36 | minitest (~> 5.1) 37 | thread_safe (~> 0.3, >= 0.3.4) 38 | tzinfo (~> 1.1) 39 | arel (6.0.4) 40 | ast (2.3.0) 41 | builder (3.2.3) 42 | cassandra-driver (3.2.0) 43 | ione (~> 1.2) 44 | concurrent-ruby (1.0.5) 45 | crass (1.0.3) 46 | erubis (2.7.0) 47 | execjs (2.7.0) 48 | globalid (0.4.1) 49 | activesupport (>= 4.2.0) 50 | hike (1.2.3) 51 | i18n (0.9.1) 52 | concurrent-ruby (~> 1.0) 53 | ione 
(1.2.4) 54 | json (1.8.6) 55 | loofah (2.1.1) 56 | crass (~> 1.0.2) 57 | nokogiri (>= 1.5.9) 58 | mail (2.7.0) 59 | mini_mime (>= 0.1.1) 60 | mini_mime (1.0.0) 61 | mini_portile2 (2.3.0) 62 | minitest (5.10.3) 63 | multi_json (1.12.2) 64 | nokogiri (1.8.1) 65 | mini_portile2 (~> 2.3.0) 66 | parallel (1.12.0) 67 | parser (2.4.0.2) 68 | ast (~> 2.3) 69 | powerpack (0.1.1) 70 | rack (1.6.8) 71 | rack-test (0.6.3) 72 | rack (>= 1.0) 73 | rails (4.2.7.1) 74 | actionmailer (= 4.2.7.1) 75 | actionpack (= 4.2.7.1) 76 | actionview (= 4.2.7.1) 77 | activejob (= 4.2.7.1) 78 | activemodel (= 4.2.7.1) 79 | activerecord (= 4.2.7.1) 80 | activesupport (= 4.2.7.1) 81 | bundler (>= 1.3.0, < 2.0) 82 | railties (= 4.2.7.1) 83 | sprockets-rails 84 | rails-deprecated_sanitizer (1.0.3) 85 | activesupport (>= 4.2.0.alpha) 86 | rails-dom-testing (1.0.8) 87 | activesupport (>= 4.2.0.beta, < 5.0) 88 | nokogiri (~> 1.6) 89 | rails-deprecated_sanitizer (>= 1.0.1) 90 | rails-html-sanitizer (1.0.3) 91 | loofah (~> 2.0) 92 | railties (4.2.7.1) 93 | actionpack (= 4.2.7.1) 94 | activesupport (= 4.2.7.1) 95 | rake (>= 0.8.7) 96 | thor (>= 0.18.1, < 2.0) 97 | rainbow (2.2.2) 98 | rake 99 | rake (12.3.0) 100 | rubocop (0.51.0) 101 | parallel (~> 1.10) 102 | parser (>= 2.3.3.1, < 3.0) 103 | powerpack (~> 0.1) 104 | rainbow (>= 2.2.2, < 3.0) 105 | ruby-progressbar (~> 1.7) 106 | unicode-display_width (~> 1.0, >= 1.0.1) 107 | ruby-kafka (0.2.0) 108 | ruby-progressbar (1.9.0) 109 | sass (3.2.19) 110 | sass-rails (4.0.5) 111 | railties (>= 4.0.0, < 5.0) 112 | sass (~> 3.2.2) 113 | sprockets (~> 2.8, < 3.0) 114 | sprockets-rails (~> 2.0) 115 | spring (2.0.2) 116 | activesupport (>= 4.2) 117 | sprockets (2.12.4) 118 | hike (~> 1.2) 119 | multi_json (~> 1.0) 120 | rack (~> 1.0) 121 | tilt (~> 1.1, != 1.3.0) 122 | sprockets-rails (2.3.3) 123 | actionpack (>= 3.0) 124 | activesupport (>= 3.0) 125 | sprockets (>= 2.8, < 4.0) 126 | thor (0.20.0) 127 | thread_safe (0.3.6) 128 | tilt (1.4.1) 129 | tzinfo (1.2.4) 
130 | thread_safe (~> 0.1) 131 | uglifier (3.2.0) 132 | execjs (>= 0.3.0, < 3) 133 | unicode-display_width (1.3.0) 134 | 135 | PLATFORMS 136 | ruby 137 | 138 | DEPENDENCIES 139 | cassandra-driver (~> 3.0) 140 | rails (~> 4.2.7.1) 141 | rubocop (~> 0.51.0) 142 | ruby-kafka (~> 0.2.0) 143 | sass-rails (~> 4.0.3) 144 | spring 145 | uglifier (>= 1.3.0) 146 | 147 | BUNDLED WITH 148 | 1.16.0 149 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tweeter 2 | 3 | Tweeter is a sample service that demonstrates how easy it is to run a Twitter-like service on DC/OS. 4 | 5 | Capabilities: 6 | 7 | * Stores tweets in Cassandra 8 | * Streams tweets to Kafka as they come in 9 | * Real time tweet analytics with Spark and Zeppelin 10 | 11 | ## Install Prerequisites on Your Machine 12 | 13 | To run the demo script (`cli_script.sh`), the following pieces of software are expected to be available: 14 | 15 | * dcos cli 16 | * cypress 17 | 18 | ### Installing cypress 19 | 20 | Cypress is a nodejs package that executes UI tests. This is used by the demo script to submit a tweet and verify it is displayed within the Tweeter UI. 21 | 22 | To install on OSX, perform the following from within this project's directory: 23 | 24 | ``` 25 | $ brew update 26 | $ brew install node 27 | $ npm install -g cypress-cli 28 | $ cypress install 29 | ``` 30 | 31 | You can run the UI tests separately with `cypress run`. 32 | 33 | ### Configuring cypress 34 | 35 | The demo script has a section that creates the JSON file `ci-conf.json`. This file is read by the cypress to determine the URL of the DC/OS cluster, the URL of the tweeter application, and the log in credentials to use. Without this file the UI tests will fail. 
36 | 37 | Example: 38 | 39 | ``` 40 | { 41 | "tweeter_url": "52.xx.xx.xx:10000", 42 | "url": "http://my-cool-demo.us-west-2.elb.amazonaws.com/", 43 | "username": "admin", 44 | "password": "password" 45 | } 46 | ``` 47 | 48 | ## Demo Cluster Prerequisites 49 | 50 | You'll need a DC/OS cluster with one public node and at least five private nodes and the DC/OS CLI locally installed. 51 | 52 | ## Scripted Demo 53 | 54 | `cli_script.sh` in this repository can be utilized to setup a tweeter demo 55 | cluster automatically and provide some random traffic, however the GUI experience with Zeppelin analytics and DC/OS feature presentation must still be done by hand: 56 | * Install Zeppelin from the GUI using the default values 57 | * Log into the Tweeter UI at http://[elb_hostname] and post a sample tweet 58 | * Start the tweeter load job from the CLI using the command dcos/bin/dcos marathon app add post-tweets.json 59 | * Kill one of the Tweeter containers in Marathon and show that the Tweeter is still up and tweets are still flowing in 60 | * Log into Zeppelin using the https interface at https://[master_ip]/service/zeppelin 61 | * Click Import note and import tweeter-analytics.json from the Tweeter repo clone you made locally 62 | * Open the newly loaded "Tweeter Analytics" Notebook 63 | * Run the Load Dependencies step to load the required libraries into Zeppelin 64 | * Run the Spark Streaming step, which reads the tweet stream from Zookeeper, and puts them into a temporary table that can be queried using SparkSQL - this spins up the Zeppelin spark context so you can show them the increased utilization on the dashboard 65 | * Next, run the Top Tweeters SQL query, which counts the number of tweets per user, using the table created in the previous step 66 | * The table updates continuously as new tweets come in, so re-running the query will produce a different result every time 67 | 68 | 69 | ### Stage EBC Demo 70 | Run Tweeter against an EE cluster, but do not start 
Zeppelin or post tweets 71 | ``` 72 | $ bash cli_script.sh --infra --url http://my.dcos.url 73 | ``` 74 | 75 | ### Get Manual Demo script and Run Nothing 76 | This combination of options will not actually run the demo but stop and prompt you with the appropriate CLI command to do to run the demo. Note, by specifying -oss without a DCOS\_AUTH\_TOKEN set, the dummy CI auth token will be used 77 | ``` 78 | $ bash cli_script.sh --step --manual --oss --url http://my.dcos.url 79 | ``` 80 | 81 | ### Open DC/OS Tweeter Demo Setup 82 | The steps below are applicable for Open DC/OS, when it does not have a super 83 | set. The auth token we set below 84 | 85 | #### Login to dcos and copy the auth token: 86 | ``` 87 | $ dcos auth login 88 | 89 | Please go to the following link in your browser: 90 | 91 | http://54.70.182.15/login?redirect_uri=urn:ietf:wg:oauth:2.0:oob 92 | 93 | Enter authentication token: 94 | 95 | ``` 96 | 97 | If you wish to access this token again later, use the cli command: 98 | ``` 99 | dcos config show core.dcos_acs_token 100 | ``` 101 | 102 | #### Set DCOS Auth Token to the environment variable 103 | 104 | ``` 105 | export DCOS_AUTH_TOKEN= 106 | ``` 107 | #### Run the cli script 108 | ``` 109 | $ ./cli_script.sh --oss --url http://52.70.182.15 110 | ``` 111 | 112 | ## Manual Test Steps 113 | ### Install the cluster prereqs 114 | ``` 115 | $ dcos package install marathon-lb 116 | $ dcos package install cassandra --cli 117 | $ dcos package install kafka --cli 118 | $ dcos package install zeppelin 119 | ``` 120 | 121 | Wait until the Kafka and Cassandra services are healthy. You can check their status with: 122 | 123 | ``` 124 | $ dcos kafka endpoints broker 125 | ... 126 | $ dcos cassandra endpoints node 127 | ... 128 | ``` 129 | 130 | ### Edit the Tweeter Service Config 131 | 132 | Edit the `HAPROXY_0_VHOST` label in `tweeter.json` to match your public ELB hostname. 
If you are running DC/OS 1.8 or DC/OS 1.9, please use the `tweeter.json` file located in the 1.8 or 1.9 folder. Be sure to remove the leading `http://` and the trailing `/` For example: 133 | 134 | ```json 135 | { 136 | "labels": { 137 | "HAPROXY_GROUP": "external", 138 | "HAPROXY_0_VHOST": "brenden-7-publicsl-1dnroe89snjkq-221614774.us-west-2.elb.amazonaws.com" 139 | } 140 | } 141 | ``` 142 | 143 | ### Run the Tweeter Service 144 | 145 | Launch three instances of Tweeter on Marathon using the config file in this repo: 146 | 147 | ``` 148 | $ dcos marathon app add tweeter.json 149 | ``` 150 | 151 | The service talks to Cassandra via `node-0-server.cassandra.autoip.dcos.thisdcos.directory`, and Kafka via `broker.kafka.l4lb.thisdcos.directory:9092` in this example. 152 | 153 | Traffic is routed to the service via marathon-lb. Navigate to `http://` to see the Tweeter UI and post a Tweet. 154 | 155 | 156 | ### Post a lot of Tweets 157 | 158 | Post a lot of Shakespeare tweets from a file: 159 | 160 | ``` 161 | dcos marathon app add post-tweets.json 162 | ``` 163 | 164 | This will post more than 100k tweets one by one, so you'll see them coming in steadily when you refresh the page. Take a look at the Networking page on the UI to see the load balancing in action. 165 | 166 | 167 | ### Streaming Analytics 168 | 169 | Next, we'll do real-time analytics on the stream of tweets coming in from Kafka. 170 | 171 | Navigate to Zeppelin at `https:///service/zeppelin/`, click `Import note` and import `tweeter-analytics.json`. Zeppelin is preconfigured to execute Spark jobs on the DCOS cluster, so there is no further configuration or setup required. 172 | 173 | Run the *Load Dependencies* step to load the required libraries into Zeppelin. Next, run the *Spark Streaming* step, which reads the tweet stream from Zookeeper, and puts them into a temporary table that can be queried using SparkSQL. 
Next, run the *Top Tweeters* SQL query, which counts the number of tweets per user, using the table created in the previous step. The table updates continuously as new tweets come in, so re-running the query will produce a different result every time. 174 | 175 | 176 | NOTE: if /service/zeppelin is showing as Disconnected (and hence can’t load the notebook), make sure you're using HTTPS instead of HTTP, until [this PR](https://github.com/dcos/dcos/pull/27) gets merged. Alternatively, you can use marathon-lb. To do this, add the following labels to the Zeppelin service and restart: 177 | 178 | 179 | `HAPROXY_0_VHOST = [elb hostname]` 180 | 181 | `HAPROXY_GROUP = external` 182 | 183 | You can get the ELB hostname from the CCM “Public Server” link. Once Zeppelin restarts, this should allow you to use that link to reach the Zeppelin GUI in “connected” mode. 184 | 185 | ## Developing Tweeter 186 | 187 | You'll need Ruby and a couple of libraries on your local machine to hack on this service. If you just want to run the demo, you don't need this. 188 | 189 | ### Homebrew on Mac OS X 190 | 191 | Using Homebrew, install `rbenv`, a Ruby version manager: 192 | 193 | ```bash 194 | $ brew update 195 | $ brew install rbenv 196 | ``` 197 | 198 | Run this command and follow the instructions to setup your environment: 199 | 200 | ```bash 201 | $ rbenv init 202 | ``` 203 | 204 | To install the required Ruby version for Tweeter, run from inside this repo: 205 | 206 | ```bash 207 | $ rbenv install 208 | ``` 209 | 210 | Then install the Ruby package manager and Tweeter's dependencies. 
From this repo run: 211 | 212 | ```bash 213 | $ gem install bundler 214 | $ bundle install 215 | ``` 216 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # Add your own tasks in files placed in lib/tasks ending in .rake, 2 | # for example lib/tasks/capistrano.rake, and they will 3 | # automatically be available to Rake. 4 | 5 | require File.expand_path('../config/application', __FILE__) 6 | 7 | Rails.application.load_tasks 8 | -------------------------------------------------------------------------------- /app/assets/stylesheets/_tweeter.css: -------------------------------------------------------------------------------- 1 | .profile-heading { 2 | margin-bottom: 0; 3 | } 4 | 5 | .tweet-content:last-child { 6 | margin-bottom: 0; 7 | } 8 | -------------------------------------------------------------------------------- /app/assets/stylesheets/application.css: -------------------------------------------------------------------------------- 1 | /* 2 | * This is a manifest file that'll be compiled into application.css, which will include all the files 3 | * listed below. 4 | * 5 | * Any CSS and SCSS file within this directory, lib/assets/stylesheets, vendor/assets/stylesheets, 6 | * or vendor/assets/stylesheets of plugins, if any, can be referenced here using a relative path. 7 | * 8 | * You're free to add application-wide styles to this file and they'll appear at the bottom of the 9 | * compiled file so the styles you add here take precedence over styles defined in any styles 10 | * defined in the other CSS/SCSS files in this directory. It is generally better to create a new 11 | * file per style scope. 12 | * 13 | *= require_tree . 
14 | *= require_self 15 | */ 16 | -------------------------------------------------------------------------------- /app/controllers/analytics_controller.rb: -------------------------------------------------------------------------------- 1 | # Analytics controller 2 | class AnalyticsController < ActionController::Base 3 | layout 'application' 4 | 5 | def index 6 | @analytics = Analytics.all 7 | end 8 | end 9 | -------------------------------------------------------------------------------- /app/controllers/application_controller.rb: -------------------------------------------------------------------------------- 1 | # Main application controller 2 | class ApplicationController < ActionController::Base 3 | # Prevent CSRF attacks by raising an exception. 4 | # For APIs, you may want to use :null_session instead. 5 | protect_from_forgery with: :exception 6 | end 7 | -------------------------------------------------------------------------------- /app/controllers/tweets_controller.rb: -------------------------------------------------------------------------------- 1 | # Tweets controller 2 | class TweetsController < ActionController::Base 3 | layout 'application' 4 | 5 | def create 6 | @tweet = Tweet.create(tweet_params) 7 | log_tweet(@tweet) 8 | redirect_to root_path 9 | end 10 | 11 | def destroy 12 | Tweet.find(params[:id]).destroy 13 | redirect_to root_path 14 | end 15 | 16 | def index 17 | @tweets = Tweet.all(params[:paged].present?) 
18 | end 19 | 20 | def show 21 | @tweet = Tweet.find(params[:id]) 22 | end 23 | 24 | private 25 | 26 | def tweet_params 27 | params.require(:tweet).permit(:content, :handle) 28 | end 29 | 30 | def log_tweet(tweet) 31 | # TODO move producer setup out of request/response cycle 32 | kafka = Kafka.new(KAFKA_OPTIONS) 33 | producer = kafka.producer 34 | producer.produce(tweet.to_json, topic: KAFKA_TOPIC) 35 | producer.deliver_messages 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /app/models/analytics.rb: -------------------------------------------------------------------------------- 1 | require 'cassandra' 2 | require 'time' 3 | 4 | # Analytics class that talks to Cassandra 5 | class Analytics 6 | @@cluster = Cassandra.cluster(CASSANDRA_OPTIONS) 7 | @@keyspace = 'tweeter' 8 | @@session = @@cluster.connect(@@keyspace) 9 | @@paging_state = nil 10 | 11 | attr_accessor :key, :frequency 12 | 13 | def self.all(paged = false) 14 | results = @@session.execute( 15 | 'SELECT key, frequency FROM analytics ' \ 16 | 'WHERE kind = ? ORDER BY frequency DESC', 17 | arguments: ['tweet'], 18 | page_size: 25, 19 | paging_state: (paged ? 
@@paging_state : nil) 20 | ) 21 | @@paging_state = results.paging_state 22 | results.map do |anal| 23 | c = Analytics.new 24 | c.key, c.frequency = anal['key'], anal['frequency'] 25 | c 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /app/models/tweet.rb: -------------------------------------------------------------------------------- 1 | require 'cassandra' 2 | require 'time' 3 | 4 | # Tweet class that talks to Cassandra 5 | class Tweet 6 | include ActiveModel::Serialization 7 | 8 | @@cluster = Cassandra.cluster(CASSANDRA_OPTIONS) 9 | @@keyspace = 'tweeter' 10 | @@session = @@cluster.connect(@@keyspace) 11 | @@generator = Cassandra::Uuid::Generator.new 12 | @@paging_state = nil 13 | 14 | attr_accessor :id, :content, :created_at, :handle 15 | 16 | def avatar_url 17 | "//robohash.org/#{handle}.png?size=144x144&bgset=bg2" 18 | end 19 | 20 | def attributes 21 | {'id' => id, 'content' => content, 'created_at' => created_at, 'handle' => handle} 22 | end 23 | 24 | def destroy 25 | @@session.execute( 26 | 'DELETE from tweets WHERE id = ?', 27 | arguments: [@id]) 28 | end 29 | 30 | def self.all(paged = false) 31 | result = @@session.execute( 32 | 'SELECT id, content, created_at, handle FROM tweets ' \ 33 | 'WHERE kind = ? ORDER BY created_at DESC', 34 | arguments: ['tweet'], 35 | page_size: 25, 36 | paging_state: (paged ? 
@@paging_state : nil) 37 | ) 38 | @@paging_state = result.paging_state 39 | result.map do |tweet| 40 | c = Tweet.new 41 | c.id, c.content, c.handle = tweet['id'], tweet['content'], tweet['handle'] 42 | c.created_at = tweet['created_at'].to_time.utc.iso8601 43 | c 44 | end 45 | end 46 | 47 | def self.create(params) 48 | c = Tweet.new 49 | c.id = SecureRandom.urlsafe_base64 50 | c.content = params[:content] 51 | cassandra_time = @@generator.now 52 | c.created_at = cassandra_time.to_time.utc.iso8601 53 | c.handle = params[:handle].downcase 54 | @@session.execute( 55 | 'INSERT INTO tweets (kind, id, content, created_at, handle) ' \ 56 | 'VALUES (?, ?, ?, ?, ?)', 57 | arguments: ['tweet', c.id, c.content, cassandra_time, c.handle]) 58 | c 59 | end 60 | 61 | def self.find(id) 62 | tweet = @@session.execute( 63 | 'SELECT id, content, created_at, handle FROM tweets WHERE id = ?', 64 | arguments: [id]).first 65 | c = Tweet.new 66 | c.id = tweet['id'] 67 | c.content = tweet['content'] 68 | c.created_at = tweet['created_at'].to_time.utc.iso8601 69 | c.handle = tweet['handle'] 70 | c 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /app/views/analytics/index.html.erb: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |
5 | <% if @analytics.length < 1 %> 6 |

7 | You haven't done any analyzing, or you have no tweets to analyze! 8 |

9 | <% else %> 10 | <% @analytics.each do |anal| %> 11 |
12 | <%= anal.frequency %> 13 |

<%= anal.key %>

14 |
15 | <% end %> 16 | <% end %> 17 |
18 |
19 |
20 |
-------------------------------------------------------------------------------- /app/views/layouts/application.html.erb: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Tweeter 6 | <%= stylesheet_link_tag 'application', media: 'all' %> 7 | <%= csrf_meta_tags %> 8 | 9 | 10 | 20 | <%= yield %> 21 | 22 | 23 | -------------------------------------------------------------------------------- /app/views/tweets/index.html.erb: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |
5 |
6 | <%= form_tag("/tweets", :method => "post") do |form| %> 7 |
8 | <%= label_tag "tweet[handle]", "Handle" %> 9 | <%= text_field_tag "tweet[handle]", nil, 10 | :autofocus => true, 11 | :class => "form-control", 12 | :maxlength => 15, 13 | :required => true, 14 | :pattern => "^[\\S]*$", 15 | :title => "Handles cannot contain spaces" %> 16 |
17 |
18 | <%= label_tag "tweet[content]", "Compose new Tweet" %> 19 | <%= text_area_tag "tweet[content]", nil, 20 | :class => "form-control", 21 | :maxlength => 140, 22 | :required => true, 23 | :style => "resize: vertical;" %> 24 |
25 |
26 | <%= submit_tag("Tweet", :class => "btn btn-primary btn-sm pull-right") %> 27 |
28 | <% end %> 29 |
30 |
31 |
32 |
33 |
34 | <% if @tweets.length < 1 %> 35 |

36 | What are you waiting for?
Start tweeting. 37 |

38 | <% else %> 39 | <% @tweets.each do |tweet| %> 40 |
41 |
42 |
43 | <%= tweet.handle %> 45 |
46 |
47 |
48 |
49 | <%= form_tag(tweet_path(tweet.id), :method => "delete") do |form| %> 50 | <%= hidden_field("tweet[id]", tweet.id) %> 51 | 52 | <% end %> 53 |
54 | @<%= tweet.handle %> · 55 | 56 | 57 | <% created_at = tweet.created_at %> 58 | 61 | 62 | 63 |
64 | <%= simple_format tweet.content, :class => "tweet-content" %> 65 |
66 |
67 |
68 | <% end %> 69 | <% end %> 70 |
71 |
72 |
73 |
74 | -------------------------------------------------------------------------------- /app/views/tweets/show.html.erb: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | <%= @tweet.handle %> 10 |
11 |
12 |
13 |
14 | <%= form_tag(tweet_path(@tweet.id), :method => "delete") do |form| %> 15 | <%= hidden_field("tweet[id]", @tweet.id) %> 16 | <%= submit_tag("X", :class => "btn btn-link btn-xs text-muted", :title => "Delete tweet") %> 17 | <% end %> 18 |
19 | @<%= @tweet.handle %> 20 |
21 | <%= simple_format @tweet.content, :class => "tweet-content lead" %> 22 |
23 |
24 |
25 | <% created_at = @tweet.created_at %> 26 | 29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | 37 | -------------------------------------------------------------------------------- /bin/bundle: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) 3 | load Gem.bin_path('bundler', 'bundle') 4 | -------------------------------------------------------------------------------- /bin/rails: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | begin 3 | load File.expand_path('../spring', __FILE__) 4 | end 5 | APP_PATH = File.expand_path('../../config/application', __FILE__) 6 | require_relative '../config/boot' 7 | require 'rails/commands' 8 | -------------------------------------------------------------------------------- /bin/rake: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | begin 3 | load File.expand_path('../spring', __FILE__) 4 | end 5 | require_relative '../config/boot' 6 | require 'rake' 7 | Rake.application.run 8 | -------------------------------------------------------------------------------- /bin/spring: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | # This file loads spring without using Bundler, in order to be fast 4 | # It gets overwritten when you run the `spring binstub` command 5 | 6 | unless defined?(Spring) 7 | require 'rubygems' 8 | require 'bundler' 9 | 10 | match = Bundler.default_lockfile.read.match( 11 | /^GEM$.*?^ spring \((.*?)\)$.*?^$/m 12 | ) 13 | if match 14 | bundle = [Bundler.bundle_path.to_s] + Gem.path 15 | ENV['GEM_PATH'] = bundle.join(File::PATH_SEPARATOR) 16 | ENV['GEM_HOME'] = '' 17 | Gem.paths = ENV 18 | 19 | gem 'spring', match[1] 20 | require 'spring/binstub' 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /bin/tweet: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | while read json 4 | do 5 | curl -ksS -H 'Content-Type: application/json' --data-binary "$json" "$2/tweets" > /dev/null 6 | echo $json 7 | done < "$1" 8 | -------------------------------------------------------------------------------- /cassandra-clear.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/cassandra-clear", 3 | "container": { 4 | "type": "DOCKER", 5 | "docker": { 6 | "image": "spotify/cassandra", 7 | "network": "HOST" 8 | } 9 | }, 10 | "cmd": "/usr/bin/cqlsh -e \"TRUNCATE tweeter.tweets; TRUNCATE tweeter.analytics;\" cassandra-dcos-node.cassandra.dcos.mesos 9160 && sleep 99999999", 11 | "cpus": 0.1, 12 | "mem": 256.0 13 | } -------------------------------------------------------------------------------- /cli_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -o errexit 3 | # Set stable package versions here 4 | CASSANDRA_STABLE=${CASSANDRA_STABLE:='1.0.12-2.2.5'} 5 | KAFKA_STABLE=${KAFKA_STABLE:='1.1.9-0.10.0.0'} 6 | MARATHON_LB_STABLE=${MARATHON_LB_STABLE:='1.3.5'} 7 | ZEPPELIN_STABLE=${ZEPPELIN_STABLE:='0.6.0'} 8 | 9 | USAGE="$(basename "$0") [-h|--help] [--step --manual --stable --infra] 10 | Script for setting up the Tweeter demo in DC/OS 11 | 12 | Current Stable Package Versions: 13 | Cassandra: $CASSANDRA_STABLE 14 | Kafka: $KAFKA_STABLE 15 | Marathon-LB: $MARATHON_LB_STABLE 16 | Zeppelin: $ZEPPELIN_STABLE 17 | 18 | Requirements: 19 | - DC/OS cluster with 1 public slave and 5 private slaves 20 | - DCOS CLI installed on localhost 21 | 22 | Credentials: 23 | Enterprise 24 | - AWS default bootstrapuser/deleteme 25 | - Override with --user & --pw 26 | OSS 27 | - Super long lived OAuth token used in CI 28 | - Override with DCOS_AUTH_TOKEN set in env 29 | 30 | Options: 31 | -h, --help Prints this help message 32 | --stable Runs with 
set stable packages listed above. To override, 33 | use the above key as an environment variable. 34 | E.G. CASSANDRA_STABLE='1.0.12-2.2.5' $(basename "$0") 35 | --infra Exit after installing infrastructure for Tweeter and 36 | leave the installation of Tweeter app to user 37 | --step Pause after all DC/OS actions until user acknowledges 38 | --manual Do not actually run any of the steps (allows user to) 39 | --url Target DC/OS master to run script against 40 | --oss Target DC/OS installation is an Open Source deployment 41 | --user DC/OS Enterprise username (default: bootstrapuser) 42 | --pw DC/OS Enterprise password (default: deleteme) 43 | --cypress Run cypress UI tests 44 | " 45 | # Instantiate default options 46 | USE_STABLE=false 47 | INFRA_ONLY=false 48 | STEP_MODE=false 49 | MANUAL_MODE=false 50 | DCOS_OSS=false 51 | RUN_CYPRESS=false 52 | 53 | # Command Line Handler 54 | while [[ $# -gt 0 ]]; do 55 | key="$1" 56 | 57 | case $key in 58 | --stable) 59 | USE_STABLE=true ;; 60 | --infra) 61 | INFRA_ONLY=true ;; 62 | --step) 63 | STEP_MODE=true ;; 64 | --manual) 65 | MANUAL_MODE=true ;; 66 | --oss) 67 | DCOS_OSS=true ;; 68 | --cypress) 69 | RUN_CYPRESS=true ;; 70 | --url) 71 | DCOS_URL="$2" 72 | shift ;; 73 | --user) 74 | DCOS_USER="$2" 75 | shift ;; 76 | --pw) 77 | DCOS_PW="$2" 78 | shift ;; 79 | -h|--help) 80 | echo "$USAGE" 81 | exit 0 ;; 82 | *) 83 | echo "Unrecognized option: $key" 84 | echo "$USAGE" >&2 85 | exit 1 ;; 86 | esac 87 | shift 88 | done 89 | 90 | # Required input checks 91 | if [[ -z $DCOS_URL ]]; then 92 | echo "DCOS_URL is not set! 
Provide with --url or DCOS_URL env-var" 93 | exit 1 94 | fi 95 | 96 | ci_auth_token='eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik9UQkVOakZFTWtWQ09VRTRPRVpGTlRNMFJrWXlRa015Tnprd1JrSkVRemRCTWpBM1FqYzVOZyJ9.eyJlbWFpbCI6ImFsYmVydEBiZWtzdGlsLm5ldCIsImVtYWlsX3ZlcmlmaWVkIjp0cnVlLCJpc3MiOiJodHRwczovL2Rjb3MuYXV0aDAuY29tLyIsInN1YiI6Imdvb2dsZS1vYXV0aDJ8MTA5OTY0NDk5MDExMTA4OTA1MDUwIiwiYXVkIjoiM3lGNVRPU3pkbEk0NVExeHNweHplb0dCZTlmTnhtOW0iLCJleHAiOjIwOTA4ODQ5NzQsImlhdCI6MTQ2MDE2NDk3NH0.OxcoJJp06L1z2_41_p65FriEGkPzwFB_0pA9ULCvwvzJ8pJXw9hLbmsx-23aY2f-ydwJ7LSibL9i5NbQSR2riJWTcW4N7tLLCCMeFXKEK4hErN2hyxz71Fl765EjQSO5KD1A-HsOPr3ZZPoGTBjE0-EFtmXkSlHb1T2zd0Z8T5Z2-q96WkFoT6PiEdbrDA-e47LKtRmqsddnPZnp0xmMQdTr2MjpVgvqG7TlRvxDcYc-62rkwQXDNSWsW61FcKfQ-TRIZSf2GS9F9esDF4b5tRtrXcBNaorYa9ql0XAWH5W_ct4ylRNl3vwkYKWa4cmPvOqT5Wlj9Tf0af4lNO40PQ' 97 | DCOS_AUTH_TOKEN=${DCOS_AUTH_TOKEN:=$ci_auth_token} 98 | DCOS_USER=${DCOS_USER:='bootstrapuser'} 99 | DCOS_PW=${DCOS_PW:='deleteme'} 100 | 101 | demo_eval() { 102 | if $MANUAL_MODE; then 103 | printf "### Execute the following command: ###\n\n" 104 | # replace % in arg with %% to prevent printf interpretation 105 | printf "${1//\%/\%\%}\n\n" 106 | printf "######################################\n" 107 | else 108 | log_msg "Executing: $1" 109 | eval $1 110 | fi 111 | user_continue 112 | } 113 | 114 | user_continue() { 115 | if $STEP_MODE; then 116 | read -p 'Continue? (y/n) ' resp 117 | case $resp in 118 | y) return ;; 119 | n) exit 0 ;; 120 | *) user_continue ;; 121 | esac 122 | fi 123 | return 124 | } 125 | 126 | is_running() { 127 | status=`dcos marathon app list | grep $1 | awk '{print $6}'` 128 | if [[ $status = '---' ]]; then 129 | return 0 130 | else 131 | return 1 132 | fi 133 | } 134 | 135 | log_msg() { 136 | echo `date -u +'%D %T'`: $1 137 | } 138 | 139 | wait_for_deployment() { 140 | for service in $*; do 141 | until is_running $service; do 142 | log_msg "Wait for $service to finish deploying..." 
143 | sleep 3 144 | done 145 | done 146 | } 147 | 148 | ee_login() { 149 | cat < /dev/null || ( echo 'DC/OS must be installed!' && exit 1 ) 170 | 171 | # Setup access to the desired DCOS cluster and install marathon lb 172 | log_msg "Setting DCOS CLI to use $DCOS_URL" 173 | if $DCOS_OSS; then 174 | log_msg "Starting DC/OS OSS Demo" 175 | log_msg "Override default credentials with DCOS_AUTH_TOKEN" 176 | oss_login 177 | else 178 | log_msg "Starting DC/OS Enterprise Demo" 179 | log_msg "Override default credentials with --user and --pw" 180 | demo_eval ee_login 181 | # Get the dcos EE CLI 182 | demo_eval 'dcos package install --cli --yes dcos-enterprise-cli' 183 | demo_eval 'dcos security org service-accounts keypair -l 4096 k.priv k.pub' 184 | demo_eval 'dcos security org service-accounts create -p k.pub -d "Marathon LB" dcos_marathon_lb' 185 | demo_eval 'dcos security secrets create-sa-secret k.priv dcos_marathon_lb marathon-lb' 186 | log_msg "Get auth headers to do calls outside of DC/OS CLI (ACLs)" 187 | auth_t=`dcos config show core.dcos_acs_token` 188 | log_msg "Received auth token: $auth_t" 189 | auth_h="Authorization: token=$auth_t" 190 | 191 | # Make our ACLs 192 | demo_eval "curl -skSL -X PUT -H 'Content-Type: application/json' -d '{\"description\":\"Marathon admin events\"}' -H \"$auth_h\" $DCOS_URL/acs/api/v1/acls/dcos:service:marathon:marathon:admin:events" 193 | demo_eval "curl -skSL -X PUT -H 'Content-Type: application/json' -d '{\"description\":\"Marathon all services\"}' -H \"$auth_h\" $DCOS_URL/acs/api/v1/acls/dcos:service:marathon:marathon:services:%252F" 194 | # Add our dcos_marathon_lb service account to the ACLs 195 | demo_eval "curl -skSL -X PUT -H \"$auth_h\" $DCOS_URL/acs/api/v1/acls/dcos:service:marathon:marathon:admin:events/users/dcos_marathon_lb/read" 196 | demo_eval "curl -skSL -X PUT -H \"$auth_h\" $DCOS_URL/acs/api/v1/acls/dcos:service:marathon:marathon:services:%252F/users/dcos_marathon_lb/read" 197 | 198 | cat < options.json 199 | { 
200 | "marathon-lb": { 201 | "secret_name": "marathon-lb" 202 | } 203 | } 204 | EOF 205 | fi 206 | 207 | if $INFRA_ONLY; then 208 | install_packages=(marathon-lb cassandra kafka) 209 | else 210 | install_packages=(marathon-lb cassandra kafka zeppelin) 211 | fi 212 | 213 | for pkg in ${install_packages[*]}; do 214 | cmd="dcos --log-level=ERROR package install --yes" 215 | if [[ $pkg = 'marathon-lb' ]] && ! $DCOS_OSS; then 216 | cmd="$cmd --options=options.json" 217 | fi 218 | cmd="$cmd $pkg" 219 | if $USE_STABLE; then 220 | key="${pkg^^}_STABLE" 221 | key="${key//-/_}" # replace - with _ for marathon-lb 222 | eval ver='$'$key 223 | cmd="$cmd --package-version=$ver" 224 | fi 225 | demo_eval "$cmd" 226 | done 227 | 228 | # query until services are listed as running 229 | wait_for_deployment ${install_packages[*]} 230 | 231 | # once running, deploy tweeter app and then post to it 232 | demo_eval "dcos marathon app add tweeter.json" 233 | wait_for_deployment tweeter 234 | 235 | # get the public IP of the public node if unset 236 | cat < public-ip.json 237 | { 238 | "id": "/public-ip", 239 | "cmd": "curl http://169.254.169.254/latest/meta-data/public-ipv4 && sleep 3600", 240 | "cpus": 0.25, 241 | "mem": 32, 242 | "instances": 1, 243 | "acceptedResourceRoles": [ 244 | "slave_public" 245 | ] 246 | } 247 | EOF 248 | demo_eval "dcos marathon app add public-ip.json" 249 | wait_for_deployment public-ip 250 | public_ip_str=`dcos task log --lines=1 public-ip` 251 | public_ip="${public_ip_str##* }" 252 | demo_eval "dcos marathon app remove public-ip" 253 | 254 | log_msg "Tweeter home page can be found at: http://$public_ip:10000/" 255 | log_msg "Zeppelin can be found at: $DCOS_URL/service/zeppelin" 256 | 257 | if $INFRA_ONLY; then 258 | log_msg "To post to tweeter, do: dcos marathon app add post-tweets.json" 259 | log_msg "Infrastructure setup complete! Exiting setup..." 
260 | exit 0 261 | fi 262 | 263 | demo_eval "dcos marathon app add post-tweets.json" 264 | wait_for_deployment post-tweets 265 | 266 | # short sleep to make sure tweets are posted 267 | sleep 30 268 | 269 | # Run cypress tests if user opted-in. 270 | if $RUN_CYPRESS; then 271 | if $DCOS_OSS; then 272 | cat < ci-conf.json 273 | { 274 | "tweeter_url": "${public_ip}:10000", 275 | "url": "${DCOS_URL}" 276 | } 277 | EOF 278 | else 279 | cat < ci-conf.json 280 | { 281 | "tweeter_url": "${public_ip}:10000", 282 | "url": "${DCOS_URL}", 283 | "username": "${DCOS_USER}", 284 | "password": "${DCOS_PW}" 285 | } 286 | EOF 287 | fi 288 | 289 | if (cypress --help &> /dev/null); then 290 | log_msg "Running cypress tests" 291 | demo_eval "yes | cypress update" 292 | demo_eval "yes | cypress run" 293 | else 294 | log_msg "cypress is not installed; skipping..." 295 | fi 296 | fi 297 | 298 | # Now that tweets have been posted and the site is up, make sure it all works: 299 | log_msg "Pulling Tweets from $public_ip:10000" 300 | tweet_count=`curl -sSlvf $public_ip:10000 | grep 'class="tweet-content"' | wc -l` 301 | if [[ $tweet_count > 0 ]]; then 302 | log_msg "Tweeter is up and running; $tweet_count tweets shown" 303 | exit 0 304 | else 305 | log_msg "Failure: No tweets found!" 306 | exit 1 307 | fi 308 | -------------------------------------------------------------------------------- /config.ru: -------------------------------------------------------------------------------- 1 | # This file is used by Rack-based servers to start the application. 
2 | 3 | require ::File.expand_path('../config/environment', __FILE__) 4 | run Rails.application 5 | -------------------------------------------------------------------------------- /config/application.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path('../boot', __FILE__) 2 | 3 | # require 'rails/all' 4 | 5 | # Include individual models to prevent initializing ActiveRecord 6 | # See: http://stackoverflow.com/a/19078854/368697 7 | require 'action_controller/railtie' 8 | require 'active_model/railtie' 9 | require 'rails/test_unit/railtie' 10 | require 'sprockets/railtie' 11 | 12 | # Require the gems listed in Gemfile, including any gems 13 | # you've limited to :test, :development, or :production. 14 | Bundler.require(*Rails.groups) 15 | 16 | module RailsOnMesos 17 | class Application < Rails::Application 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /config/boot.rb: -------------------------------------------------------------------------------- 1 | # Set up gems listed in the Gemfile. 2 | ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) 3 | 4 | require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) 5 | -------------------------------------------------------------------------------- /config/environment.rb: -------------------------------------------------------------------------------- 1 | # Load the Rails application. 2 | require File.expand_path('../application', __FILE__) 3 | 4 | # Initialize the Rails application. 5 | Rails.application.initialize! 6 | -------------------------------------------------------------------------------- /config/environments/development.rb: -------------------------------------------------------------------------------- 1 | Rails.application.configure do 2 | # In the development environment your application's code is reloaded on 3 | # every request. 
This slows down response time but is perfect for development 4 | # since you don't have to restart the web server when you make code changes. 5 | config.cache_classes = false 6 | 7 | # Do not eager load code on boot. 8 | config.eager_load = false 9 | 10 | # Show full error reports and disable caching. 11 | config.consider_all_requests_local = true 12 | config.action_controller.perform_caching = false 13 | 14 | # Don't care if the mailer can't send. 15 | # config.action_mailer.raise_delivery_errors = false 16 | 17 | # Print deprecation notices to the Rails logger. 18 | config.active_support.deprecation = :log 19 | 20 | # Debug mode disables concatenation and preprocessing of assets. 21 | # This option may cause significant delays in view rendering with a large 22 | # number of complex assets. 23 | config.assets.debug = true 24 | 25 | # Adds additional error checking when serving assets at runtime. 26 | # Checks for improperly declared sprockets dependencies. 27 | # Raises helpful error messages. 28 | config.assets.raise_runtime_errors = true 29 | 30 | # Raises error for missing translations 31 | # config.action_view.raise_on_missing_translations = true 32 | end 33 | -------------------------------------------------------------------------------- /config/environments/production.rb: -------------------------------------------------------------------------------- 1 | Rails.application.configure do 2 | # Code is not reloaded between requests. 3 | config.cache_classes = true 4 | 5 | # Eager load code on boot. This eager loads most of Rails and 6 | # your application in memory, allowing both threaded web servers 7 | # and those relying on copy on write to perform better. 8 | # Rake tasks automatically ignore this option for performance. 9 | config.eager_load = true 10 | 11 | # Full error reports are disabled and caching is turned on. 
12 | config.consider_all_requests_local = false 13 | config.action_controller.perform_caching = true 14 | 15 | # Disable Rails's static asset server (Apache or nginx will already do this). 16 | config.serve_static_files = true 17 | 18 | # Compress JavaScripts and CSS. 19 | config.assets.js_compressor = :uglifier 20 | 21 | # Do not fallback to assets pipeline if a precompiled asset is missed. 22 | config.assets.compile = false 23 | 24 | # Generate digests for assets URLs. 25 | config.assets.digest = true 26 | 27 | config.assets.version = '1.0' 28 | 29 | # Set to :debug to see everything in the log. 30 | config.log_level = :info 31 | 32 | config.i18n.fallbacks = true 33 | 34 | # Send deprecation notices to registered listeners. 35 | config.active_support.deprecation = :notify 36 | 37 | # Log to stdout 38 | config.logger = ActiveSupport::Logger.new(STDOUT) 39 | 40 | # Use default logging formatter so that PID and timestamp are not suppressed. 41 | config.log_formatter = ::Logger::Formatter.new 42 | 43 | # Do not dump schema after migrations. 44 | # config.active_record.dump_schema_after_migration = false 45 | end 46 | -------------------------------------------------------------------------------- /config/environments/test.rb: -------------------------------------------------------------------------------- 1 | Rails.application.configure do 2 | # The test environment is used exclusively to run your application's 3 | # test suite. You never need to work with it otherwise. Remember that 4 | # your test database is "scratch space" for the test suite and is wiped 5 | # and recreated between test runs. Don't rely on the data there! 6 | config.cache_classes = true 7 | 8 | # Do not eager load code on boot. This avoids loading your whole application 9 | # just for the purpose of running a single test. If you are using a tool that 10 | # preloads Rails for running tests, you may have to set it to true. 
11 | config.eager_load = false 12 | 13 | # Configure static asset server for tests with Cache-Control for performance. 14 | config.serve_static_files = true 15 | config.static_cache_control = 'public, max-age=3600' 16 | 17 | # Show full error reports and disable caching. 18 | config.consider_all_requests_local = true 19 | config.action_controller.perform_caching = false 20 | 21 | # Raise exceptions instead of rendering exception templates. 22 | config.action_dispatch.show_exceptions = false 23 | 24 | # Disable request forgery protection in test environment. 25 | config.action_controller.allow_forgery_protection = false 26 | 27 | # Tell Action Mailer not to deliver emails to the real world. 28 | # The :test delivery method accumulates sent emails in the 29 | # ActionMailer::Base.deliveries array. 30 | # config.action_mailer.delivery_method = :test 31 | 32 | # Print deprecation notices to the stderr. 33 | config.active_support.deprecation = :stderr 34 | 35 | # Raises error for missing translations 36 | # config.action_view.raise_on_missing_translations = true 37 | end 38 | -------------------------------------------------------------------------------- /config/initializers/cassandra.rb: -------------------------------------------------------------------------------- 1 | # Cassandra config 2 | hosts = (ENV['CASSANDRA_HOSTS'] || '127.0.0.1').split(',') 3 | 4 | CASSANDRA_OPTIONS = { 5 | hosts: hosts, 6 | timeout: 300, 7 | consistency: :quorum, 8 | } 9 | -------------------------------------------------------------------------------- /config/initializers/cookies_serializer.rb: -------------------------------------------------------------------------------- 1 | # Be sure to restart your server when you modify this file. 
2 | 3 | Rails.application.config.action_dispatch.cookies_serializer = :json 4 | -------------------------------------------------------------------------------- /config/initializers/filter_parameter_logging.rb: -------------------------------------------------------------------------------- 1 | # Be sure to restart your server when you modify this file. 2 | 3 | # Configure sensitive parameters which will be filtered from the log file. 4 | Rails.application.config.filter_parameters += [:password] 5 | -------------------------------------------------------------------------------- /config/initializers/kafka.rb: -------------------------------------------------------------------------------- 1 | # Kafka config 2 | hosts = (ENV['KAFKA_BROKERS'] || '127.0.0.1:9092').split(',') 3 | 4 | KAFKA_OPTIONS = { 5 | seed_brokers: hosts, 6 | logger: Rails.logger 7 | } 8 | 9 | KAFKA_TOPIC = 'tweets' 10 | -------------------------------------------------------------------------------- /config/initializers/session_store.rb: -------------------------------------------------------------------------------- 1 | # Be sure to restart your server when you modify this file. 2 | 3 | Rails.application.config.session_store( 4 | :cookie_store, 5 | key: '_rails-on-mesos_session' 6 | ) 7 | -------------------------------------------------------------------------------- /config/initializers/wrap_parameters.rb: -------------------------------------------------------------------------------- 1 | # Be sure to restart your server when you modify this file. 2 | 3 | # This file contains settings for ActionController::ParamsWrapper which 4 | # is enabled by default. 5 | 6 | # Enable parameter wrapping for JSON. 7 | # You can disable this by setting :format to an empty array. 8 | ActiveSupport.on_load(:action_controller) do 9 | wrap_parameters format: [:json] if respond_to?(:wrap_parameters) 10 | end 11 | 12 | # To enable root element in JSON for ActiveRecord objects. 
13 | # ActiveSupport.on_load(:active_record) do 14 | # self.include_root_in_json = true 15 | # end 16 | -------------------------------------------------------------------------------- /config/locales/en.yml: -------------------------------------------------------------------------------- 1 | # Files in the config/locales directory are used for internationalization 2 | # and are automatically loaded by Rails. If you want to use locales other 3 | # than English, add the necessary files in this directory. 4 | # 5 | # To use the locales, use `I18n.t`: 6 | # 7 | # I18n.t 'hello' 8 | # 9 | # In views, this is aliased to just `t`: 10 | # 11 | # <%= t('hello') %> 12 | # 13 | # To use a different locale, set it with `I18n.locale`: 14 | # 15 | # I18n.locale = :es 16 | # 17 | # This would use the information in config/locales/es.yml. 18 | # 19 | # To learn more, please read the Rails Internationalization guide 20 | # available at http://guides.rubyonrails.org/i18n.html. 21 | 22 | en: 23 | hello: "Hello world" 24 | -------------------------------------------------------------------------------- /config/routes.rb: -------------------------------------------------------------------------------- 1 | Rails.application.routes.draw do 2 | # The priority is based upon order of creation: 3 | # first created -> highest priority. 4 | # See how all your routes lay out with "rake routes". 5 | 6 | # You can have the root of your site routed with "root" 7 | root 'tweets#index' 8 | 9 | resources :tweets, except: %w(edit new update) 10 | 11 | resources :analytics, except: %w(create edit new update show) 12 | end 13 | -------------------------------------------------------------------------------- /config/secrets.yml: -------------------------------------------------------------------------------- 1 | # Be sure to restart your server when you modify this file. 2 | 3 | # Your secret key is used for verifying the integrity of signed cookies. 
4 | # If you change this key, all old signed cookies will become invalid! 5 | 6 | # Make sure the secret is at least 30 characters and all random, 7 | # no regular words or you'll be exposed to dictionary attacks. 8 | # You can use `rake secret` to generate a secure secret key. 9 | 10 | # Make sure the secrets in this file are kept private 11 | # if you're sharing your code publicly. 12 | 13 | development: 14 | secret_key_base: be6ea21bd0e8ddad06accbdfbfcbc6f120815744a8177fb1196442c1670401c86a1d020f1fb62f9b7d6bacc8cf818de277d23d3f3e7dcf704ca88965e5b9ed86 15 | 16 | test: 17 | secret_key_base: 9747c683a30b28ff768babd4dd424d3f56a6ee2efa62fed79b796761f3fd3e01e4d4a1d19ace0f37a7730fb86f164e4eb257805f77c40396925c7fcc7614150c 18 | 19 | # Do not keep production secrets in the repository, 20 | # instead read values from the environment. 21 | production: 22 | secret_key_base: <%= ENV["SECRET_KEY_BASE"] %> 23 | -------------------------------------------------------------------------------- /cypress.json: -------------------------------------------------------------------------------- 1 | { 2 | "fixturesFolder": "tests/fixtures", 3 | "integrationFolder": "tests/integration", 4 | "screenshotsFolder": "tests/screenshots", 5 | "screenshotOnHeadlessFailure": false, 6 | "pageLoadTimeout": 120000, 7 | "supportFile": "tests/support/commands.js" 8 | } 9 | -------------------------------------------------------------------------------- /lib/tasks/db.rake: -------------------------------------------------------------------------------- 1 | require 'cassandra' 2 | 3 | namespace :cassandra do 4 | desc "TODO" 5 | task :setup => :environment do 6 | cluster = Cassandra.cluster(CASSANDRA_OPTIONS) 7 | keyspace = 'tweeter' 8 | session = cluster.connect() 9 | session.execute( 10 | "CREATE KEYSPACE IF NOT EXISTS tweeter WITH replication = \ 11 | {'class': 'SimpleStrategy','replication_factor': 3}") 12 | session.execute( 13 | "CREATE TABLE IF NOT EXISTS tweeter.tweets ( \ 14 | kind VARCHAR, \ 15 | 
id VARCHAR, \ 16 | content VARCHAR, \ 17 | created_at timeuuid, \ 18 | handle VARCHAR, \ 19 | PRIMARY KEY (kind, created_at) \ 20 | ) WITH CLUSTERING ORDER BY (created_at DESC)" 21 | ) 22 | session.execute( 23 | "CREATE TABLE IF NOT EXISTS tweeter.analytics ( \ 24 | kind VARCHAR, \ 25 | key VARCHAR, \ 26 | frequency INT, \ 27 | PRIMARY KEY (kind, frequency) \ 28 | ) WITH CLUSTERING ORDER BY (frequency DESC)" 29 | ) 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /log/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/d2iq-archive/tweeter/f5ce4f0fdc2f30206ac8eabed5c5337cf3cb0878/log/.keep -------------------------------------------------------------------------------- /post-tweets.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/post-tweets", 3 | "cmd": "bin/tweet shakespeare-tweets.json http://1.1.1.1:30000", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | "instances": 1, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10101, 18 | "protocol": "tcp" 19 | } 20 | ] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /public/404.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | The page you were looking for doesn't exist (404) 5 | 6 | 55 | 56 | 57 | 58 | 59 |
60 |
61 |

The page you were looking for doesn't exist.

62 |

You may have mistyped the address or the page may have moved.

63 |
64 |

If you are the application owner check the logs for more information.

65 |
66 | 67 | 68 | -------------------------------------------------------------------------------- /public/422.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | The change you wanted was rejected (422) 5 | 6 | 55 | 56 | 57 | 58 | 59 |
60 |
61 |

The change you wanted was rejected.

62 |

Maybe you tried to change something you didn't have access to.

63 |
64 |

If you are the application owner check the logs for more information.

65 |
66 | 67 | 68 | -------------------------------------------------------------------------------- /public/500.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | We're sorry, but something went wrong (500) 5 | 6 | 55 | 56 | 57 | 58 | 59 |
60 |
61 |

We're sorry, but something went wrong.

62 |
63 |

If you are the application owner check the logs for more information.

64 |
65 | 66 | 67 | -------------------------------------------------------------------------------- /public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/d2iq-archive/tweeter/f5ce4f0fdc2f30206ac8eabed5c5337cf3cb0878/public/favicon.ico -------------------------------------------------------------------------------- /router/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:jessie 2 | 3 | RUN apt-get update && apt-get -y install wget build-essential libreadline-dev libncurses5-dev libpcre3-dev libssl-dev && apt-get -q -y clean 4 | RUN wget http://openresty.org/download/ngx_openresty-1.7.10.1.tar.gz \ 5 | && tar xvfz ngx_openresty-1.7.10.1.tar.gz \ 6 | && cd ngx_openresty-1.7.10.1 \ 7 | && ./configure --with-luajit --with-http_gzip_static_module --with-http_ssl_module \ 8 | && make \ 9 | && make install \ 10 | && rm -rf /ngx_openresty* 11 | 12 | EXPOSE 8080 13 | CMD /usr/local/openresty/nginx/sbin/nginx 14 | 15 | ADD nginx.conf /usr/local/openresty/nginx/conf/nginx.conf 16 | ADD app.lua /usr/local/openresty/nginx/conf/app.lua 17 | RUN chmod a+r /usr/local/openresty/nginx/conf/app.lua 18 | -------------------------------------------------------------------------------- /router/app.lua: -------------------------------------------------------------------------------- 1 | local cjson = require "cjson" 2 | 3 | resp = ngx.location.capture('/__mesos_dns/v1/services/_tweeter._tcp.marathon.mesos') 4 | 5 | backends = cjson.decode(resp.body) 6 | backend = backends[1] 7 | ngx.var.target = "http://" .. backend['ip'] .. ":" .. 
backend['port'] 8 | -------------------------------------------------------------------------------- /router/marathon.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/tweeter-router", 3 | "cpus": 1, 4 | "mem": 256, 5 | "instances": 1, 6 | "constraints": [["hostname", "UNIQUE"]], 7 | "acceptedResourceRoles": ["slave_public"], 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter-router:06506e020400bdc89b5e0a510993f5f62227a8e2", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 8080, 16 | "hostPort": 80, 17 | "protocol": "tcp" 18 | } 19 | ] 20 | } 21 | }, 22 | "healthChecks": [{ 23 | "protocol": "TCP", 24 | "gracePeriodSeconds": 600, 25 | "intervalSeconds": 30, 26 | "portIndex": 0, 27 | "timeoutSeconds": 10, 28 | "maxConsecutiveFailures": 2 29 | }] 30 | } 31 | -------------------------------------------------------------------------------- /router/nginx.conf: -------------------------------------------------------------------------------- 1 | daemon off; 2 | error_log stderr debug; 3 | 4 | events { 5 | worker_connections 1024; 6 | } 7 | 8 | http { 9 | access_log off; 10 | include mime.types; 11 | 12 | server { 13 | listen 8080; 14 | 15 | location / { 16 | set $target ""; 17 | 18 | rewrite_by_lua_file "conf/app.lua"; 19 | 20 | proxy_set_header Host $host; 21 | proxy_set_header X-Real-IP $remote_addr; 22 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 23 | proxy_set_header X-Forwarded-Proto $scheme; 24 | 25 | proxy_pass $target; 26 | } 27 | 28 | location /__mesos_dns/ { 29 | allow 127.0.0.1; 30 | deny all; 31 | proxy_pass http://master.mesos:8123/; 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /tests/integration/tweeter_spec.js: -------------------------------------------------------------------------------- 1 | describe('Tweeter Demo', function() { 2 | it('can post and read 
tweets', function() { 3 | cy.readFile('ci-conf.json') 4 | .then(function(settings) { 5 | cy.visit(settings.tweeter_url); 6 | 7 | var handle = 'cypressbot'; 8 | var myTweet = 'my first tweet'; 9 | 10 | cy.tweet(handle, myTweet) 11 | .get('div.list-group') // tweet listing 12 | .find('p.tweet-content') // tweet text/content 13 | .contains(myTweet); 14 | }); 15 | }); 16 | 17 | it('can view load balancing', function() { 18 | cy.readFile('ci-conf.json') 19 | .then(function(settings) { 20 | cy.visit(settings.url); 21 | 22 | if ('username' in settings && settings['username'] != '') { 23 | cy.eelogin(settings.username, settings.password) 24 | 25 | cy.get('.sidebar-menu-item-label').contains('Networking').click(); 26 | cy.get('.sidebar-menu-item a').contains('Service Addresses').click(); 27 | // find the 1.1.1.1 network and click on it 28 | cy.get('table tbody tr td a').contains('1.1.1').click() 29 | // there should be a canvas element. this is where the graph 30 | // is drawn. not sure what to verify within the canvas. 31 | cy.get('canvas').should('have.length.of.at.least', 1) 32 | // make sure there are at least 3 IPs in the IP table. 
33 | cy.get('table tbody tr:visible').should('have.length', 3) 34 | } 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /tests/support/commands.js: -------------------------------------------------------------------------------- 1 | Cypress.addParentCommand('tweet', function(handle, content) { 2 | var handle = handle || 'cypressbot'; 3 | var content = content || 'foobar'; 4 | 5 | cy.get('#tweet_handle').clear().type(handle) 6 | .get('#tweet_content').clear().type(content) 7 | .get('input[name=commit]').click(); 8 | }); 9 | 10 | Cypress.addParentCommand('eelogin', function(username, password) { 11 | var username = username || 'guest'; 12 | var password = password || 'password'; 13 | 14 | cy.clearCookies(); 15 | 16 | cy.get("input[name=uid]") 17 | .clear() 18 | .type(username) 19 | .get("input[name=password]", {log: false}) 20 | .clear({log: false}) 21 | .type(password, {log: false}) 22 | .get("button.button.button-primary:first") 23 | .click(); 24 | }); 25 | -------------------------------------------------------------------------------- /tweeter-analytics.json: -------------------------------------------------------------------------------- 1 | {"paragraphs":[{"title":"Load Dependencies","text":"%dep\nz.load(\"org.apache.commons:commons-csv:1.2\")\nz.load(\"org.apache.spark:spark-streaming-kafka_2.10:1.5.2\")\nz.load(\"com.typesafe.play:play-json_2.10:2.4.6\")\n","dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989264_383768947","id":"20160418-184309_508939537","result":{"code":"SUCCESS","type":"TEXT","msg":"res0: org.apache.zeppelin.spark.dep.Dependency = org.apache.zeppelin.spark.dep.Dependency@5ba40793\n"},"dateCreated":"Apr 18, 2016 6:43:09 
PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:20"},{"title":"Spark Streaming","text":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\n\nimport org.apache.spark.{SparkConf}\nimport org.apache.spark.streaming.kafka.{KafkaUtils}\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\n\nimport play.api.libs.json._\n\nval ssc = new StreamingContext(sc, Seconds(2))\nval msgStream = KafkaUtils.createStream(ssc, \"master.mesos:2181/dcos-service-kafka\", \"zeppelin-consumer-group\", Map(\"tweets\" -> 1)).map(_._2)\n\nval tweetStream = msgStream.map(tweetString => {\n val tweet = Json.parse(tweetString)\n ((tweet \\ \"handle\").as[String], (tweet \\ \"content\").as[String], (tweet \\ \"created_at\").as[String])\n })\ntweetStream.print()\n \ntweetStream.window(Seconds(60))\n .foreachRDD(rdd => rdd.toDF(\"handle\", \"content\", \"created_at\").registerTempTable(\"tweets\"))\n \n \n \nssc.start()","dateUpdated":"Apr 18, 2016 6:44:03 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"tableHide":false,"editorMode":"ace/mode/scala","title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_383384198","id":"20160418-184309_1078829498","result":{"code":"SUCCESS","type":"TEXT","msg":"import java.text.{SimpleDateFormat, ParseException}\nimport java.util.Date\nimport org.apache.spark.SparkConf\nimport org.apache.spark.streaming.kafka.KafkaUtils\nimport org.apache.spark.streaming.{StreamingContext, Seconds}\nimport play.api.libs.json._\nssc: org.apache.spark.streaming.StreamingContext = org.apache.spark.streaming.StreamingContext@33126f3f\nmsgStream: org.apache.spark.streaming.dstream.DStream[String] = org.apache.spark.streaming.dstream.MappedDStream@1f5376fc\ntweetStream: org.apache.spark.streaming.dstream.DStream[(String, String, String)] = 
org.apache.spark.streaming.dstream.MappedDStream@20f4af29\n"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:21"},{"title":"Top tweeters","text":"%sql select handle, count(*) as count from tweets\n group by handle\n order by count desc","dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"multiBarChart","height":560,"optionOpen":false,"keys":[{"name":"handle","index":0,"aggr":"sum"}],"values":[{"name":"count","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"handle","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":false,"tableHide":false,"title":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989265_384538445","id":"20160418-184309_1093666837","result":{"code":"SUCCESS","type":"TABLE","msg":"handle\tcount\nrosalind\t236\ntouchstone\t134\njaques\t117\ncelia\t97\norlando\t94\nduke senior\t48\ncorin\t42\nphebe\t30\nduke frederick\t19\namiens\t17\nsilvius\t15\naudrey\t9\nsir oliver martext\t7\nadam\t2\noliver\t2\n","comment":"","msgTable":[[{"key":"count","value":"rosalind"},{"key":"count","value":"236"}],[{"value":"touchstone"},{"value":"134"}],[{"value":"jaques"},{"value":"117"}],[{"value":"celia"},{"value":"97"}],[{"value":"orlando"},{"value":"94"}],[{"value":"duke senior"},{"value":"48"}],[{"value":"corin"},{"value":"42"}],[{"value":"phebe"},{"value":"30"}],[{"value":"duke frederick"},{"value":"19"}],[{"value":"amiens"},{"value":"17"}],[{"value":"silvius"},{"value":"15"}],[{"value":"audrey"},{"value":"9"}],[{"value":"sir oliver martext"},{"value":"7"}],[{"value":"adam"},{"value":"2"}],[{"value":"oliver"},{"value":"2"}]],"columnNames":[{"name":"handle","index":0,"aggr":"sum"},{"name":"count","index":1,"aggr":"sum"}],"rows":[["rosalind","236"],["touchstone","134"],["jaques","117"],["celia","97"],["orlando","94"],["duke senior","48"],["corin","42"],["phebe","30"],["duke 
frederick","19"],["amiens","17"],["silvius","15"],["audrey","9"],["sir oliver martext","7"],["adam","2"],["oliver","2"]]},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:22"},{"dateUpdated":"Apr 18, 2016 6:43:09 PM","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/scala"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1461004989266_384538445","id":"20160418-184309_1977340255","result":{"code":"SUCCESS","type":"TEXT"},"dateCreated":"Apr 18, 2016 6:43:09 PM","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:23"}],"name":"Tweeter Analytics","id":"2BGEB4WMQ","angularObjects":{"2BFVURX7P":[],"2BJ26CJUP":[],"2BGC7K1CY":[],"2BGG4QMZP":[],"2BHTSMVYX":[],"2BJAXRYGW":[],"2BH1XZMES":[],"2BJ3XKTDP":[],"2BHEWU631":[],"2BJ9J1UR6":[],"2BG18JECV":[],"2BGYEW4KP":[],"2BGTRC1KJ":[],"2BHPKYBCN":[]},"config":{"looknfeel":"default"},"info":{}} 2 | -------------------------------------------------------------------------------- /tweeter.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "/tweeter", 3 | "cmd": "export KAFKA_BROKERS='broker.kafka.l4lb.thisdcos.directory:9092' && until rake cassandra:setup; do sleep 5; done && rails server --binding 0.0.0.0", 4 | "cpus": 0.25, 5 | "mem": 256, 6 | "disk": 0, 7 | "instances": 3, 8 | "container": { 9 | "type": "DOCKER", 10 | "docker": { 11 | "image": "mesosphere/tweeter", 12 | "network": "BRIDGE", 13 | "portMappings": [ 14 | { 15 | "containerPort": 3000, 16 | "hostPort": 0, 17 | "servicePort": 10000, 18 | "labels": { 19 | "VIP_0": "1.1.1.1:30000" 20 | } 21 | } 22 | ] 23 | } 24 | }, 25 | "env": { 26 | "CASSANDRA_HOSTS": "node-0-server.cassandra.autoip.dcos.thisdcos.directory", 27 | "RAILS_ENV": "production", 28 | "SECRET_KEY_BASE": 
"be6ea21bd0e8ddad06accbdfbfcbc6f120815744a8177fb1196442c1670401c86a1d020f1fb62f9b7d6bacc8cf818de277d23d3f3e7dcf704ca88965e5b9ed86" 29 | }, 30 | "healthChecks": [ 31 | { 32 | "path": "/", 33 | "protocol": "HTTP", 34 | "portIndex": 0 35 | } 36 | ], 37 | "labels": { 38 | "HAPROXY_GROUP": "external" 39 | } 40 | } 41 | --------------------------------------------------------------------------------