├── .gitignore
├── Dockerfile
├── config_sample.py
├── images
│   ├── obama-pie-chart.png
│   ├── overall-pie-chart.png
│   └── top-authors.png
├── readme.md
├── requirements.txt
└── sentiment.py

/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .DS_Store
3 | env
4 | config.py
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | # start with a base image
 2 | FROM ubuntu:14.10
 3 | 
 4 | MAINTAINER Real Python
 5 | 
 6 | # initial update
 7 | RUN apt-get update -q
 8 | 
 9 | # install wget, java, and mini-httpd web server
10 | RUN apt-get install -yq wget
11 | RUN apt-get install -yq default-jre-headless
12 | RUN apt-get install -yq mini-httpd
13 | 
14 | # install elasticsearch
15 | RUN cd /tmp && \
16 |     wget -nv https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.5.tar.gz && \
17 |     tar zxf elasticsearch-1.3.5.tar.gz && \
18 |     rm -f elasticsearch-1.3.5.tar.gz && \
19 |     mv /tmp/elasticsearch-1.3.5 /elasticsearch
20 | 
21 | # install kibana
22 | RUN cd /tmp && \
23 |     wget -nv https://download.elasticsearch.org/kibana/kibana/kibana-3.1.2.tar.gz && \
24 |     tar zxf kibana-3.1.2.tar.gz && \
25 |     rm -f kibana-3.1.2.tar.gz && \
26 |     mv /tmp/kibana-3.1.2 /kibana
27 | 
28 | # start elasticsearch
29 | CMD /elasticsearch/bin/elasticsearch -Des.logger.level=OFF & mini-httpd -d /kibana -h `hostname` -r -D -p 8000
30 | 
31 | # expose ports
32 | EXPOSE 8000 9200
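The Dockerfile above packs Elasticsearch 1.3.5 and Kibana 3.1.2 into one container: the `CMD` line starts Elasticsearch in the background and serves the Kibana files with mini-httpd on port 8000, while `EXPOSE 8000 9200` documents the two ports to map at run time. As a rough sketch of building and running it (the image tag `twitter-sentiment` is only an illustrative name, and on a 2014-era boot2docker setup you would hit the VM's IP rather than localhost):

```sh
# build the image from the repo root, where the Dockerfile lives
docker build -t twitter-sentiment .

# run it detached, publishing Kibana (8000) and Elasticsearch (9200)
docker run -d -p 8000:8000 -p 9200:9200 twitter-sentiment

# sanity check: Elasticsearch should answer with its cluster/version info
curl http://localhost:9200
```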
--------------------------------------------------------------------------------
/config_sample.py:
--------------------------------------------------------------------------------
1 | consumer_key = "Your_consumer_key_here"
2 | consumer_secret = "Your_consumer_secret_here"
3 | access_token = "Your_access_token_here"
4 | access_token_secret = "Your_access_token_secret_here"
--------------------------------------------------------------------------------
/images/obama-pie-chart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/realpython/twitter-sentiment-elasticsearch/661681c15d5f0bd23955d25270e356c2600a5d57/images/obama-pie-chart.png
--------------------------------------------------------------------------------
/images/overall-pie-chart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/realpython/twitter-sentiment-elasticsearch/661681c15d5f0bd23955d25270e356c2600a5d57/images/overall-pie-chart.png
--------------------------------------------------------------------------------
/images/top-authors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/realpython/twitter-sentiment-elasticsearch/661681c15d5f0bd23955d25270e356c2600a5d57/images/top-authors.png
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | ## Twitter Sentiment - Python, Docker, Elasticsearch, Kibana
2 | 
3 | So easy!
4 | 
5 | Check out the blog post - [https://realpython.com/blog/python/twitter-sentiment-python-docker-elasticsearch-kibana/](https://realpython.com/blog/python/twitter-sentiment-python-docker-elasticsearch-kibana/)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch==1.2.0
2 | textblob==0.9.0
3 | tweepy==2.3.0
--------------------------------------------------------------------------------
/sentiment.py:
--------------------------------------------------------------------------------
 1 | import json
 2 | from tweepy.streaming import StreamListener
 3 | from tweepy import OAuthHandler
 4 | from tweepy import Stream
 5 | from textblob import TextBlob
 6 | from elasticsearch import Elasticsearch
 7 | 
 8 | # import twitter keys and tokens
 9 | from config import *
10 | 
11 | # create instance of elasticsearch
12 | es = Elasticsearch()
13 | 
14 | 
15 | class TweetStreamListener(StreamListener):
16 | 
17 |     # on success
18 |     def on_data(self, data):
19 | 
20 |         # decode json
21 |         dict_data = json.loads(data)
22 | 
23 |         # pass tweet into TextBlob
24 |         tweet = TextBlob(dict_data["text"])
25 | 
26 |         # output sentiment polarity
27 |         print(tweet.sentiment.polarity)
28 | 
29 |         # determine if sentiment is positive, negative, or neutral
30 |         if tweet.sentiment.polarity < 0:
31 |             sentiment = "negative"
32 |         elif tweet.sentiment.polarity == 0:
33 |             sentiment = "neutral"
34 |         else:
35 |             sentiment = "positive"
36 | 
37 |         # output sentiment
38 |         print(sentiment)
39 | 
40 |         # add text and sentiment info to elasticsearch
41 |         es.index(index="sentiment",
42 |                  doc_type="test-type",
43 |                  body={"author": dict_data["user"]["screen_name"],
44 |                        "date": dict_data["created_at"],
45 |                        "message": dict_data["text"],
46 |                        "polarity": tweet.sentiment.polarity,
47 |                        "subjectivity": tweet.sentiment.subjectivity,
48 |                        "sentiment": sentiment})
49 |         return True
50 | 
51 |     # on failure
52 |     def on_error(self, status):
53 |         print(status)
54 | 
55 | if __name__ == '__main__':
56 | 
57 |     # create instance of the tweepy tweet stream listener
58 |     listener = TweetStreamListener()
59 | 
60 |     # set twitter keys/tokens
61 |     auth = OAuthHandler(consumer_key, consumer_secret)
62 |     auth.set_access_token(access_token, access_token_secret)
63 | 
64 |     # create instance of the tweepy stream
65 |     stream = Stream(auth, listener)
66 | 
67 |     # search twitter for "congress" keyword
68 |     stream.filter(track=['congress'])
--------------------------------------------------------------------------------
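For completeness, here is a rough end-to-end sketch of running the Python side against that container. It assumes the container above is already up, a Python 2.7 environment (which these 2014-era pins targeted), and that you fill real Twitter credentials into `config.py`; the `env` virtualenv name simply mirrors the `.gitignore` entry, and the final `curl` is Elasticsearch's standard URI search against the `sentiment` index that `sentiment.py` writes to.

```sh
# create and activate a virtualenv (named "env", matching .gitignore)
virtualenv env && source env/bin/activate

# install the pinned dependencies
pip install -r requirements.txt

# copy the sample config and add your Twitter keys/tokens (config.py is git-ignored)
cp config_sample.py config.py

# start streaming tweets matching "congress" and indexing them
python sentiment.py

# in another terminal: confirm documents are landing in the "sentiment" index
curl "http://localhost:9200/sentiment/_search?q=sentiment:positive&pretty"
```

The screenshots in the `images` folder appear to be Kibana dashboards (served on port 8000) built on that same `sentiment` index.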