├── static
│   ├── strat3_DRYS.png
│   ├── strategy1_.png
│   ├── strategy1_tsla.png
│   ├── strategy4_TSLA.png
│   ├── strat1_Z.png
│   ├── strat3_Z.png
│   ├── strat4_Z.png
│   ├── strat5_Z.png
│   ├── strat6_Z.png
│   ├── strat7_Z.png
│   ├── wsbLogo.png
│   ├── strat1_AMD.png
│   ├── strat1_AMZN.png
│   ├── strat1_DRYS.png
│   ├── strat1_FB.png
│   ├── strat1_MSFT.png
│   ├── strat1_NVDA.png
│   ├── strat1_TSLA.png
│   ├── strat3_AMD.png
│   ├── strat3_AMZN.png
│   ├── strat3_FB.png
│   ├── strat3_MSFT.png
│   ├── strat3_NVDA.png
│   ├── strat3_TSLA.png
│   ├── strat4_AMD.png
│   ├── strat4_AMZN.png
│   ├── strat4_FB.png
│   ├── strat4_MSFT.png
│   ├── strat4_NVDA.png
│   ├── strat4_TSLA.png
│   ├── strat5_AMD.png
│   ├── strat5_AMZN.png
│   ├── strat5_FB.png
│   ├── strat5_MSFT.png
│   ├── strat5_NVDA.png
│   ├── strat5_TSLA.png
│   ├── strat6_AMD.png
│   ├── strat6_AMZN.png
│   ├── strat6_FB.png
│   ├── strat6_MSFT.png
│   ├── strat6_NVDA.png
│   ├── strat6_TSLA.png
│   ├── strat7_AMD.png
│   ├── strat7_AMZN.png
│   ├── strat7_FB.png
│   ├── strat7_MSFT.png
│   ├── strat7_NVDA.png
│   ├── strat7_TSLA.png
│   ├── totalByDay_FB.png
│   ├── totalByDay_Z.png
│   ├── newTotalMentions.png
│   ├── strategy1_TSLA.png
│   ├── strategy3_TSLA.png
│   ├── totalByDay_AMD.png
│   ├── totalByDay_AMZN.png
│   ├── totalByDay_DRYS.png
│   ├── totalByDay_MSFT.png
│   ├── totalByDay_NVDA.png
│   ├── totalByDay_TSLA.png
│   └── js
│       ├── utils.js
│       └── leaflet-heat.js
├── runtime.txt
├── Procfile
├── getYolo.py
├── requirements.txt
├── script
│   ├── convertDate.py
│   ├── combineJson.py
│   ├── prettyPrintJson.py
│   ├── onlyTickers.py
│   └── cleanData.py
├── getDB.py
├── teslaAnalytics
│   ├── old
│   │   ├── addDateColumn.py
│   │   ├── addSentimentColumn.py
│   │   └── addDate.py
│   ├── addSentimentColumn.py
│   ├── db.py
│   ├── addDate.py
│   └── addSentiment.py
├── stockTickersBySentiment.py
├── compareToVolume.py
├── strategies.py
├── db.py
├── templates
│   ├── example.html
│   ├── index.html
│   ├── viz1.html
│   ├── days.html
│   ├── viz2.html
│   ├── viz3.html
│   ├── ticker.html
│   └── lines.html
├── sample.py
├── dataVisualizations
│   ├── app.py
│   ├── templates
│   │   └── viz1.html
│   └── static
│       └── js
│           ├── utils.js
│           └── leaflet-heat.js
├── .gitignore
├── createDB.py
├── downloadIntraday.py
├── genFullAnalysis.py
├── app.py
├── algo.py
├── README.md
├── main.py
└── dataset
    └── ListOfDatesOrder.json
/static/strat3_DRYS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_DRYS.png
--------------------------------------------------------------------------------
/static/strategy1_.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strategy1_.png
--------------------------------------------------------------------------------
/static/strategy1_tsla.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strategy1_tsla.png
--------------------------------------------------------------------------------
/static/strategy4_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strategy4_TSLA.png
--------------------------------------------------------------------------------
/runtime.txt:
--------------------------------------------------------------------------------
1 | python-2.7.15
2 |
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: gunicorn app:app
2 |
--------------------------------------------------------------------------------
/static/strat1_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_Z.png
--------------------------------------------------------------------------------
/static/strat3_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_Z.png
--------------------------------------------------------------------------------
/static/strat4_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_Z.png
--------------------------------------------------------------------------------
/static/strat5_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_Z.png
--------------------------------------------------------------------------------
/static/strat6_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_Z.png
--------------------------------------------------------------------------------
/static/strat7_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_Z.png
--------------------------------------------------------------------------------
/static/wsbLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/wsbLogo.png
--------------------------------------------------------------------------------
/getYolo.py:
--------------------------------------------------------------------------------
1 | import main
2 |
3 | if __name__ == '__main__':
4 | print len(main.get_yolo_comments())
5 |
--------------------------------------------------------------------------------
/static/strat1_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_AMD.png
--------------------------------------------------------------------------------
/static/strat1_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_AMZN.png
--------------------------------------------------------------------------------
/static/strat1_DRYS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_DRYS.png
--------------------------------------------------------------------------------
/static/strat1_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_FB.png
--------------------------------------------------------------------------------
/static/strat1_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_MSFT.png
--------------------------------------------------------------------------------
/static/strat1_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_NVDA.png
--------------------------------------------------------------------------------
/static/strat1_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat1_TSLA.png
--------------------------------------------------------------------------------
/static/strat3_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_AMD.png
--------------------------------------------------------------------------------
/static/strat3_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_AMZN.png
--------------------------------------------------------------------------------
/static/strat3_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_FB.png
--------------------------------------------------------------------------------
/static/strat3_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_MSFT.png
--------------------------------------------------------------------------------
/static/strat3_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_NVDA.png
--------------------------------------------------------------------------------
/static/strat3_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat3_TSLA.png
--------------------------------------------------------------------------------
/static/strat4_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_AMD.png
--------------------------------------------------------------------------------
/static/strat4_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_AMZN.png
--------------------------------------------------------------------------------
/static/strat4_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_FB.png
--------------------------------------------------------------------------------
/static/strat4_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_MSFT.png
--------------------------------------------------------------------------------
/static/strat4_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_NVDA.png
--------------------------------------------------------------------------------
/static/strat4_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat4_TSLA.png
--------------------------------------------------------------------------------
/static/strat5_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_AMD.png
--------------------------------------------------------------------------------
/static/strat5_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_AMZN.png
--------------------------------------------------------------------------------
/static/strat5_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_FB.png
--------------------------------------------------------------------------------
/static/strat5_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_MSFT.png
--------------------------------------------------------------------------------
/static/strat5_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_NVDA.png
--------------------------------------------------------------------------------
/static/strat5_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat5_TSLA.png
--------------------------------------------------------------------------------
/static/strat6_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_AMD.png
--------------------------------------------------------------------------------
/static/strat6_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_AMZN.png
--------------------------------------------------------------------------------
/static/strat6_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_FB.png
--------------------------------------------------------------------------------
/static/strat6_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_MSFT.png
--------------------------------------------------------------------------------
/static/strat6_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_NVDA.png
--------------------------------------------------------------------------------
/static/strat6_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat6_TSLA.png
--------------------------------------------------------------------------------
/static/strat7_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_AMD.png
--------------------------------------------------------------------------------
/static/strat7_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_AMZN.png
--------------------------------------------------------------------------------
/static/strat7_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_FB.png
--------------------------------------------------------------------------------
/static/strat7_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_MSFT.png
--------------------------------------------------------------------------------
/static/strat7_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_NVDA.png
--------------------------------------------------------------------------------
/static/strat7_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strat7_TSLA.png
--------------------------------------------------------------------------------
/static/totalByDay_FB.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_FB.png
--------------------------------------------------------------------------------
/static/totalByDay_Z.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_Z.png
--------------------------------------------------------------------------------
/static/newTotalMentions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/newTotalMentions.png
--------------------------------------------------------------------------------
/static/strategy1_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strategy1_TSLA.png
--------------------------------------------------------------------------------
/static/strategy3_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/strategy3_TSLA.png
--------------------------------------------------------------------------------
/static/totalByDay_AMD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_AMD.png
--------------------------------------------------------------------------------
/static/totalByDay_AMZN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_AMZN.png
--------------------------------------------------------------------------------
/static/totalByDay_DRYS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_DRYS.png
--------------------------------------------------------------------------------
/static/totalByDay_MSFT.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_MSFT.png
--------------------------------------------------------------------------------
/static/totalByDay_NVDA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_NVDA.png
--------------------------------------------------------------------------------
/static/totalByDay_TSLA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theriley106/TheWSBIndex/HEAD/static/totalByDay_TSLA.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask==0.12.3
2 | gunicorn==19.7.1
3 | Flask-Cors==3.0.7
4 | dateparser==0.7.0
5 | Flask-SocketIO==3.0.0
6 | Flask-Sockets==0.2.1
7 | textblob==0.15.0
8 |
--------------------------------------------------------------------------------
/script/convertDate.py:
--------------------------------------------------------------------------------
1 | import dateparser as dp
2 |
3 | def convert_date(stringVal):
4 | return str(int(round(int(stringVal),-3)))  # round(x, -3) snaps the epoch to the nearest 1,000 seconds
5 |
6 | print dp.parse(convert_date(1390255095))
7 | #dt = dp.parse('1390255095')
8 | #print (dt)
9 |
--------------------------------------------------------------------------------
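convertDate.py rounds a Unix epoch to the nearest 1,000 seconds and then hands the string to dateparser. A minimal standard-library sketch of the same conversion (illustrative only, not a file in this repo; the helper name is made up):

    import datetime

    def epoch_to_datetime(epoch_seconds):
        # e.g. 1390255095 -> 2014-01-20 21:58:15 (UTC)
        return datetime.datetime.utcfromtimestamp(int(epoch_seconds))

    print(epoch_to_datetime(1390255095))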
/getDB.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | def run_command(sqlCommand):
4 | connection = sqlite3.connect("myDB.db")
5 | cur = connection.cursor()
6 | cur.execute(sqlCommand)
7 | rows = cur.fetchall()
8 | return rows
9 |
10 | if __name__ == '__main__':
11 | result = run_command("""SELECT * FROM comments WHERE (body != "[deleted]")""")
12 | print len(result)
13 |
--------------------------------------------------------------------------------
/script/combineJson.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import json
4 |
5 | if __name__ == '__main__':
6 | filenames = sys.argv[1:]
7 | saveAs = filenames.pop(-1)
8 | writeAs = 'w'  # first input file truncates the output; subsequent files append
9 | for filename in filenames:
10 | with open(filename) as f:
11 | with open(saveAs, writeAs) as s:
12 | for line in f:
13 | s.write(json.dumps(line) + "\n")  # each input line is re-encoded as a JSON string literal
14 | writeAs = 'a'
15 |
--------------------------------------------------------------------------------
/teslaAnalytics/old/addDateColumn.py:
--------------------------------------------------------------------------------
1 | # encoding=utf8
2 | import sys
3 | reload(sys)
4 | sys.setdefaultencoding('utf8')
5 | import sqlite3
6 | import json
7 | connection = sqlite3.connect("myDB.db")
8 |
9 |
10 | cursor = connection.cursor()
11 | sql_command = """
12 | ALTER TABLE comments ADD COLUMN dateVal TEXT;"""
13 |
14 |
15 | cursor.execute(sql_command)
16 | connection.commit()
17 |
18 |
19 | connection.close()
20 |
--------------------------------------------------------------------------------
/stockTickersBySentiment.py:
--------------------------------------------------------------------------------
1 | import json
2 | import main
3 |
4 |
5 |
6 | if __name__ == '__main__':
7 | myVals = []
8 | g = len(main.STOCK_TICKERS)
9 | for i, val in enumerate(main.STOCK_TICKERS):
10 | sentiment = main.get_sentiment_by_ticker(val)
11 | print("{} | {}".format(val, sentiment))
12 | myVals.append({"ticker": val, "sentiment": sentiment})
13 | with open('sentimentByTicker.json', 'w') as fout:
14 | json.dump(myVals, fout)
15 | print("{}/{}".format(i, g))
16 |
--------------------------------------------------------------------------------
/compareToVolume.py:
--------------------------------------------------------------------------------
1 | import json
2 | import main
3 |
4 | if __name__ == '__main__':
5 | a = json.load(open("dataset/AllCounts.json"))
6 | myVals = {}
7 | for key, val in a.iteritems():
8 | totalVolume = main.get_average_volume_by_ticker(key)
9 | if val != 0:  # key is a ticker string, so only a zero comment count needs guarding
10 | ratio = float(totalVolume) / float(val)
11 | else:
12 | ratio = 0
13 | myVals[key] = ratio
14 | print("{} - {}".format(key, ratio))
15 | with open('volumeCommentRatio.json', 'w') as fout:
16 | json.dump(myVals, fout)
17 |
--------------------------------------------------------------------------------
/strategies.py:
--------------------------------------------------------------------------------
1 | '''
2 | t -> type(str) | contains a stock ticker
3 | d -> type(list(str)) | represents dates where t is mentioned
4 | x -> type(dict[d]) | represents t mentions on a given t
5 | o -> type(float) | containing overall sentiment towards t
6 | s -> type(dict[d]) | representing sentiment towards t on d
7 | a -> type(dict[d]) | represents total stock mentions on d
8 | r -> type(dict[d]) | represents ratio of x[d] to a[d]
9 | p -> type(dict[d]) | represents position to take; either [-1,0,1]
10 | '''
11 |
--------------------------------------------------------------------------------
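strategies.py only records the naming convention above; the concrete strategies live in algo.py. An illustrative sketch of a strategy written in terms of those variables (the 5% threshold and the function name are made up for the example, not taken from the repo):

    def example_strategy(x, a, o):
        # x: mentions of ticker t per date d, a: total stock mentions per date d,
        # o: overall sentiment towards t
        p = {}  # position per date: -1 short, 0 flat, 1 long
        for d in x:
            r = float(x[d]) / a[d] if a.get(d) else 0.0  # ratio of x[d] to a[d]
            if r > 0.05 and o > 0:
                p[d] = 1
            elif r > 0.05 and o < 0:
                p[d] = -1
            else:
                p[d] = 0
        return p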
/db.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import main
3 |
4 | def run_command(sqlCommand):
5 | connection = sqlite3.connect("myDB.db")
6 | cur = connection.cursor()
7 | cur.execute(sqlCommand)
8 | rows = cur.fetchall()
9 | return rows
10 |
11 | #def update(sqlCommand):
12 |
13 |
14 | #"""SELECT tickers FROM comments WHERE tickers not NULL"""
15 |
16 |
17 |
18 | if __name__ == '__main__':
19 | result = run_command("""SELECT tickers FROM comments WHERE tickers not NULL""")
20 | print len(result)
21 | #for row in result:
22 | # print row
23 |
--------------------------------------------------------------------------------
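db.py stubs out an update() helper (line 11) but never implements it. A minimal sketch of what a write path against the same myDB.db could look like (an assumption, not code from the repo; the example UPDATE assumes an id column exists):

    import sqlite3

    def run_update(sqlCommand, params=()):
        # Same database as run_command, but commits so the write persists.
        connection = sqlite3.connect("myDB.db")
        cur = connection.cursor()
        cur.execute(sqlCommand, params)
        changed = cur.rowcount
        connection.commit()
        connection.close()
        return changed

    # run_update("UPDATE comments SET tickers = ? WHERE id = ?", ("TSLA", 42))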
/teslaAnalytics/addSentimentColumn.py:
--------------------------------------------------------------------------------
1 | # encoding=utf8
2 | import sys
3 | reload(sys)
4 | sys.setdefaultencoding('utf8')
5 | import sqlite3
6 | import json
7 | connection = sqlite3.connect("myDB.db")
8 |
9 |
10 | cursor = connection.cursor()
11 | sql_command = """
12 | ALTER TABLE comments ADD COLUMN polarity DECIMAL;"""
13 |
14 |
15 | cursor.execute(sql_command)
16 |
17 | sql_command = """
18 | ALTER TABLE comments ADD COLUMN subjectivity DECIMAL;"""
19 |
20 |
21 | cursor.execute(sql_command)
22 | connection.commit()
23 | connection.close()
24 |
--------------------------------------------------------------------------------
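This script only adds the empty polarity and subjectivity columns; filling them is left to teslaAnalytics/addSentiment.py, whose contents are not shown here. A rough sketch of that fill step using TextBlob (which requirements.txt already pins); the id column and the exact query are assumptions:

    import sqlite3
    from textblob import TextBlob

    connection = sqlite3.connect("myDB.db")
    cursor = connection.cursor()
    cursor.execute("SELECT id, body FROM comments WHERE polarity IS NULL")
    for comment_id, body in cursor.fetchall():
        sentiment = TextBlob(body or "").sentiment
        connection.execute(
            "UPDATE comments SET polarity = ?, subjectivity = ? WHERE id = ?",
            (sentiment.polarity, sentiment.subjectivity, comment_id))
    connection.commit()
    connection.close()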
/teslaAnalytics/db.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import main
3 |
4 | def run_command(sqlCommand):
5 | connection = sqlite3.connect("myDB.db")
6 | cur = connection.cursor()
7 | cur.execute(sqlCommand)
8 | rows = cur.fetchall()
9 | return rows
10 |
11 | #def update(sqlCommand):
12 |
13 |
14 | #"""SELECT tickers FROM comments WHERE tickers not NULL"""
15 |
16 |
17 |
18 | if __name__ == '__main__':
19 | result = run_command("""SELECT tickers FROM comments WHERE tickers not NULL""")
20 | print len(result)
21 | #for row in result:
22 | # print row
23 |
--------------------------------------------------------------------------------
/teslaAnalytics/old/addSentimentColumn.py:
--------------------------------------------------------------------------------
1 | # encoding=utf8
2 | import sys
3 | reload(sys)
4 | sys.setdefaultencoding('utf8')
5 | import sqlite3
6 | import json
7 | connection = sqlite3.connect("myDB.db")
8 |
9 |
10 | cursor = connection.cursor()
11 | sql_command = """
12 | ALTER TABLE comments ADD COLUMN polarity DECIMAL;"""
13 |
14 |
15 | cursor.execute(sql_command)
16 |
17 | sql_command = """
18 | ALTER TABLE comments ADD COLUMN subjectivity DECIMAL;"""
19 |
20 |
21 | cursor.execute(sql_command)
22 | connection.commit()
23 | connection.close()
24 |
--------------------------------------------------------------------------------
/templates/example.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/downloadIntraday.py:
--------------------------------------------------------------------------------
23 | pattern = re.compile('.*"CrumbStore":\{"crumb":"(?P<crumb>[^"]+)"\}')
24 | for line in res.text.splitlines():
25 | m = pattern.match(line)
26 | if m is not None:
27 | yahoo_crumb = m.groupdict()['crumb']
28 | return yahoo_cookie, yahoo_crumb
29 |
30 | for file in glob.glob("data/*.csv"):
31 | item = file.partition("/")[2].partition(".cs")[0]
32 | ITEMS.remove(item)
33 |
34 | def download_historical(start_Date="20120412", end_Date="20181231"):
35 | while len(ITEMS) > 0:
36 | lock.acquire()
37 | symbol = ITEMS.pop()
38 | lock.release()
39 | try:
40 | cookieVal = get_yahoo_crumb_cookie()
41 | sd = str(int(time.mktime(datetime.datetime.strptime(start_Date, "%Y%m%d").timetuple())))
42 | ed = str(int(time.mktime(datetime.datetime.strptime(end_Date, "%Y%m%d").timetuple())))
43 | url = URL.format(symbol, sd, ed, cookieVal[1])
44 | res = requests.get(url, allow_redirects=True, cookies={'B': cookieVal[0]})
45 | file = open("data/{}.csv".format(symbol), 'wb')
46 | file.write(res.content)
47 | file.close()
48 | print("Done with: {}".format(symbol))
49 | except Exception as exp:
50 | print exp
51 |
52 | if __name__ == '__main__':
53 | threads = [threading.Thread(target=download_historical) for i in range(THREADS)]
54 | for thread in threads:
55 | thread.start()
56 | for thread in threads:
57 | thread.join()
58 |
--------------------------------------------------------------------------------
/templates/viz1.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Stock Tickers by Total Mentions
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
76 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/dataVisualizations/templates/viz1.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Stock Tickers by Total Mentions
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
76 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/templates/days.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {{stock}} by Total Mentions
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
77 |
78 |
79 |
80 |
--------------------------------------------------------------------------------
/templates/viz2.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Stock Tickers by Total Mentions
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/templates/viz3.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | /r/Tesla Comments Per Day vs. Stock Price
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/static/js/utils.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | window.chartColors = {
4 | red: 'rgb(255, 99, 132)',
5 | orange: 'rgb(255, 159, 64)',
6 | yellow: 'rgb(255, 205, 86)',
7 | green: 'rgb(75, 192, 192)',
8 | blue: 'rgb(54, 162, 235)',
9 | purple: 'rgb(153, 102, 255)',
10 | grey: 'rgb(201, 203, 207)'
11 | };
12 |
13 | (function(global) {
14 | var Months = [
15 | 'January',
16 | 'February',
17 | 'March',
18 | 'April',
19 | 'May',
20 | 'June',
21 | 'July',
22 | 'August',
23 | 'September',
24 | 'October',
25 | 'November',
26 | 'December'
27 | ];
28 |
29 | var COLORS = [
30 | '#4dc9f6',
31 | '#f67019',
32 | '#f53794',
33 | '#537bc4',
34 | '#acc236',
35 | '#166a8f',
36 | '#00a950',
37 | '#58595b',
38 | '#8549ba'
39 | ];
40 |
41 | var Samples = global.Samples || (global.Samples = {});
42 | var Color = global.Color;
43 |
44 | Samples.utils = {
45 | // Adapted from http://indiegamr.com/generate-repeatable-random-numbers-in-js/
46 | srand: function(seed) {
47 | this._seed = seed;
48 | },
49 |
50 | rand: function(min, max) {
51 | var seed = this._seed;
52 | min = min === undefined ? 0 : min;
53 | max = max === undefined ? 1 : max;
54 | this._seed = (seed * 9301 + 49297) % 233280;
55 | return min + (this._seed / 233280) * (max - min);
56 | },
57 |
58 | numbers: function(config) {
59 | var cfg = config || {};
60 | var min = cfg.min || 0;
61 | var max = cfg.max || 1;
62 | var from = cfg.from || [];
63 | var count = cfg.count || 8;
64 | var decimals = cfg.decimals || 8;
65 | var continuity = cfg.continuity || 1;
66 | var dfactor = Math.pow(10, decimals) || 0;
67 | var data = [];
68 | var i, value;
69 |
70 | for (i = 0; i < count; ++i) {
71 | value = (from[i] || 0) + this.rand(min, max);
72 | if (this.rand() <= continuity) {
73 | data.push(Math.round(dfactor * value) / dfactor);
74 | } else {
75 | data.push(null);
76 | }
77 | }
78 |
79 | return data;
80 | },
81 |
82 | labels: function(config) {
83 | var cfg = config || {};
84 | var min = cfg.min || 0;
85 | var max = cfg.max || 100;
86 | var count = cfg.count || 8;
87 | var step = (max - min) / count;
88 | var decimals = cfg.decimals || 8;
89 | var dfactor = Math.pow(10, decimals) || 0;
90 | var prefix = cfg.prefix || '';
91 | var values = [];
92 | var i;
93 |
94 | for (i = min; i < max; i += step) {
95 | values.push(prefix + Math.round(dfactor * i) / dfactor);
96 | }
97 |
98 | return values;
99 | },
100 |
101 | months: function(config) {
102 | var cfg = config || {};
103 | var count = cfg.count || 12;
104 | var section = cfg.section;
105 | var values = [];
106 | var i, value;
107 |
108 | for (i = 0; i < count; ++i) {
109 | value = Months[Math.ceil(i) % 12];
110 | values.push(value.substring(0, section));
111 | }
112 |
113 | return values;
114 | },
115 |
116 | color: function(index) {
117 | return COLORS[index % COLORS.length];
118 | },
119 |
120 | transparentize: function(color, opacity) {
121 | var alpha = opacity === undefined ? 0.5 : 1 - opacity;
122 | return Color(color).alpha(alpha).rgbString();
123 | }
124 | };
125 |
126 | // DEPRECATED
127 | window.randomScalingFactor = function() {
128 | return Math.round(Samples.utils.rand(-100, 100));
129 | };
130 |
131 | // INITIALIZATION
132 |
133 | Samples.utils.srand(Date.now());
134 |
135 | // Google Analytics
136 | /* eslint-disable */
137 | if (document.location.hostname.match(/^(www\.)?chartjs\.org$/)) {
138 | (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
139 | (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
140 | m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
141 | })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
142 | ga('create', 'UA-28909194-3', 'auto');
143 | ga('send', 'pageview');
144 | }
145 | /* eslint-enable */
146 |
147 | }(this));
148 |
--------------------------------------------------------------------------------
/dataVisualizations/static/js/utils.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | window.chartColors = {
4 | red: 'rgb(255, 99, 132)',
5 | orange: 'rgb(255, 159, 64)',
6 | yellow: 'rgb(255, 205, 86)',
7 | green: 'rgb(75, 192, 192)',
8 | blue: 'rgb(54, 162, 235)',
9 | purple: 'rgb(153, 102, 255)',
10 | grey: 'rgb(201, 203, 207)'
11 | };
12 |
13 | (function(global) {
14 | var Months = [
15 | 'January',
16 | 'February',
17 | 'March',
18 | 'April',
19 | 'May',
20 | 'June',
21 | 'July',
22 | 'August',
23 | 'September',
24 | 'October',
25 | 'November',
26 | 'December'
27 | ];
28 |
29 | var COLORS = [
30 | '#4dc9f6',
31 | '#f67019',
32 | '#f53794',
33 | '#537bc4',
34 | '#acc236',
35 | '#166a8f',
36 | '#00a950',
37 | '#58595b',
38 | '#8549ba'
39 | ];
40 |
41 | var Samples = global.Samples || (global.Samples = {});
42 | var Color = global.Color;
43 |
44 | Samples.utils = {
45 | // Adapted from http://indiegamr.com/generate-repeatable-random-numbers-in-js/
46 | srand: function(seed) {
47 | this._seed = seed;
48 | },
49 |
50 | rand: function(min, max) {
51 | var seed = this._seed;
52 | min = min === undefined ? 0 : min;
53 | max = max === undefined ? 1 : max;
54 | this._seed = (seed * 9301 + 49297) % 233280;
55 | return min + (this._seed / 233280) * (max - min);
56 | },
57 |
58 | numbers: function(config) {
59 | var cfg = config || {};
60 | var min = cfg.min || 0;
61 | var max = cfg.max || 1;
62 | var from = cfg.from || [];
63 | var count = cfg.count || 8;
64 | var decimals = cfg.decimals || 8;
65 | var continuity = cfg.continuity || 1;
66 | var dfactor = Math.pow(10, decimals) || 0;
67 | var data = [];
68 | var i, value;
69 |
70 | for (i = 0; i < count; ++i) {
71 | value = (from[i] || 0) + this.rand(min, max);
72 | if (this.rand() <= continuity) {
73 | data.push(Math.round(dfactor * value) / dfactor);
74 | } else {
75 | data.push(null);
76 | }
77 | }
78 |
79 | return data;
80 | },
81 |
82 | labels: function(config) {
83 | var cfg = config || {};
84 | var min = cfg.min || 0;
85 | var max = cfg.max || 100;
86 | var count = cfg.count || 8;
87 | var step = (max - min) / count;
88 | var decimals = cfg.decimals || 8;
89 | var dfactor = Math.pow(10, decimals) || 0;
90 | var prefix = cfg.prefix || '';
91 | var values = [];
92 | var i;
93 |
94 | for (i = min; i < max; i += step) {
95 | values.push(prefix + Math.round(dfactor * i) / dfactor);
96 | }
97 |
98 | return values;
99 | },
100 |
101 | months: function(config) {
102 | var cfg = config || {};
103 | var count = cfg.count || 12;
104 | var section = cfg.section;
105 | var values = [];
106 | var i, value;
107 |
108 | for (i = 0; i < count; ++i) {
109 | value = Months[Math.ceil(i) % 12];
110 | values.push(value.substring(0, section));
111 | }
112 |
113 | return values;
114 | },
115 |
116 | color: function(index) {
117 | return COLORS[index % COLORS.length];
118 | },
119 |
120 | transparentize: function(color, opacity) {
121 | var alpha = opacity === undefined ? 0.5 : 1 - opacity;
122 | return Color(color).alpha(alpha).rgbString();
123 | }
124 | };
125 |
126 | // DEPRECATED
127 | window.randomScalingFactor = function() {
128 | return Math.round(Samples.utils.rand(-100, 100));
129 | };
130 |
131 | // INITIALIZATION
132 |
133 | Samples.utils.srand(Date.now());
134 |
135 | // Google Analytics
136 | /* eslint-disable */
137 | if (document.location.hostname.match(/^(www\.)?chartjs\.org$/)) {
138 | (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
139 | (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
140 | m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
141 | })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
142 | ga('create', 'UA-28909194-3', 'auto');
143 | ga('send', 'pageview');
144 | }
145 | /* eslint-enable */
146 |
147 | }(this));
148 |
--------------------------------------------------------------------------------
/genFullAnalysis.py:
--------------------------------------------------------------------------------
1 | import json
2 | import main
3 | import algo
4 | import saveScreenshots
5 |
6 |
7 | if __name__ == '__main__':
8 | message = """
9 |
10 | # {0} Analysis
11 |
12 |
13 | Stock Ticker: {1}
14 |
15 |
16 | Total Comments Mentioning Ticker: {2}
17 |
18 |
19 | Average Sentiment Towards Ticker: {3}
20 |
21 |
22 | Stock Mention Ranking (SMR): {4}
23 |
24 |
25 | Ticker First Mentioned on WSB: {5} Days Ago
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 | ## {0} Strategy Specific Returns (Starting w/ $1,000,000)
35 |
36 | ### Overview
37 | {6}
38 |
39 | ### Return by Strategy
40 |
41 | Note: Trades based on WallStreetBets comments are in BLUE, trades based on holding long-term are in RED
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 | #
64 | """
65 | ticker = raw_input("Ticker: ").upper()
66 | a = main.Trade(ticker)
67 | company = main.get_company_by_ticker(ticker)
68 | startAmount = 1000000
69 | stockMentionRanking = main.calc_stock_ranking(ticker) + 1
70 | print(stockMentionRanking)
71 | totalCount = main.get_total_count_by_ticker(ticker)
72 | print(totalCount)
73 | sentiment = main.get_sentiment_by_ticker(ticker)
74 | print(sentiment)
75 | mentionsByDay = main.get_weekday_by_ticker(ticker)
76 | stockFirstMentioned = main.get_first_comment_with_ticker(ticker)
77 | #print stockFirstMentioned
78 | table = '''
79 | |Strategy Name|Total Trades|Return Percentage|Return|Alpha|
80 | | ------------- |:-------:|:-------:|:-------:|:-------:|'''
81 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
82 | print(" buy and hold ")
83 | print(buyAndHold)
84 |
85 | strat1 = a.test_strategy(algo.strategy1, startAmount)-startAmount
86 | typeVal = "Strategy 1"
87 | returnVal = "${:,.2f}".format(strat1)
88 | if "-" in str(returnVal):
89 | returnVal = returnVal.replace("-", "")
90 | returnVal = "-" + returnVal
91 | table += "\n|{0}|{1}|{2}%|{3}|0|".format(typeVal, a.totalTrades, round((((float(strat1)/float(startAmount)))*100), 2), returnVal)
92 |
93 |
94 | strat4 = a.test_strategy(algo.strategy4, startAmount)-startAmount
95 | typeVal = "Strategy 4"
96 | returnVal = "${:,.2f}".format(strat4)
97 | if "-" in str(returnVal):
98 | returnVal = returnVal.replace("-", "")
99 | returnVal = "-" + returnVal
100 | table += "\n|{0}|{1}|{2}%|{3}|0|".format(typeVal, a.totalTrades, round((((float(strat4)/float(startAmount)))*100), 2), returnVal)
101 |
102 |
103 | strat5 = a.test_strategy(algo.strategy5, startAmount)-startAmount
104 | typeVal = "Strategy 5"
105 | returnVal = "${:,.2f}".format(strat5)
106 | if "-" in str(returnVal):
107 | returnVal = returnVal.replace("-", "")
108 | returnVal = "-" + returnVal
109 | table += "\n|{0}|{1}|{2}%|{3}|0|".format(typeVal, a.totalTrades, round((((float(strat5)/float(startAmount)))*100), 2), returnVal)
110 |
111 | strat6 = a.test_strategy(algo.strategy6, startAmount)-startAmount
112 | typeVal = "Strategy 6"
113 | returnVal = "${:,.2f}".format(strat6)
114 | if "-" in str(returnVal):
115 | returnVal = returnVal.replace("-", "")
116 | returnVal = "-" + returnVal
117 | table += "\n|{0}|{1}|{2}%|{3}|0|".format(typeVal, a.totalTrades, round((((float(strat6)/float(startAmount)))*100), 2), returnVal)
118 |
119 | strat7 = a.test_strategy(algo.strategy7, startAmount)-startAmount
120 | typeVal = "Strategy 7"
121 | returnVal = "${:,.2f}".format(strat7)
122 | if "-" in str(returnVal):
123 | returnVal = returnVal.replace("-", "")
124 | returnVal = "-" + returnVal
125 | table += "\n|{0}|{1}|{2}%|{3}|0|".format(typeVal, a.totalTrades, round((((float(strat7)/float(startAmount)))*100), 2), returnVal)
126 |
127 |
128 | message = message.format(company, ticker, '{:,}'.format(totalCount), round(sentiment, 4), stockMentionRanking, '{:,}'.format(stockFirstMentioned), table)
129 | print message
130 | '''for key, value in main.calc_predicted_direction("MU").iteritems():
131 | print("{} {}".format(key, value))'''
132 |
--------------------------------------------------------------------------------
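The five per-strategy blocks above repeat the same row-building arithmetic. A condensed sketch of that logic as a helper (the helper itself is illustrative, not part of the repo):

    def strategy_row(name, total_trades, profit, start_amount):
        # profit is the strategy's ending balance minus start_amount
        pct = round(float(profit) / float(start_amount) * 100, 2)
        return_val = "${:,.2f}".format(abs(profit))
        if profit < 0:
            return_val = "-" + return_val  # render losses as -$1,234.56
        return "\n|{0}|{1}|{2}%|{3}|0|".format(name, total_trades, pct, return_val)

    # table += strategy_row("Strategy 1", a.totalTrades, strat1, startAmount)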
/templates/ticker.html:
--------------------------------------------------------------------------------
1 |
79 |
80 | Pure CSS Ticker (No-JS)
81 | A smooth horizontal news like ticker using CSS transform on infinite loop
82 |
83 |
84 |
85 |
Letterpress chambray brunch.
86 |
Vice mlkshk crucifix beard chillwave meditation hoodie asymmetrical Helvetica.
87 |
Ugh PBR&B kale chips Echo Park.
88 |
Gluten-free mumblecore chambray mixtape food truck.
89 |
95 |
96 |
97 |
98 | So, annoyingly, most JS solutions don't do horizontal tickers on an infinite loop, nor do they render all that smoothly.
99 | The difficulty with CSS was getting the animation to transform the entire items 100% yet include an offset that was only the width of the browser (and not the items full width).
100 | Setting the start of the animation to anything less than zero (e.g. -100%) is unreliable as it is based on the items width, and may not offset the full width of the browser or creates too large an offset
101 | Padding left on the wrapper allows us the correct initial offset, but you still get a 'jump' as it then loops too soon. (The full text does not travel off-screen)
102 | This is where adding display:inline-block to the item parent, where the natural behaviour of the element exists as inline, gives an opportunity to add padding-right 100% here. The padding is taken from the parent (as its treated as inline) which usefully is the wrapper width.
103 | Magically* we now have perfect 100% offset, a true 100% translate (width of items) and enough padding in the element to ensure all items leave the screen before it repeats! (width of browser)
104 | *Why this works: The inside of an inline-block is formatted as a block box, and the element itself is formatted as an atomic inline-level box. Uses `box-sizing: content-box`.
105 | Padding is calculated on the width of the containing box.
106 | So as both the ticker and the items are formatted as nested inline, the padding must be calculated by the ticker wrap.
107 |
108 | Ticker content c/o Hipsum.co
109 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, render_template, request, url_for, redirect, Markup, jsonify, make_response, send_from_directory, session
2 | import json
3 | import calendar
4 | import main
5 | import algo
6 | import datetime
7 | import time
8 | from flask_sockets import Sockets
9 | import random
10 |
11 |
12 |
13 | app = Flask(__name__, static_url_path='/static')
14 | sockets = Sockets(app)
15 |
16 | balance = [100000]
17 |
18 | @sockets.route('/echo')
19 | def echo_socket(ws):
20 | while True:
21 | #message = ws.receive()
22 | ws.send(str(balance[-1]))
23 | num = random.randint(1, 10000)
24 | if random.randint(1,2) == 2:
25 | num *= -1
26 | balance.append(balance[-1] + num)
27 | time.sleep(.1)
28 |
29 |
30 | @app.route('/', methods=['GET'])
31 | def index():
32 | return render_template("index.html")
33 |
34 | @app.route('/viz1', methods=['GET'])
35 | def viz1():
36 | a = json.load(open("static/AllCounts.json"))
37 | db = []
38 | for key, val in a.iteritems():
39 | db.append({"Ticker": key, "Mentions": val})
40 | db = sorted(db, key=lambda k: k['Mentions'])[-20:]
41 | return render_template("viz1.html", DATABASE=db)
42 |
43 | @app.route('/viz2', methods=['GET'])
44 | def viz2():
45 | db = json.load(open("TESLA_DATA.json"))
46 | return render_template("viz2.html", DATABASE=db)
47 |
48 | @app.route('/viz3', methods=['GET'])
49 | def viz3():
50 | db = json.load(open("TESLA_DATA_COMMENTS.json"))
51 | return render_template("viz3.html", DATABASE=db)
52 |
53 | @app.route('/totalByDay/<ticker>', methods=['GET'])
54 | def totalByDay(ticker):
55 | days = list(calendar.day_abbr)
56 | mentionsByDay = main.get_weekday_by_ticker(ticker.upper())
57 | db = []
58 | for i in range(7):
59 | a = mentionsByDay[str(i)]
60 | db.append({"Day": days[i], "Mentions": a})
61 | #db = sorted(db, key=lambda k: k['Mentions'])[-20:]
62 | return render_template("days.html", DATABASE=db, stock=ticker.upper())
63 |
64 | @app.route('/strat1/<ticker>', methods=['GET'])
65 | def testStrat1(ticker):
66 | ticker = ticker.upper()
67 | a = main.Trade(ticker)
68 | startAmount = 1000000
69 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
70 | strat1 = a.test_strategy(algo.strategy1, startAmount)-startAmount
71 | strat1Info = a.get_more_info()
72 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 1", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
73 |
74 | @app.route('/strat3/<ticker>', methods=['GET'])
75 | def testStrat3(ticker):
76 | ticker = ticker.upper()
77 | a = main.Trade(ticker)
78 | startAmount = 1000000
79 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
80 | strat1 = a.test_strategy(algo.strategy3, startAmount)-startAmount
81 | strat1Info = a.get_more_info()
82 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 3", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
83 |
84 | @app.route('/strat4/<ticker>', methods=['GET'])
85 | def testStrat4(ticker):
86 | ticker = ticker.upper()
87 | a = main.Trade(ticker)
88 | startAmount = 1000000
89 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
90 | strat1 = a.test_strategy(algo.strategy4, startAmount)-startAmount
91 | strat1Info = a.get_more_info()
92 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 4", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
93 |
94 | @app.route('/strat5/<ticker>', methods=['GET'])
95 | def testStrat5(ticker):
96 | ticker = ticker.upper()
97 | a = main.Trade(ticker)
98 | startAmount = 1000000
99 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
100 | strat1 = a.test_strategy(algo.strategy5, startAmount)-startAmount
101 | strat1Info = a.get_more_info()
102 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 5", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
103 |
104 | @app.route('/strat6/<ticker>', methods=['GET'])
105 | def testStrat6(ticker):
106 | ticker = ticker.upper()
107 | a = main.Trade(ticker)
108 | startAmount = 1000000
109 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
110 | strat1 = a.test_strategy(algo.strategy6, startAmount)-startAmount
111 | strat1Info = a.get_more_info()
112 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 6", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
113 |
114 | @app.route('/strat7/<ticker>', methods=['GET'])
115 | def testStrat7(ticker):
116 | ticker = ticker.upper()
117 | a = main.Trade(ticker)
118 | startAmount = 1000000
119 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
120 | strat1 = a.test_strategy(algo.strategy7, startAmount)-startAmount
121 | strat1Info = a.get_more_info()
122 | return render_template("lines.html", DATABASE=strat1Info, strategy="Strategy 7", balance='{:,.2f}'.format(startAmount), stock=ticker.upper())
123 |
124 |
125 | if __name__ == '__main__':
126 | app.run(host='127.0.0.1', port=5000)
127 |
--------------------------------------------------------------------------------
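Note that app.run() only serves the plain HTTP routes; the Flask-Sockets /echo route needs a WebSocket-capable server (the Procfile's bare gunicorn worker will not handle it either). A minimal sketch of serving both locally, assuming gevent and gevent-websocket are installed (neither is listed in requirements.txt):

    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler
    from app import app

    # WebSocketHandler lets the /echo socket route work alongside the HTTP routes.
    server = pywsgi.WSGIServer(('127.0.0.1', 5000), app, handler_class=WebSocketHandler)
    server.serve_forever()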
/static/js/leaflet-heat.js:
--------------------------------------------------------------------------------
1 | /*
2 | (c) 2014, Vladimir Agafonkin
3 | simpleheat, a tiny JavaScript library for drawing heatmaps with Canvas
4 | https://github.com/mourner/simpleheat
5 | */
6 | !function(){"use strict";function t(i){return this instanceof t?(this._canvas=i="string"==typeof i?document.getElementById(i):i,this._ctx=i.getContext("2d"),this._width=i.width,this._height=i.height,this._max=1,void this.clear()):new t(i)}t.prototype={defaultRadius:25,defaultGradient:{.4:"blue",.6:"cyan",.7:"lime",.8:"yellow",1:"red"},data:function(t,i){return this._data=t,this},max:function(t){return this._max=t,this},add:function(t){return this._data.push(t),this},clear:function(){return this._data=[],this},radius:function(t,i){i=i||15;var a=this._circle=document.createElement("canvas"),s=a.getContext("2d"),e=this._r=t+i;return a.width=a.height=2*e,s.shadowOffsetX=s.shadowOffsetY=200,s.shadowBlur=i,s.shadowColor="black",s.beginPath(),s.arc(e-200,e-200,t,0,2*Math.PI,!0),s.closePath(),s.fill(),this},gradient:function(t){var i=document.createElement("canvas"),a=i.getContext("2d"),s=a.createLinearGradient(0,0,0,256);i.width=1,i.height=256;for(var e in t)s.addColorStop(e,t[e]);return a.fillStyle=s,a.fillRect(0,0,1,256),this._grad=a.getImageData(0,0,1,256).data,this},draw:function(t){this._circle||this.radius(this.defaultRadius),this._grad||this.gradient(this.defaultGradient);var i=this._ctx;i.clearRect(0,0,this._width,this._height);for(var a,s=0,e=this._data.length;e>s;s++)a=this._data[s],i.globalAlpha=Math.max(a[2]/this._max,t||.05),i.drawImage(this._circle,a[0]-this._r,a[1]-this._r);var n=i.getImageData(0,0,this._width,this._height);return this._colorize(n.data,this._grad),i.putImageData(n,0,0),this},_colorize:function(t,i){for(var a,s=3,e=t.length;e>s;s+=4)a=4*t[s],a&&(t[s-3]=i[a],t[s-2]=i[a+1],t[s-1]=i[a+2])}},window.simpleheat=t}(),/*
7 | (c) 2014, Vladimir Agafonkin
8 | Leaflet.heat, a tiny and fast heatmap plugin for Leaflet.
9 | https://github.com/Leaflet/Leaflet.heat
10 | */
11 | L.HeatLayer=(L.Layer?L.Layer:L.Class).extend({initialize:function(t,i){this._latlngs=t,L.setOptions(this,i)},setLatLngs:function(t){return this._latlngs=t,this.redraw()},addLatLng:function(t){return this._latlngs.push(t),this.redraw()},setOptions:function(t){return L.setOptions(this,t),this._heat&&this._updateOptions(),this.redraw()},redraw:function(){return!this._heat||this._frame||this._map._animating||(this._frame=L.Util.requestAnimFrame(this._redraw,this)),this},onAdd:function(t){this._map=t,this._canvas||this._initCanvas(),t._panes.overlayPane.appendChild(this._canvas),t.on("moveend",this._reset,this),t.options.zoomAnimation&&L.Browser.any3d&&t.on("zoomanim",this._animateZoom,this),this._reset()},onRemove:function(t){t.getPanes().overlayPane.removeChild(this._canvas),t.off("moveend",this._reset,this),t.options.zoomAnimation&&t.off("zoomanim",this._animateZoom,this)},addTo:function(t){return t.addLayer(this),this},_initCanvas:function(){var t=this._canvas=L.DomUtil.create("canvas","leaflet-heatmap-layer leaflet-layer"),i=L.DomUtil.testProp(["transformOrigin","WebkitTransformOrigin","msTransformOrigin"]);t.style[i]="50% 50%";var a=this._map.getSize();t.width=a.x,t.height=a.y;var s=this._map.options.zoomAnimation&&L.Browser.any3d;L.DomUtil.addClass(t,"leaflet-zoom-"+(s?"animated":"hide")),this._heat=simpleheat(t),this._updateOptions()},_updateOptions:function(){this._heat.radius(this.options.radius||this._heat.defaultRadius,this.options.blur),this.options.gradient&&this._heat.gradient(this.options.gradient),this.options.max&&this._heat.max(this.options.max)},_reset:function(){var t=this._map.containerPointToLayerPoint([0,0]);L.DomUtil.setPosition(this._canvas,t);var i=this._map.getSize();this._heat._width!==i.x&&(this._canvas.width=this._heat._width=i.x),this._heat._height!==i.y&&(this._canvas.height=this._heat._height=i.y),this._redraw()},_redraw:function(){var t,i,a,s,e,n,h,o,r,d=[],_=this._heat._r,l=this._map.getSize(),m=new L.Bounds(L.point([-_,-_]),l.add([_,_])),c=void 0===this.options.max?1:this.options.max,u=void 0===this.options.maxZoom?this._map.getMaxZoom():this.options.maxZoom,f=1/Math.pow(2,Math.max(0,Math.min(u-this._map.getZoom(),12))),g=_/2,p=[],v=this._map._getMapPanePos(),w=v.x%g,y=v.y%g;for(t=0,i=this._latlngs.length;i>t;t++)if(a=this._map.latLngToContainerPoint(this._latlngs[t]),m.contains(a)){e=Math.floor((a.x-w)/g)+2,n=Math.floor((a.y-y)/g)+2;var x=void 0!==this._latlngs[t].alt?this._latlngs[t].alt:void 0!==this._latlngs[t][2]?+this._latlngs[t][2]:1;r=x*f,p[n]=p[n]||[],s=p[n][e],s?(s[0]=(s[0]*s[2]+a.x*r)/(s[2]+r),s[1]=(s[1]*s[2]+a.y*r)/(s[2]+r),s[2]+=r):p[n][e]=[a.x,a.y,r]}for(t=0,i=p.length;i>t;t++)if(p[t])for(h=0,o=p[t].length;o>h;h++)s=p[t][h],s&&d.push([Math.round(s[0]),Math.round(s[1]),Math.min(s[2],c)]);this._heat.data(d).draw(this.options.minOpacity),this._frame=null},_animateZoom:function(t){var i=this._map.getZoomScale(t.zoom),a=this._map._getCenterOffset(t.center)._multiplyBy(-i).subtract(this._map._getMapPanePos());L.DomUtil.setTransform?L.DomUtil.setTransform(this._canvas,a,i):this._canvas.style[L.DomUtil.TRANSFORM]=L.DomUtil.getTranslateString(a)+" scale("+i+")"}}),L.heatLayer=function(t,i){return new L.HeatLayer(t,i)};
--------------------------------------------------------------------------------
/dataVisualizations/static/js/leaflet-heat.js:
--------------------------------------------------------------------------------
1 | /*
2 | (c) 2014, Vladimir Agafonkin
3 | simpleheat, a tiny JavaScript library for drawing heatmaps with Canvas
4 | https://github.com/mourner/simpleheat
5 | */
6 | !function(){"use strict";function t(i){return this instanceof t?(this._canvas=i="string"==typeof i?document.getElementById(i):i,this._ctx=i.getContext("2d"),this._width=i.width,this._height=i.height,this._max=1,void this.clear()):new t(i)}t.prototype={defaultRadius:25,defaultGradient:{.4:"blue",.6:"cyan",.7:"lime",.8:"yellow",1:"red"},data:function(t,i){return this._data=t,this},max:function(t){return this._max=t,this},add:function(t){return this._data.push(t),this},clear:function(){return this._data=[],this},radius:function(t,i){i=i||15;var a=this._circle=document.createElement("canvas"),s=a.getContext("2d"),e=this._r=t+i;return a.width=a.height=2*e,s.shadowOffsetX=s.shadowOffsetY=200,s.shadowBlur=i,s.shadowColor="black",s.beginPath(),s.arc(e-200,e-200,t,0,2*Math.PI,!0),s.closePath(),s.fill(),this},gradient:function(t){var i=document.createElement("canvas"),a=i.getContext("2d"),s=a.createLinearGradient(0,0,0,256);i.width=1,i.height=256;for(var e in t)s.addColorStop(e,t[e]);return a.fillStyle=s,a.fillRect(0,0,1,256),this._grad=a.getImageData(0,0,1,256).data,this},draw:function(t){this._circle||this.radius(this.defaultRadius),this._grad||this.gradient(this.defaultGradient);var i=this._ctx;i.clearRect(0,0,this._width,this._height);for(var a,s=0,e=this._data.length;e>s;s++)a=this._data[s],i.globalAlpha=Math.max(a[2]/this._max,t||.05),i.drawImage(this._circle,a[0]-this._r,a[1]-this._r);var n=i.getImageData(0,0,this._width,this._height);return this._colorize(n.data,this._grad),i.putImageData(n,0,0),this},_colorize:function(t,i){for(var a,s=3,e=t.length;e>s;s+=4)a=4*t[s],a&&(t[s-3]=i[a],t[s-2]=i[a+1],t[s-1]=i[a+2])}},window.simpleheat=t}(),/*
7 | (c) 2014, Vladimir Agafonkin
8 | Leaflet.heat, a tiny and fast heatmap plugin for Leaflet.
9 | https://github.com/Leaflet/Leaflet.heat
10 | */
11 | L.HeatLayer=(L.Layer?L.Layer:L.Class).extend({initialize:function(t,i){this._latlngs=t,L.setOptions(this,i)},setLatLngs:function(t){return this._latlngs=t,this.redraw()},addLatLng:function(t){return this._latlngs.push(t),this.redraw()},setOptions:function(t){return L.setOptions(this,t),this._heat&&this._updateOptions(),this.redraw()},redraw:function(){return!this._heat||this._frame||this._map._animating||(this._frame=L.Util.requestAnimFrame(this._redraw,this)),this},onAdd:function(t){this._map=t,this._canvas||this._initCanvas(),t._panes.overlayPane.appendChild(this._canvas),t.on("moveend",this._reset,this),t.options.zoomAnimation&&L.Browser.any3d&&t.on("zoomanim",this._animateZoom,this),this._reset()},onRemove:function(t){t.getPanes().overlayPane.removeChild(this._canvas),t.off("moveend",this._reset,this),t.options.zoomAnimation&&t.off("zoomanim",this._animateZoom,this)},addTo:function(t){return t.addLayer(this),this},_initCanvas:function(){var t=this._canvas=L.DomUtil.create("canvas","leaflet-heatmap-layer leaflet-layer"),i=L.DomUtil.testProp(["transformOrigin","WebkitTransformOrigin","msTransformOrigin"]);t.style[i]="50% 50%";var a=this._map.getSize();t.width=a.x,t.height=a.y;var s=this._map.options.zoomAnimation&&L.Browser.any3d;L.DomUtil.addClass(t,"leaflet-zoom-"+(s?"animated":"hide")),this._heat=simpleheat(t),this._updateOptions()},_updateOptions:function(){this._heat.radius(this.options.radius||this._heat.defaultRadius,this.options.blur),this.options.gradient&&this._heat.gradient(this.options.gradient),this.options.max&&this._heat.max(this.options.max)},_reset:function(){var t=this._map.containerPointToLayerPoint([0,0]);L.DomUtil.setPosition(this._canvas,t);var i=this._map.getSize();this._heat._width!==i.x&&(this._canvas.width=this._heat._width=i.x),this._heat._height!==i.y&&(this._canvas.height=this._heat._height=i.y),this._redraw()},_redraw:function(){var t,i,a,s,e,n,h,o,r,d=[],_=this._heat._r,l=this._map.getSize(),m=new L.Bounds(L.point([-_,-_]),l.add([_,_])),c=void 0===this.options.max?1:this.options.max,u=void 0===this.options.maxZoom?this._map.getMaxZoom():this.options.maxZoom,f=1/Math.pow(2,Math.max(0,Math.min(u-this._map.getZoom(),12))),g=_/2,p=[],v=this._map._getMapPanePos(),w=v.x%g,y=v.y%g;for(t=0,i=this._latlngs.length;i>t;t++)if(a=this._map.latLngToContainerPoint(this._latlngs[t]),m.contains(a)){e=Math.floor((a.x-w)/g)+2,n=Math.floor((a.y-y)/g)+2;var x=void 0!==this._latlngs[t].alt?this._latlngs[t].alt:void 0!==this._latlngs[t][2]?+this._latlngs[t][2]:1;r=x*f,p[n]=p[n]||[],s=p[n][e],s?(s[0]=(s[0]*s[2]+a.x*r)/(s[2]+r),s[1]=(s[1]*s[2]+a.y*r)/(s[2]+r),s[2]+=r):p[n][e]=[a.x,a.y,r]}for(t=0,i=p.length;i>t;t++)if(p[t])for(h=0,o=p[t].length;o>h;h++)s=p[t][h],s&&d.push([Math.round(s[0]),Math.round(s[1]),Math.min(s[2],c)]);this._heat.data(d).draw(this.options.minOpacity),this._frame=null},_animateZoom:function(t){var i=this._map.getZoomScale(t.zoom),a=this._map._getCenterOffset(t.center)._multiplyBy(-i).subtract(this._map._getMapPanePos());L.DomUtil.setTransform?L.DomUtil.setTransform(this._canvas,a,i):this._canvas.style[L.DomUtil.TRANSFORM]=L.DomUtil.getTranslateString(a)+" scale("+i+")"}}),L.heatLayer=function(t,i){return new L.HeatLayer(t,i)};
--------------------------------------------------------------------------------
/templates/lines.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {{strategy}} Results for {{stock}} Starting with a ${{balance}} Account Balance
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
160 |
161 |
162 |
163 |
--------------------------------------------------------------------------------
/algo.py:
--------------------------------------------------------------------------------
1 | import main
2 | import json
3 | import traceback
4 |
5 | INVERSE = True
6 | DAY_DELAY = 14
7 |
8 | def by_word_count():
9 | c = open("comments.txt").read().split("\n")
10 | 	a = main.MultiThread(c, main.calc_words)
11 | g = a.run_all()
12 | print g
13 |
14 | def by_word_countz():
15 | 	a = main.DatasetProcess(main.calc_words, saveAs="myFile.json")
16 | a.run()
17 |
18 | def strategy0(tradeClass):
19 | description = """Buying and holding the stock"""
20 | strategy = {'trades': {}, 'delay': 0, "description": description}
21 | for date, ratio in tradeClass.ratio_by_date.iteritems():
22 | strategy['trades'][date] = {"trade": "long"}
23 | return strategy
24 |
25 | def strategy1(tradeClass):
26 | 	description = """Go long when mention frequency runs more than 25% above its average, short when it runs more than 25% below"""
27 | strategy = {'trades': {}, 'delay': 3, "description": description}
28 | for date, ratio in tradeClass.ratio_by_date.iteritems():
29 | diffVal = (ratio - tradeClass.average_ratio)
30 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
31 | strategy['trades'][date] = {"trade": None}
32 | else:
33 | if diffVal > 0:
34 | strategy['trades'][date] = {"trade": "long"}
35 | else:
36 | #strategy['trades'][date] = {"trade": "long"}
37 | strategy['trades'][date] = {"trade": "short"}
38 | return strategy
39 |
40 | def strategy3(tradeClass):
41 | 	description = """Short whenever mention frequency deviates more than 25% from its average"""
42 | strategy = {'trades': {}, 'delay': 3, "description": description}
43 | for date, ratio in tradeClass.ratio_by_date.iteritems():
44 | diffVal = (ratio - tradeClass.average_ratio)
45 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
46 | strategy['trades'][date] = {"trade": None}
47 | else:
48 | strategy['trades'][date] = {"trade": "short"}
49 | return strategy
50 |
51 | def strategy5(tradeClass):
52 | 	description = """Go long whenever mention frequency deviates more than 25% from its average"""
53 | strategy = {'trades': {}, 'delay': 3, "description": description}
54 | for date, ratio in tradeClass.ratio_by_date.iteritems():
55 | diffVal = (ratio - tradeClass.average_ratio)
56 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
57 | strategy['trades'][date] = {"trade": None}
58 | else:
59 | strategy['trades'][date] = {"trade": "long"}
60 | return strategy
61 |
62 | def strategy6(tradeClass):
63 | 	description = """Go long only when mention frequency stays within 25% of its average"""
64 | strategy = {'trades': {}, 'delay': 3, "description": description}
65 | for date, ratio in tradeClass.ratio_by_date.iteritems():
66 | diffVal = (ratio - tradeClass.average_ratio)
67 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
68 | strategy['trades'][date] = {"trade": "long"}
69 | else:
70 | strategy['trades'][date] = {"trade": None}
71 | return strategy
72 |
73 | def strategy7(tradeClass):
74 | 	description = """Short only when mention frequency stays within 25% of its average"""
75 | strategy = {'trades': {}, 'delay': 3, "description": description}
76 | for date, ratio in tradeClass.ratio_by_date.iteritems():
77 | diffVal = (ratio - tradeClass.average_ratio)
78 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
79 | strategy['trades'][date] = {"trade": "short"}
80 | else:
81 | strategy['trades'][date] = {"trade": None}
82 | return strategy
83 |
84 | def strategy4(tradeClass):
85 | 	description = """Like Strategy 1, but hold the previous position when mention frequency stays within 25% of its average"""
86 | strategy = {'trades': {}, 'delay': 3, "description": description}
87 | prevTrade = None
88 | for date, ratio in tradeClass.ratio_by_date.iteritems():
89 | diffVal = (ratio - tradeClass.average_ratio)
90 | if abs(diffVal / tradeClass.average_ratio) * 100 < 25:
91 | strategy['trades'][date] = {"trade": prevTrade}
92 | else:
93 | if diffVal > 0:
94 | prevTrade = "long"
95 | strategy['trades'][date] = {"trade": "long"}
96 | else:
97 | prevTrade = "short"
98 | strategy['trades'][date] = {"trade": "short"}
99 | return strategy
100 |
101 | def strategy2(tradeClass):
102 | dayDelay = 14
103 | for i in range(dayDelay, len(tradeClass.modified_dates)):
104 | indicatorDay = tradeClass.modified_dates[i-dayDelay]
105 | tradeDay = tradeClass.modified_dates[i]
106 | if indicatorDay in tradeClass.historical_data:
107 | pass
108 |
109 |
110 | if __name__ == '__main__':
111 | infoz = {'buyAndHold': 0}
112 | for stock in main.STOCK_TICKERS:
113 | try:
114 | if main.get_total_count_by_ticker(stock) > 1000:
115 | a = main.Trade(stock)
116 | startAmount = 1000000
117 | buyAndHold = a.calc_buy_and_hold(startAmount)-startAmount
118 | strat1 = a.test_strategy(strategy1, startAmount)-startAmount
119 | strat3 = a.test_strategy(strategy3, startAmount)-startAmount
120 | strat4 = a.test_strategy(strategy4, startAmount)-startAmount
121 | strat5 = a.test_strategy(strategy5, startAmount)-startAmount
122 | strat6 = a.test_strategy(strategy6, startAmount)-startAmount
123 | strat7 = a.test_strategy(strategy7, startAmount)-startAmount
124 | print("{} | Returns from buy and hold: ${:,.2f}".format(stock, buyAndHold))
125 | print("{} | Returns from Strategy 1: ${:,.2f}".format(stock, strat1))
126 | print("{} | Returns from Strategy 3: ${:,.2f}".format(stock, strat3))
127 | print("{} | Returns from Strategy 4: ${:,.2f}".format(stock, strat4))
128 | print("{} | Returns from Strategy 5: ${:,.2f}".format(stock, strat5))
129 | print("{} | Returns from Strategy 6: ${:,.2f}".format(stock, strat6))
130 | print("{} | Returns from Strategy 7: ${:,.2f}".format(stock, strat7))
131 | info = []
132 | info.append({'name': 'strat1','count': strat1})
133 | info.append({'name': 'strat3','count': strat3})
134 | info.append({'name': 'strat4','count': strat4})
135 | info.append({'name': 'strat5','count': strat5})
136 | info.append({'name': 'strat6','count': strat6})
137 | info.append({'name': 'strat7','count': strat7})
138 | info.append({'name':'buyAndHold', 'count': buyAndHold})
139 | if max([strat1, strat3, strat4, strat5, strat6, strat7]) > buyAndHold:
140 | print("WINNER")
141 | print("\n")
142 | newlist = sorted(info, key=lambda k: k['count'])
143 | name = newlist[-1]['name']
144 | if name not in infoz:
145 | infoz[name] = 0
146 | infoz[name] += 1
147 | print infoz
148 |
149 | except Exception as exp:
150 | #print exp
151 | pass
152 | print info
153 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | The WSB Index
2 |
3 |
4 |
5 |
An algorithmic trading strategy to predict market volatility from /r/WallStreetBets comments
6 |
7 |
8 |
9 |
10 | # Overall Analysis
11 |
12 |
13 | Total Comments: 2,979,131
14 |
15 |
16 | Total Comments Mentioning Valid Securities: 281,550
17 |
18 |
19 | First Comment: Wednesday, April 11, 2012 4:46:43 PM
20 |
21 | #
22 |
23 | ### Top Securities by Total Comment Mentions
24 |
25 | |Ticker|Mentions|Company|Sector|Industry|
26 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
27 | |MU|33450|Micron Technology, Inc.|Technology|Semiconductors|
28 | |AMD|32526|Advanced Micro Devices, Inc.|Technology|Semiconductors|
29 | |TSLA|12079|Tesla, Inc. |Capital Goods|Auto Manufacturing|
30 | |AAPL|11760|Apple Inc.|Technology|Computer Manufacturing|
31 | |NVDA|11087|NVIDIA Corporation|Technology|Semiconductors|
32 | |AMZN|10835|Amazon.com, Inc.|Consumer Services|Catalog/Specialty Distribution|
33 | |FB|10827|Facebook, Inc.|Technology|Computer Software: Programming, Data Processing|
34 | |Z|9188|Zillow Group, Inc.|Miscellaneous|Business Services|
35 | |MSFT|8137|Microsoft Corporation|Technology|Computer Software: Prepackaged Software|
36 | |QQQ|4939|Invesco QQQ Trust, Series 1|n/a|n/a|
37 |
38 | #
39 |
40 | ### Best Securities by Sentiment Polarity (w/ 50+ Mentions)
41 |
42 | |Ticker|Sentiment|Company|Sector|Industry|
43 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
44 | |SPWR|0.11|SunPower Corporation|Technology|Semiconductors|
45 | |NTES|0.0969|NetEase, Inc.|Miscellaneous|Business Services|
46 | |SWKS|0.0949|Skyworks Solutions, Inc.|Technology|Semiconductors|
47 | |NTLA|0.0927|Intellia Therapeutics, Inc.|Health Care|Biotechnology: In Vitro & In Vivo Diagnostic Substances|
48 | |ONCS|0.0912|OncoSec Medical Incorporated|Health Care|Major Pharmaceuticals|
49 |
50 | #
51 |
52 | ### Worst Securities by Sentiment Polarity (w/ 50+ Mentions)
53 |
54 | |Ticker|Sentiment|Company|Sector|Industry|
55 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
56 | |TRIL|-0.0415|Trillium Therapeutics Inc.|Health Care|Major Pharmaceuticals|
57 | |LION|-0.0412|Fidelity Southern Corporation|Finance|Major Banks|
58 | |LOCO|-0.0356|El Pollo Loco Holdings, Inc.|Consumer Services|Restaurants|
59 | |RETA|-0.0329|Reata Pharmaceuticals, Inc.|Health Care|Major Pharmaceuticals|
60 | |NEXT|-0.0319|NextDecade Corporation|Public Utilities|Oil & Gas Production|
61 |
62 | #
63 |
64 | ### Tickers based on Trading Volume/Mention Ratio
65 | |Ticker|Volume/Mention|Company|Average Volume|Mentions|
66 | | ------------- |:-------:|:-------:|:-------:|:-------:|
67 | |PT|6.203|Pintec Technology Holdings Limited|9143|1471|
68 | |LINK|8.3809|Interlink Electronics, Inc.|5540|661|
69 | |OLD|10.0424|The Long-Term Care ETF|1767|176|
70 | |PY|11.1917|Principal Shareholder Yield Index ETF|1701|153|
71 | |VALU|38.0743|Value Line, Inc.|3846|101|
72 | |SELF|45.4174|Global Self Storage, Inc.|12626|277|
73 | |SG|51.3975|Sirius International Insurance Group, Ltd.|4677|91|
74 | |BRAC|52.8569|Black Ridge Acquisition Corp.|31027|587|
75 | |APM|53.5876|Aptorum Group Limited|214|4|
76 | |SP|55.1967|SP Plus Corporation|73246|1323|
77 |
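
The ratio column above appears to be the ticker's average daily trading volume divided by its total mention count, so a low value flags tickers that are discussed far more than they are actually traded. A minimal sketch, with illustrative names:

```
# Volume-to-mention ratio as used in the table above (names are illustrative)
def volume_mention_ratio(average_volume, mentions):
    return float(average_volume) / float(mentions)

print(round(volume_mention_ratio(5540, 661), 4))
# ~8.3812, in line with the 8.3809 listed for LINK (the volume column is rounded)
```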
78 | #
79 |
80 | ### Most Active Posters on /r/WallStreetBets (Excluding Bots)
81 | |Username|Total Comments|Average Sentiment|Most Mentioned Ticker|
82 | | ------------- |:-------:|:-----:|:-------------:|
83 | | theycallme1 | 16967 | 0.0249 | Z |
84 | | avgazn247 | 14042 | 0.0247 | MU |
85 | | SIThereAndThere | 7915 | 0.0151 | Z |
86 | | brutalpancake | 6022 | 0.0288 | MU |
87 | | Macabilly | 5554 | 0.0165 | AMD |
88 |
89 | #
90 |
91 | ### Best Tickers Based on Average Upvote Count
92 |
93 | |Ticker|Average Upvotes|Company|Sector|Industry|
94 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
95 | |WASH|52.0|Washington Trust Bancorp, Inc.|Finance|Major Banks|
96 | |ALQA|44.0|Alliqua BioMedical, Inc.|Health Care|Medical/Dental Instruments|
97 | |FMBI|39.0|First Midwest Bancorp, Inc.|Finance|Major Banks|
98 | |POOL|33.0|Pool Corporation|Consumer Durables|Industrial Specialties|
99 | |LIND|31.0|Lindblad Expeditions Holdings Inc. |Consumer Services|Transportation Services|
100 |
101 | #
102 |
103 | ### Worst Tickers Based on Average Upvote Count
104 |
105 | |Ticker|Average Upvotes|Company|Sector|Industry|
106 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
107 | |CNTY|-8.0|Century Casinos, Inc.|Consumer Services|Hotels/Resorts|
108 | |CTRN|-8.0|Citi Trends, Inc.|Consumer Services|Clothing/Shoe/Accessory Stores|
109 | |ABIL|-4.0|Ability Inc.|Consumer Durables|Telecommunications Equipment|
110 | |DOVA|-3.0|Dova Pharmaceuticals, Inc.|Health Care|Major Pharmaceuticals|
111 | |EEMA|-3.0|iShares MSCI Emerging Markets Asia ETF|n/a|n/a|
112 |
113 | #
114 |
115 | # Tesla, Inc. Analysis
116 |
117 |
118 | Stock Ticker: TSLA
119 |
120 |
121 | Total Comments Mentioning Ticker: 12,079
122 |
123 |
124 | Average Sentiment Towards Ticker: 0.042
125 |
126 |
127 | Stock Mention Ranking (SMR): 3
128 |
129 |
130 | Ticker First Mentioned on WSB: 2,105 Days Ago
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 | ## Tesla, Inc. Strategy Specific Returns (Starting w/ $1,000,000)
140 |
141 | ### Overview
142 | |Strategy Name|Total Trades|Return Percentage|Return|Alpha|
143 | | ------------- |:-------:|:-------:|:-------:|:-------:|
144 | |Buy And Hold|1|3.13%|$3,131.02|0|
145 | |Strategy 1|197|51.78%|$517,800.90|48.65|
146 | |Strategy 4|242|28.36%|$283,592.14|25.23|
147 | |Strategy 5|56|4.82%|$48,154.94|1.69|
148 | |Strategy 6|55|15.41%|$154,132.37|12.28|
149 | |Strategy 7|75|-15.16%|-$151,645.97|-18.29|
150 |
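For reference, the Alpha column appears to be the strategy's return percentage minus the buy-and-hold return percentage over the same period. A minimal sketch (the helper name is illustrative, not from the codebase):

```
# Alpha as excess return over buy-and-hold (illustrative helper)
def alpha(strategy_return_pct, buy_and_hold_return_pct):
    return round(strategy_return_pct - buy_and_hold_return_pct, 2)

print(alpha(51.78, 3.13))   # 48.65, matching Strategy 1 above
print(alpha(-15.16, 3.13))  # -18.29, matching Strategy 7 above
```
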
151 | ### Return by Strategy
152 |
153 | Note: Trades based on WallStreetBets comments are in BLUE, trades based on holding long-term are in RED
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 | #
176 |
177 | # Facebook, Inc. Analysis
178 |
179 |
180 | Stock Ticker: FB
181 |
182 |
183 | Total Comments Mentioning Ticker: 10,827
184 |
185 |
186 | Average Sentiment Towards Ticker: 0.0459
187 |
188 |
189 | Stock Mention Ranking (SMR): 7
190 |
191 |
192 | Ticker First Mentioned on WSB: 2,468 Days Ago
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 | ## Facebook, Inc. Strategy Specific Returns (Starting w/ $1,000,000)
202 |
203 | ### Overview
204 |
205 | |Strategy Name|Total Trades|Return Percentage|Return|Alpha|
206 | | ------------- |:-------:|:-------:|:-------:|:-------:|
207 | |Buy and Hold|1|0.57%|$5,671.91|0|
208 | |Strategy 1|242|-26.86%|-$268,566.70|-27.43|
209 | |Strategy 4|276|-24.62%|-$246,156.84|-25.19|
210 | |Strategy 5|27|-9.8%|-$98,046.78|-10.37|
211 | |Strategy 6|27|9.87%|$98,653.12|9.3|
212 | |Strategy 7|44|0.32%|$3,186.45|-0.25|
213 |
214 | ### Return by Strategy
215 |
216 | Note: Trades based on WallStreetBets comments are in BLUE, trades based on holding long-term are in RED
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 | #
239 |
240 |
241 | # Data Visualizations
242 |
243 | ### Top-20 Stock Tickers by Total Mentions
244 |
245 |
246 |
247 |
248 |
249 | ### Stock Ticker Mentions by Day
250 |
251 | ### Stock Ticker Mentions by Average Vote Count
252 |
253 | # Strategies
254 |
255 | ### Notation
256 |
257 | | Abbreviation | Meaning |Formula |
258 | | ------------- |:-------------:|:-------------:|
259 | | T | Stock Ticker | |
260 | | OP | Opening Price | |
261 | | CP | Closing Price | |
262 | | DP | Price Delta| abs(CP-OP) |
263 | | PP | % Variation | ((CP-OP) / OP) * 100 |
264 | | V | Volume | |
265 | | TC | Total Comments Mentioning T | |
266 | | S | Sentiment Towards T | |
267 | | TR | Ticker Rank (By Mentions) | |
268 | | AC | Total Comments | |
269 |
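As a concrete reading of the notation, the sketch below computes DP and PP from the table's formulas and derives the mention-frequency signal that the strategies in algo.py trade on (function and variable names are illustrative):

```
# Minimal sketch of the notation above; names are illustrative, not from the codebase
def price_delta(op, cp):
    # DP = abs(CP - OP)
    return abs(cp - op)

def percent_variation(op, cp):
    # PP = ((CP - OP) / OP) * 100
    return ((cp - op) / op) * 100.0

def mention_signal(tc, ac, average_ratio):
    # TC / AC for the day, compared against its long-run average, as in algo.strategy1
    ratio = float(tc) / float(ac)
    diff = ratio - average_ratio
    if abs(diff / average_ratio) * 100 < 25:
        return None  # within 25% of the average ratio: no trade
    return "long" if diff > 0 else "short"
```
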
270 | #
271 |
272 | # Language Processing
273 |
274 | ### Overview
275 |
276 | WallStreetBets is a discussion forum about day trading, stocks, options, futures, and anything market related, so it would be inaccurate to assume that any comment containing a stock ticker indicates a long position on the security.
277 |
278 | From my understanding, this is an NLP problem that is relatively difficult to solve. A *slightly* more accurate way of extracting the type of position from a comment would be to assume a long position unless the word "short" is present in the comment.
279 |
280 | Unfortunately, this strategy would fail on comments discussing options: in our model, the purchase of a put option implies the same sentiment as a short position.
281 |
282 | Lastly, the discussion of multiple securities in a single comment can cause confusion as to the implied position relative to each stock ticker.
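
To make the failure mode concrete, here is a minimal sketch of that naive keyword heuristic (illustrative only, not code from this repo). It labels a bearish puts comment as a long position because the word "short" never appears:

```
# Naive heuristic described above: assume long unless the word "short" appears
def naive_position(comment):
    return "short" if "short" in comment.lower() else "long"

print(naive_position("Short GPRO"))  # short - correct
print(naive_position("I have some money to spare after my 300% gain on TSLA puts"))
# long - wrong, since buying puts is a bearish position
```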
283 |
284 | ### Proposed Solution
285 |
286 | Rather than using NLTK or RAKE, I created an algorithm, implemented in *main.extract_buy_or_sell()*, that attempts to extract the indicated position towards each stock ticker in a comment. Here is the algorithm in pseudocode:
287 |
288 | ```
289 | comment_info = {'puts': [], 'calls': [], 'buy': [], 'sell': []}
290 | for sentence in comment:
291 | while sentence:
292 | word = sentence.pop()
293 | if word == 'buy' or 'buying':
294 | tempList = []
295 | while sentence:
296 | newWord = sentence.pop()
297 | if newWord is StockTicker:
298 | tempList.append(newWord)
299 | elif newWord == 'puts' and len(tempList) > 0:
300 | comment_info['puts'] += tempList
301 | tempList.clear()
302 | break
303 | elif newWord == 'calls' and len(tempList) > 0:
304 | comment_info['calls'] += tempList
305 | tempList.clear()
306 | break
307 | comment_info['buy'] += tempList
308 | elif word == 'short' or 'shorting':
309 | while sentence:
310 | newWord = sentence.pop()
311 | if newWord is StockTicker:
312 | comment_info['sell'] += newWord
313 | else:
314 | break
315 | elif word == 'sell' or 'sold' or 'close' or 'closing' or 'shorts':
316 | tempList = []
317 | while sentence:
318 | newWord = sentence.pop()
319 | if newWord is StockTicker:
320 | tempList.append(newWord)
321 | elif newWord == 'puts' and len(tempList) > 0:
322 | comment_info['puts'] += tempList
323 | tempList.clear()
324 | break
325 | elif newWord == 'calls' and len(tempList) > 0:
326 | comment_info['calls'] += tempList
327 | tempList.clear()
328 | break
329 | elif newWord == 'shorts' and len(tempList) > 0:
330 | comment_info['buy'] += tempList
331 | tempList.clear()
332 | break
333 | comment_info['sell'] += tempList
334 | elif word is StockTicker:
335 | tempList = [word]
336 | while sentence:
337 | newWord = sentence.pop()
338 | if newWord is StockTicker:
339 | tempList.append(newWord)
340 | elif newWord == 'puts' and len(tempList) > 0:
341 | comment_info['puts'] += tempList
342 | tempList.clear()
343 | break
344 | elif newWord == 'calls' and len(tempList) > 0:
345 | comment_info['calls'] += tempList
346 | tempList.clear()
347 | break
348 | comment_info['buy'] += tempList
349 | ```
350 |
351 | ### Examples
352 |
353 | ##### Note: This algo is only being used for stocks traded on the Nasdaq, hence certain *valid* stock tickers are considered *invalid*, as we are not actively pursuing information on them.
354 |
355 | **"Short GPRO"**
356 |
357 | > **{"sell": ["GPRO"], "buy": [], "calls": [], "puts": []}**
358 |
359 | **"HIMX, EOG, WPRT, VALE. Some high div paying stocks NLY, PMT, HTS."**
360 |
361 | > **{"sell": [], "buy": ["HIMX", "WPRT"], "calls": [], "puts": []}**
362 |
363 | **"AAMRQ and ONCS did great for me (and EPZM yesterday)"**
364 |
365 | > **{"sell": [], "buy": ["ONCS", "EPZM"], "calls": [], "puts": []}**
366 |
367 | **"holding ZGNX now. Thanks man!"**
368 |
369 | > **{"sell": [], "buy": ["ZGNX"], "calls": [], "puts": []}**
370 |
371 | **"I threw a couple of hundreds on VVUS earnings. It both beat and is up! Yay!"**
372 |
373 | > **{"sell": [], "buy": ["VVUS"], "calls": [], "puts": []}**
374 |
375 | **"No. OP is hyping his GPRO bet."**
376 |
377 | > **{"sell": [], "buy": ["GPRO"], "calls": [], "puts": []}**
378 |
379 | **"I closed my SRPT position today as well. 258% gain."**
380 |
381 | > **{"sell": ["SRPT"], "buy": [], "calls": [], "puts": []}**
382 |
383 | **"I think SFM has much better growth opportunty"**
384 |
385 | > **{"sell": [], "buy": ["SFM"], "calls": [], "puts": []}**
386 |
387 | **"Though he's doing great in AAPL."**
388 |
389 | > **{"sell": [], "buy": ["AAPL"], "calls": [], "puts": []}**
390 |
391 | **"I have some money to spare after my 300% gain on TSLA puts."**
392 |
393 | > **{"sell": [], "buy": [], "calls": [], "puts": ["TSLA"]}**
394 |
395 | **"Disclaimer, I am short Aug 100 covered calls. I am also long AAPL (though that's implied)."**
396 |
397 | > **{"sell": [], "buy": ["AAPL"], "calls": [], "puts": []}**
398 |
399 | **"looks like CSIQ is about to turn around tmr...."**
400 |
401 | > **{"sell": [], "buy": ["CSIQ"], "calls": [], "puts": []}**
402 |
403 | **"GPRO October $60 Puts"**
404 |
405 | > **{"sell": [], "buy": [], "calls": [], "puts": ["GPRO"]}**
406 |
407 | #
408 |
409 | # Notable Revisions
410 |
411 | ### December 29th 2018
412 |
413 | Prior to December 29th, sentiment analysis was done on comments without taking the sentiment of the ticker symbol itself into consideration. This oversight biased results in favor of companies whose ticker names double as valid words in the English dictionary.
414 |
415 | To fix this oversight, I modify each comment prior to calculating sentiment so that all tickers are replaced with "TSLA" (a sentiment-neutral ticker).
416 |
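A minimal sketch of that substitution, mirroring *main.get_sentiment()* (the regex is the same ticker pattern used in main.py, and the polarity score comes from TextBlob):

```
import re
from textblob import TextBlob

def neutralized_sentiment(comment):
    # Replace every ticker-shaped token with TSLA before scoring, so the ticker's
    # own word sentiment (e.g. GOOD, NICE, LOVE) cannot leak into the polarity
    for token in set(re.findall(r"[A-Z]{1,4}|\d{1,3}(?=\.)|\d{4,}", comment)):
        comment = comment.replace(token, "TSLA")
    return TextBlob(comment).sentiment.polarity

print(neutralized_sentiment("LOVE to the moon"))  # the ticker word no longer inflates the score
```
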
417 | An example of the bias caused by this oversight can be seen in the original *Best Securities by Sentiment Polarity* table below (initially published in [commit 89ddf9d](https://github.com/theriley106/TheWSBIndex/commit/89ddf9dd93d96ba8a1722ecc8d05b026feec75b3)).
418 |
419 | |Ticker|Sentiment|Company|Sector|Industry|
420 | | ------------- |:-------------:|:-----:|:-------------:|:-----:|
421 | |GOOD|0.4238|Gladstone Commercial Corporation|Consumer Services|Real Estate|
422 | |NICE|0.4114|NICE Ltd|Technology|Computer Manufacturing|
423 | |WIN|0.4112|Windstream Holdings, Inc.|Public Utilities|Telecommunications Equipment|
424 | |LOVE|0.2962|The Lovesac Company|Consumer Services|Other Specialty Stores|
425 | |STRO|0.2424|Sutro Biopharma, Inc.|Health Care|Biotechnology: Biological Products (No Diagnostic Substances)|
426 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import json
3 | import re
4 | import dateparser as dp
5 | import threading
6 | import random
7 | import time
8 | import datetime
9 | from collections import Counter
10 | import math
11 | from textblob import TextBlob
12 | import db
13 |
14 | TOTAL_DATES = -365
15 |
16 | COLUMNS = ['open', 'high', 'low', 'close']
17 | IS_TICKER = re.compile("[A-Z]{1,4}|\d{1,3}(?=\.)|\d{4,}")
18 | # This is a regex that determines if a string is a stock ticker
19 | COMPANY_LIST = "companylist.csv"
20 |
21 | WSB_DATASET = "/media/christopher/ssd/wsbData.json"
22 | HISTORICAL_DATA = "data/{0}.csv"
23 | ALL_COUNTS = "dataset/AllCounts.json"
24 | DATES = json.load(open("dataset/ListOfDatesOrder.json"))
25 |
26 | def get_all_possible_tickers(fileName="companylist.csv"):
27 | with open(fileName, 'rb') as f:
28 | reader = csv.reader(f)
29 | your_list = list(reader)
30 | return [x[0] for x in your_list[1:]]
31 |
32 | def get_company_by_ticker(tickerVal):
33 | with open(COMPANY_LIST, 'rb') as f:
34 | reader = csv.reader(f)
35 | your_list = list(reader)
36 | for x in your_list[1:]:
37 | if x[0] == tickerVal:
38 | return x[1]
39 |
40 | def get_all_info_by_ticker(tickerVal):
41 | with open(COMPANY_LIST, 'rb') as f:
42 | reader = csv.reader(f)
43 | your_list = list(reader)
44 | for x in your_list[1:]:
45 | if x[0] == tickerVal:
46 | return x
47 |
48 | def get_average_volume_by_ticker(tickerVal):
49 | try:
50 | with open(HISTORICAL_DATA.format(tickerVal), 'rb') as f:
51 | reader = csv.reader(f)
52 | your_list = list(reader)
53 | total = 0
54 | count = 0
55 | your_list.pop(0)
56 | for x in your_list[TOTAL_DATES:]:
57 | try:
58 | total += int(x[-1])
59 | count += 1
60 | except:
61 | pass
62 | return (float(total) / float(count))
63 | except Exception as exp:
64 | print exp
65 | return 0
66 |
67 | def get_open_price_by_ticker(tickerVal, date):
68 | try:
69 | with open(HISTORICAL_DATA.format(tickerVal), 'rb') as f:
70 | reader = csv.reader(f)
71 | your_list = list(reader)
72 | for x in your_list[1:]:
73 | if x[0] == date:
74 | return x[1]
75 | except Exception as exp:
76 | print exp
77 | return 0
78 |
79 | def get_close_price_by_ticker(tickerVal, date):
80 | try:
81 | with open(HISTORICAL_DATA.format(tickerVal), 'rb') as f:
82 | reader = csv.reader(f)
83 | your_list = list(reader)
84 | for x in your_list[1:]:
85 | if x[0] == date:
86 | return x[4]
87 | except Exception as exp:
88 | print exp
89 | return 0
90 |
91 | def get_diff_from_ticker(tickerVal):
92 | info = {}
93 | try:
94 | with open(HISTORICAL_DATA.format(tickerVal), 'rb') as f:
95 | reader = csv.reader(f)
96 | your_list = list(reader)
97 | your_list.pop(0)
98 | for x in your_list[TOTAL_DATES:]:
99 | #print x
100 | try:
101 | info[x[0]] = float(x[4]) - float(x[1])
102 | except:
103 | pass
104 | except:
105 | pass
106 | return info
107 |
108 | def get_percent_diff_from_ticker(tickerVal):
109 | info = {}
110 | try:
111 | with open(HISTORICAL_DATA.format(tickerVal), 'rb') as f:
112 | reader = csv.reader(f)
113 | your_list = list(reader)
114 | your_list.pop(0)
115 | for x in your_list[TOTAL_DATES:]:
116 | #print x
117 | try:
118 | info[x[0]] = ((float(x[4]) - float(x[1])) / float(x[1])) * 100
119 | except:
120 | pass
121 | except:
122 | pass
123 | return info
124 |
125 | def get_total_count_by_ticker(tickerVal):
126 | a = json.load(open("dataset/AllCounts.json"))
127 | return a[tickerVal]
128 |
129 | def get_day_difference_between_utc(utcTime):
130 | a = datetime.datetime.now().date()
131 | b = convert_date(utcTime)
132 | delta = a - b
133 | return delta.days
134 |
135 | def get_dates():
136 | sql_command = """SELECT dateVal FROM comments;"""
137 | totalCount = 0
138 | dates = []
139 | for val in set(db.run_command(sql_command)):
140 | dateVal = val[0]
141 | if dateVal not in dates:
142 | dates.append(dateVal)
143 | return dates
144 |
145 | def get_total_count_dates(dateVal):
146 | sql_command = """SELECT count(body) FROM comments WHERE dateVal = '{}';""".format(dateVal)
147 | return db.run_command(sql_command)
148 |
149 | def get_total_ticker_count_dates(tickerVal):
150 | a = json.load(open("dataset/totalByDate.json"))
151 | info = {}
152 | for key, val in a.iteritems():
153 | info[key] = 0
154 | sql_command = """SELECT dateVal, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
155 | for val in db.run_command(sql_command):
156 | dateVal = val[0]
157 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
158 | if tickerVal.upper() in tickers:
159 | info[dateVal] += 1
160 | return info
161 |
162 | def calc_ratio_info(tickerVal):
163 | a = json.load(open("dataset/totalByDate.json"))
164 | info = {}
165 | for key, val in a.iteritems():
166 | info[key] = 0
167 | sql_command = """SELECT dateVal, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
168 | for val in db.run_command(sql_command):
169 | dateVal = val[0]
170 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
171 | if tickerVal.upper() in tickers:
172 | info[dateVal] += 1
173 | totalRatio = 0.0
174 | totalCount = 0
175 | for key, val in info.iteritems():
176 | ratio = float(info[key]) / float(a[key])
177 | totalRatio += ratio
178 | info[key] = ratio
179 | totalCount += 1
180 | return {"average": totalRatio / float(totalCount), "dates": info}
181 |
182 | def calc_predicted_direction(tickerVal):
183 | a = json.load(open("dataset/totalByDate.json"))
184 | info = {}
185 | for key, val in a.iteritems():
186 | info[key] = 0
187 | sql_command = """SELECT dateVal, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
188 | for val in db.run_command(sql_command):
189 | dateVal = val[0]
190 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
191 | if tickerVal.upper() in tickers:
192 | info[dateVal] += 1
193 | totalRatio = 0.0
194 | totalCount = 0
195 | for key, val in info.iteritems():
196 | totalRatio += float(info[key]) / float(a[key])
197 | totalCount += 1
198 | averageVal = totalRatio / float(totalCount)
199 | trades = {}
200 | for key, value in a.iteritems():
201 | trades[key] = 0
202 | for key, value in info.iteritems():
203 | thisAvg = float(info[key]) / float(a[key])
204 | diffVal = thisAvg - averageVal
205 | if ((abs(float(diffVal)) / averageVal) * 100) < 25:
206 | trades[key] = 0
207 | else:
208 | if diffVal > 0:
209 | trades[key] = 1
210 | else:
211 | trades[key] = -1
212 | return trades
213 |
214 | def get_count_by_ticker(tickerVal):
215 | # This is super hacky because the tickers are stored as a string like F,TSLA,ETC.
216 | sql_command = """SELECT tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
217 | totalCount = 0
218 | for val in db.run_command(sql_command):
219 | tickers = [x.upper() for x in val[0].split(",") if len(x) > 0]
220 | if tickerVal.upper() in tickers:
221 | totalCount += 1
222 | return totalCount
223 |
224 | def get_weekday_by_ticker(tickerVal):
225 | # This is super hacky because the tickers are stored as a string like F,TSLA,ETC.
226 | info = {}
227 | sql_command = """SELECT weekday, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
228 | totalVal = 0
229 | totalCount = 0
230 | for val in db.run_command(sql_command):
231 | weekday = str(val[0])
232 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
233 | if tickerVal.upper() in tickers:
234 | if weekday not in info:
235 | info[weekday] = 0
236 | info[weekday] += 1
237 | return info
238 |
239 | def get_first_comment_with_ticker(tickerVal):
240 | sql_command = """SELECT created_utc, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
241 | largest_num = 0
242 | for val in db.run_command(sql_command):
243 | utcTime = str(val[0])
244 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
245 | if tickerVal.upper() in tickers:
246 | z = get_day_difference_between_utc(utcTime)
247 | if z > largest_num:
248 | largest_num = z
249 | return largest_num
250 |
251 | def get_average_by_ticker(tickerVal):
252 | # This is super hacky because the tickers are stored as a string like F,TSLA,ETC.
253 | info = {}
254 | sql_command = """SELECT weekday, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
255 | totalVal = 0
256 | totalCount = 0
257 | for val in db.run_command(sql_command):
258 | weekday = str(val[0])
259 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
260 | if tickerVal.upper() in tickers:
261 | if weekday not in info:
262 | info[weekday] = 0
263 | info[weekday] += 1
264 | return info
265 |
266 | '''
267 | def get_sentiment_by_ticker(tickerVal):
268 | # This is super hacky because the tickers are stored as a string like F,TSLA,ETC.
269 | sql_command = """SELECT polarity, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
270 | totalVal = 0
271 | totalCount = 0
272 | for val in db.run_command(sql_command):
273 | sentiment = val[0]
274 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
275 | if tickerVal.upper() in tickers:
276 | totalVal += sentiment
277 | totalCount += 1
278 | if totalCount == 0:
279 | return 0
280 | return float(totalVal) / float(totalCount)
281 | '''
282 |
283 | def get_sentiment_by_ticker(tickerVal):
284 | a = json.load(open('dataset/sentimentByTicker.json'))
285 | for val in a:
286 | if val['ticker'] == tickerVal:
287 | return val['sentiment']
288 |
289 | def get_average_upvotes_by_ticker(tickerVal):
290 | # This is super hacky because the tickers are stored as a string like F,TSLA,ETC.
291 | sql_command = """SELECT ups, tickers FROM comments WHERE tickers LIKE '%{}%';""".format(tickerVal)
292 | totalVal = 0
293 | totalCount = 0
294 | for val in db.run_command(sql_command):
295 | sentiment = val[0]
296 | tickers = [x.upper() for x in val[1].split(",") if len(x) > 0]
297 | if tickerVal.upper() in tickers:
298 | if sentiment != None:
299 | totalVal += sentiment
300 | totalCount += 1
301 | if totalCount == 0:
302 | return 0
303 | return float(totalVal) / float(totalCount)
304 |
305 | def get_all_counts(sort=True, reverse=False):
306 | h = []
307 | g = json.load(open(ALL_COUNTS))
308 | for key, val in g.iteritems():
309 | h.append({'count': val, 'ticker': key})
310 | if sort == True:
311 | h = sorted(h, key=lambda k: k['count'])
312 | if reverse == True:
313 | h = h[::-1]
314 | return h
315 |
316 | def get_yolo_comments():
317 | # This returns tickers that are used in "YOLO" comments
318 | sql_command = """SELECT tickers FROM comments WHERE body LIKE '%yolo%' AND tickers not NULL;"""
319 |
320 | tickers = []
321 | for val in db.run_command(sql_command):
322 | tickers += [x.upper() for x in val[0].split(",") if len(x) > 0]
323 | return tickers
324 |
325 |
326 |
327 | STOCK_TICKERS = get_all_possible_tickers()
328 | STOCK_TICKERS.remove("EDIT")
329 |
330 | def read_csv(filename):
331 | # Reads the dataset with historical prices
332 | with open(filename, 'rb') as f:
333 | reader = csv.reader(f)
334 | return list(reader)
335 |
336 | def get_sentiment(stringVal):
337 | # Replaces each ticker with TSLA as it's sentiment neutral
338 | z = re.findall('[A-Z]{1,4}|\d{1,3}(?=\.)|\d{4,}', stringVal)
339 | for val in set(z):
340 | stringVal = stringVal.replace(val, "TSLA")
341 | return TextBlob(stringVal)
342 |
343 | def convert_date(dateVal):
344 | # Converts to format 2004-01-05
345 | dt = dp.parse(dateVal)
346 | return dt.date()
347 |
348 | def get_weekday(dateVal):
349 | # Converts to format 2004-01-05
350 | dt = dp.parse(dateVal)
351 | return dt.weekday()
352 |
353 | def extract_tickers(string):
354 | e = re.findall('[A-Z]{1,4}|\d{1,3}(?=\.)|\d{4,}', string)
355 | return list(set(e).intersection(set(STOCK_TICKERS)))
356 |
357 | def isTicker(stringVal):
358 | if IS_TICKER.match(stringVal):
359 | return stringVal in set(STOCK_TICKERS)
360 | return False
361 |
362 | def extract_buy_or_sell(string):
363 | info = {'puts': [], 'calls': [], 'buy': [], 'sell': []}
364 | # Extracts the words buy or sell from the comment
365 | for val in re.split("(?<=[.!?]) ", string):
366 | # Splits the comment into sentences, then walks each sentence word by word
367 | allWords = val.split(" ")
368 | while len(allWords) > 0:
369 | word = allWords.pop(0)
370 | if re.match("[\W]?([Bb]uy)[\W]?", word):
371 | # This means it's the word buy
372 | tempList = []
373 | while len(allWords) > 0:
374 | newWord = allWords.pop(0)
375 | if isTicker(newWord):
376 | tempList.append(newWord)
377 | elif re.match("[\W]?([Pp]ut[s]?)[\W]?", newWord):
378 | if len(tempList) > 0:
379 | # This means a sentence like
380 | # put $5 in TSLA
381 | while len(tempList) > 0:
382 | info['puts'].append(tempList.pop())
383 | break
384 |
385 | elif re.match("[\W]?([Cc]all[s]?)[\W]?", newWord):
386 | if len(tempList) > 0:
387 | # This means a sentence like
388 | # call my friend to put $5 in TSLA
389 | while len(tempList) > 0:
390 | info['calls'].append(tempList.pop())
391 | break
392 | info['buy'] += tempList
393 |
394 | elif re.match("[\W]?[Ss]horting?[\W]?", word):
395 | while len(allWords) > 0:
396 | newWord = allWords.pop(0)
397 | if isTicker(newWord):
398 | info['sell'].append(newWord)
399 | else:
400 | break
401 |
402 | elif re.match("[\W]?([Ss]ell|[Ss]old|[Cc]los(e|ing)|[Ss]hort[s]?)[\W]?", word):
403 | # This means it's indicating they want to sell
404 | # Sell TSLA puts would be equivalent to a call
405 | tempList = []
406 | while len(allWords) > 0:
407 | newWord = allWords.pop(0)
408 | if isTicker(newWord):
409 | tempList.append(newWord)
410 | elif re.match("[\W]?([Pp]ut[s]?)[\W]?", newWord):
411 | if len(tempList) > 0:
412 | # This means a sentence like
413 | # put $5 in TSLA
414 | while len(tempList) > 0:
415 | info['calls'].append(tempList.pop())
416 | break
417 |
418 | elif re.match("[\W]?([Cc]all[s]?)[\W]?", newWord):
419 | if len(tempList) > 0:
420 | # This means a sentence like
421 | # call my friend to put $5 in TSLA
422 | while len(tempList) > 0:
423 | info['puts'].append(tempList.pop())
424 | break
425 |
426 | elif re.match("[Ss]hort[s]?", newWord):
427 | # IE closing out a short == buy
428 | if len(tempList) > 0:
429 | # This means a sentence like
430 | # call my friend to put $5 in TSLA
431 | while len(tempList) > 0:
432 | info['buy'].append(tempList.pop())
433 | break
434 |
435 | info['sell'] += tempList
436 |
437 | elif isTicker(word):
438 | tempList = [word]
439 | while len(allWords) > 0:
440 | newWord = allWords.pop(0)
441 | if isTicker(newWord):
442 | tempList.append(newWord)
443 | elif re.match("[\W]?([Pp]ut[s]?)[\W]?", newWord):
444 | if len(tempList) > 0:
445 | # This means a sentence like
446 | # put $5 in TSLA
447 | while len(tempList) > 0:
448 | info['puts'].append(tempList.pop())
449 | break
450 |
451 | elif re.match("[\W]?([Cc]all[s]?)[\W]?", newWord):
452 | if len(tempList) > 0:
453 | # This means a sentence like
454 | # call my friend to put $5 in TSLA
455 | while len(tempList) > 0:
456 | info['calls'].append(tempList.pop())
457 | break
458 | info['buy'] += tempList
459 | return info
460 |
461 |
462 |
463 | def random_string(stringVal):
464 | # Should return float or int
465 | return float(stringVal)
466 |
467 | def text_entropy(s):
468 | p, lns = Counter(s), float(len(s))
469 | return -sum( count/lns * math.log(count/lns, 2) for count in p.values())
470 |
471 | def calc_words(stringVal):
472 | return stringVal.count(" ")
473 |
474 | class DatasetProcess(object):
475 | def __init__(self, function, createNew=False, verbose=True, threads=1, saveAs=False):
476 | self.lock = threading.Lock()
477 | self.threads = threads
478 | # This is the lock for multithreading
479 | self.functionVal = function
480 | self.totalRuns = 0
481 | self.totalCount = 0
482 | self.results = {}
483 | self.totalTime = 0
484 | self.forumnData = {}
485 | self.saveAs = saveAs
486 | self.verbose = verbose
487 |
488 | def save(self, fileName):
489 | with open(fileName, 'w') as fp:
490 | json.dump(self.forumnData, fp)
491 |
492 | def run(self):
493 | with open(WSB_DATASET) as f:
494 | for i, line in enumerate(f):
495 | val = json.loads(line)
496 | dayVal = convert_date(val['created_utc'])
497 | if dayVal not in self.forumnData:
498 | self.forumnData[dayVal] = []
499 | self.forumnData[dayVal].append(self.functionVal(val['body']))
500 | if self.verbose == True:
501 | if i % 2000 == 0:
502 | print i
503 | if self.saveAs != False:
504 | self.save(self.saveAs)
505 | return self.forumnData
506 |
507 | def get_average(self):
508 | self.average = (float(self.totalCount) / float(self.totalRuns))
509 | return self.average
510 |
511 | def get_diff_from_average(self):
512 | self.average = (float(self.totalCount) / float(self.totalRuns))
513 | return [x - self.average for x in self.toReturn]
514 |
515 | class MultiThread(object):
516 | def __init__(self, listOfObjects, function, threads=1):
517 | self.lock = threading.Lock()
518 | self.threads = threads
519 | self.totalLength = len(listOfObjects)
520 | # This is the lock for multithreading
521 | self.objects = [{'id': x, 'val': e} for x, e in enumerate(listOfObjects)]
522 | self.functionVal = function
523 | self.totalRuns = 0
524 | self.totalCount = 0
525 | self.results = {}
526 | self.totalTime = 0
527 |
528 | def run_single(self):
529 | while len(self.objects) > 0:
530 | self.lock.acquire()
531 | if len(self.objects) == 0:
532 | self.lock.release()
533 | break
534 | this_val = self.objects.pop()
535 | self.lock.release()
536 | returnVal = self.functionVal(this_val['val'])
537 | self.lock.acquire()
538 | self.totalRuns += 1
539 | self.totalCount += float(returnVal)
540 | self.lock.release()
541 | self.results[str(this_val['id'])] = returnVal
542 | self.toReturn = []
543 |
544 | def run_all(self):
545 | start = time.time()
546 | threads = [threading.Thread(target=self.run_single) for i in range(self.threads)]
547 | for thread in threads:
548 | thread.start()
549 | for thread in threads:
550 | thread.join()
551 | end = time.time()
552 | self.totalTime = end-start
553 | self.average = (float(self.totalCount) / float(self.totalRuns))
554 | self.toReturn = [self.results[str(i)] for i in range(self.totalLength)]
555 | return self.toReturn
556 |
557 | def get_average(self):
558 | self.average = (float(self.totalCount) / float(self.totalRuns))
559 | return self.average
560 |
561 | def get_diff_from_average(self):
562 | self.average = (float(self.totalCount) / float(self.totalRuns))
563 | return [x - self.average for x in self.toReturn]
564 |
565 | def run_on_all(listOfStrings, function):
566 | a = MultiThread(listOfStrings, function, 20)
567 | 	return a.run_all()
568 |
569 | class Algo(object):
570 | """docstring for Algo"""
571 | def __init__(self):
572 | self.days = []
573 | # These are the days in the dataset
574 | self.dataset = {}
575 | # These are the values of the dataset
576 | self.read_dataset()
577 | # Fills the dataset with info from the CSV
578 | #print self.dataset
579 | self.forumnData = {}
580 | # This contains the forumn dataset
581 | #self.read_forumn_data()
582 | # Reads the dataset
583 |
584 | def read_dataset(self, filename="vixcurrent.csv"):
585 | csvFile = read_csv(filename)
586 | # This is the csv file containing historical prices
587 | csvFile = csvFile[2:]
588 | # Removes the header columns
589 | for row in csvFile:
590 | # This gets the current dataset
591 | day = row[0]
592 | # This is the day value
593 | self.days.append(day)
594 | # Adds the day to the list of days
595 | self.dataset[day] = {}
596 | # This contains the information from each column
597 | for i, columnVal in enumerate(COLUMNS):
598 | # Iterates over each column
599 | self.dataset[day][columnVal] = float(row[i+1])
600 | # Assigns each value to the info dict
601 |
602 | def read_forumn_data(self, filename="/media/christopher/ssd/wsbData.json"):
603 | # This is the data from wallstreet bets
604 | # It populates the forumnData
605 | with open(filename) as f:
606 | for i, line in enumerate(f):
607 | val = json.loads(line)
608 | dayVal = convert_date(val['created_utc'])
609 | if dayVal not in self.forumnData:
610 | self.forumnData[dayVal] = []
611 | self.forumnData[dayVal].append(val)
612 | if i % 2000 == 0:
613 | print i
614 |
615 | def calc_diff_from_date(self, date, days):
616 | # Calculates the difference in values from a specified day onward
617 | # Ie: date=1/29/2004, days=7
618 | dayIndex = self.days.index(date)
619 | # This is the index of the inputted day
620 | info = {}
621 | for column in COLUMNS:
622 | # Goes over each column in the dataset
623 | currentVal = self.dataset[date][column]
624 | futureVal = self.dataset[self.days[dayIndex+days]][column]
625 | info[column] = currentVal - futureVal
626 | return info
627 |
628 | def calc_avg_from_date(self, date, days):
629 | # Calculates the average values from a specified day onward
630 | # Ie: date=1/29/2004, days=7
631 | dayIndex = self.days.index(date)
632 | # This is the index of the inputted day
633 | info = {}
634 | for column in COLUMNS:
635 | # Goes over each column in the dataset
636 | info[column] = []
637 | #print dayIndex
638 | for i in range(dayIndex, dayIndex+days):
639 | # Goes through each column in the dataset
640 | dayInfo = self.dataset[self.days[i]]
641 | # This is all the info for the current day
642 | for column in COLUMNS:
643 | # Goes over each column in the dataset
644 | info[column].append(dayInfo[column])
645 | for column in COLUMNS:
646 | # Goes through each column in the dataset
647 | info[column] = (sum(info[column]) / len(info[column]))
648 | return info
649 |
650 | def calculate_day_diff(self, date):
651 | # This calculates the daily change
652 | return (self.dataset[date]['close'] - self.dataset[date]['open'])
653 |
654 | def calc_for_all(self, functionVal):
655 | # This will run each day through a specific function
656 | returnVal = {}
657 | for val in self.days:
658 | # Iterates over each day
659 | returnVal[val] = functionVal(val)
660 | return returnVal
661 |
662 | #def calc_forumn_frequency(self):
663 |
664 | def calc_stock_ranking(tickerVal):
665 | for i, val in enumerate(get_all_counts(reverse=True)):
666 | if val['ticker'] == tickerVal:
667 | break
668 | return i
669 |
670 | class Trade():
671 | """docstring for Trade"""
672 | def __init__(self, ticker):
673 | self.ticker = ticker
674 | self.all_counts = get_all_counts(reverse=True)
675 | # Contains total counts by stock ticker
676 | # dict(count, ticker)
677 | self.overall_sentiment = get_sentiment_by_ticker(ticker)
678 | # This contains the overall sentiment towards the ticker
679 | self.total_count = get_total_count_by_ticker(ticker)
680 | # Contains the total amount of mentions for this ticker
681 | self.ticker_ranking = calc_stock_ranking(ticker)
682 | self.historical_data = get_diff_from_ticker(ticker)
683 | # Contains difference in open-close price for a given day
684 | # type (dict[date])
685 | self.percent_diff = get_percent_diff_from_ticker(ticker)
686 | # Percentage difference per day
687 | self.all_dates = DATES
688 | self.modified_dates = [x for x in self.all_dates if x in self.historical_data]
689 | if len(self.modified_dates) == 0:
690 | raise Exception("No Data for this stock")
691 | x = calc_ratio_info(ticker)
692 | # This calculates comment ratio for the ticker
693 | self.ratio_by_date = x['dates']
694 | # Comment ratio by date
695 | self.average_ratio = x['average']
696 | # This is the average comment ratio
697 | self.info_vals = []
698 | self.totalTrades = 0
699 |
700 | def short(self, date, amount, duration=0):
701 | #print("Shorting for {} days".format(duration))
702 | if duration > 0:
703 | open_amt = get_open_price_by_ticker(self.ticker, date)
704 | indexVal = self.modified_dates.index(date)
705 | if (indexVal + duration) >= len(self.modified_dates):
706 | close_date = self.modified_dates[-1]
707 | else:
708 | close_date = self.modified_dates[indexVal + duration]
709 | close_amt = get_close_price_by_ticker(self.ticker, close_date)
710 | else:
711 | open_amt = get_open_price_by_ticker(self.ticker, date)
712 | close_amt = get_close_price_by_ticker(self.ticker, date)
713 | #print("Start: {}".format(amount))
714 | #print("End: {}".format(amount + (amount * (-1*((float(close_amt) - float(open_amt)) / float(open_amt))))))
715 | increase = ((float(close_amt) - float(open_amt)) / float(open_amt))
716 | #print("Stock increase: {}%".format(increase))
717 | return amount + (amount * (-1*((float(close_amt) - float(open_amt)) / float(open_amt))))
718 |
719 | def long(self, date, amount, duration=0):
720 | #print("Long for {} days".format(duration))
721 | if duration > 0:
722 | #print("Duration")
723 | open_amt = get_open_price_by_ticker(self.ticker, date)
724 | indexVal = self.modified_dates.index(date)
725 | if (indexVal + duration) >= len(self.modified_dates):
726 | close_date = self.modified_dates[-1]
727 | else:
728 | close_date = self.modified_dates[indexVal + duration]
729 | close_amt = get_close_price_by_ticker(self.ticker, close_date)
730 | else:
731 | open_amt = get_open_price_by_ticker(self.ticker, date)
732 | close_amt = get_close_price_by_ticker(self.ticker, date)
733 | return amount + (amount * ((float(close_amt) - float(open_amt)) / float(open_amt)))
734 | #return amount + (amount * ((self.percent_diff[date])*.01))
735 |
736 | def calc_buy_and_hold(self, balance):
737 | a = get_open_price_by_ticker(self.ticker, self.modified_dates[0])
738 | b = get_close_price_by_ticker(self.ticker, self.modified_dates[-1])
739 | 		return balance + (balance * ((float(b) - float(a)) / float(a)))
740 |
741 | def test_strategy(self, function, balance):
742 | goingLongBalance = balance
743 | info = function(self)
744 | dayDelay = info.get('delay', 0)
745 | # Defaults to 0 delay
746 | totalTrades = 0
747 | self.info_vals = []
748 | totalShares = goingLongBalance / float(get_open_price_by_ticker(self.ticker, self.modified_dates[dayDelay]))
749 | for i in range(dayDelay, len(self.modified_dates)):
750 | indicatorDay = self.modified_dates[i-dayDelay]
751 | tradeDay = self.modified_dates[i]
752 | tradeType = info['trades'][indicatorDay].get('trade')
753 | if tradeType != None:
754 | if info['trades'][indicatorDay].get("completed", False) == False:
755 | totalTrades += 1
756 | if tradeType == 'short':
757 | e = i
758 | duration = 0
759 | while e < len(self.modified_dates):
760 | tempIndicatorDay = self.modified_dates[e-dayDelay]
761 | tempTradeType = info['trades'][tempIndicatorDay].get('trade')
762 | if tempTradeType == "long":
763 | info['trades'][tempIndicatorDay]["completed"] = True
764 | duration += 1
765 | else:
766 | break
767 | e += 1
768 | balance = self.short(tradeDay, balance, duration)
769 | elif tradeType == 'long':
770 | e = i
771 | duration = 0
772 | while e < len(self.modified_dates):
773 | tempIndicatorDay = self.modified_dates[e-dayDelay]
774 | tempTradeType = info['trades'][tempIndicatorDay].get('trade')
775 | if tempTradeType == "long":
776 | info['trades'][tempIndicatorDay]["completed"] = True
777 | duration += 1
778 | else:
779 | break
780 | e += 1
781 | balance = self.long(tradeDay, balance, duration)
782 | goingLongBalance = totalShares * float(get_close_price_by_ticker(self.ticker, self.modified_dates[i]))
783 | self.info_vals.append({"date": self.modified_dates[i], "long_balance": goingLongBalance, "strategy_balance": balance})
784 | print("Total Trades: {}".format(totalTrades))
785 | self.totalTrades = totalTrades
786 | return balance
787 |
788 | def get_more_info(self):
789 | return self.info_vals
790 |
791 |
792 |
793 |
794 |
795 |
796 | if __name__ == '__main__':
797 | #a = Algo()
798 | #print a.calc_diff_from_date('1/5/2004', 7)
799 | #a.calc_for_all(a.calculate_day_diff)
800 | #print len(a.forumnData)
801 | #message = """you should buy TSLA. Maybe even AMD if you feel like it."""
802 | #message = """buying TSLA calls and maybe AMD if I feel like it"""
803 | #message = """closing out my TSLA shorts"""
804 | #message = """short TSLA probably or put put AMD call"""
805 | #message = """dude i don't even know"""
806 | #e = re.compile("[\W]?([Bb]uy|[Ss]ell)[\W]?")
807 | #if e.match('bestbuy'):
808 | # print("Found")
809 | '''for message in open("dataset/onlyComments.txt").read().split("\n"):
810 |
811 | if len(message) > 0:
812 | f = extract_buy_or_sell(message)
813 | if len(message) < 100:
814 | totalCount = 0
815 | for key, val in f.iteritems():
816 | totalCount += len(val)
817 | if val > 0:
818 | print("{} | {}".format(message, f))'''
819 | #print isTicker("BARH")
820 | #newWord = "puts"
821 | #print re.match("[\W]?([Pp]ut[s]?)[\W]?", newWord)
822 | #listOfObjects = range(1,100)
823 | #objectVal = [{'id': x, 'val': e} for x, e in enumerate(listOfObjects)]
824 | #print objectVal
825 | '''c = open("comments.txt").read().split("\n")
826 | a = MultiThread(c, calc_words)
827 | g = a.run_all()
828 | print g
829 | print a.get_diff_from_average()
830 | print a.totalTime'''
831 | #a = MultiThread([str(i) for i in range(0,101)], random_string)
832 | #b = a.run_all()
833 | #print a.get_diff_from_average()
834 | #print b
835 | #print get_diff_from_ticker("MU")
836 | a = Trade("TSLA")
837 | for key, value in a.ratio_by_date.iteritems():
838 | print("{} - {}".format(value, a.average_ratio))
839 |
--------------------------------------------------------------------------------
/dataset/ListOfDatesOrder.json:
--------------------------------------------------------------------------------
1 | ["2012-04-11", "2012-04-12", "2012-04-13", "2012-04-16", "2012-04-17", "2012-04-19", "2012-04-20", "2012-04-21", "2012-04-22", "2012-04-23", "2012-04-24", "2012-04-25", "2012-04-26", "2012-04-27", "2012-04-28", "2012-04-29", "2012-04-30", "2012-05-01", "2012-05-02", "2012-05-03", "2012-05-04", "2012-05-05", "2012-05-06", "2012-05-07", "2012-05-08", "2012-05-09", "2012-05-10", "2012-05-11", "2012-05-12", "2012-05-13", "2012-05-14", "2012-05-15", "2012-05-16", "2012-05-17", "2012-05-18", "2012-05-19", "2012-05-20", "2012-05-21", "2012-05-22", "2012-05-23", "2012-05-24", "2012-05-25", "2012-05-26", "2012-05-27", "2012-05-28", "2012-05-29", "2012-05-30", "2012-05-31", "2012-06-01", "2012-06-02", "2012-06-03", "2012-06-04", "2012-06-05", "2012-06-06", "2012-06-07", "2012-06-08", "2012-06-09", "2012-06-10", "2012-06-11", "2012-06-12", "2012-06-13", "2012-06-14", "2012-06-15", "2012-06-16", "2012-06-17", "2012-06-18", "2012-06-19", "2012-06-20", "2012-06-21", "2012-06-22", "2012-06-23", "2012-06-24", "2012-06-25", "2012-06-26", "2012-06-27", "2012-06-28", "2012-06-29", "2012-06-30", "2012-07-01", "2012-07-02", "2012-07-03", "2012-07-04", "2012-07-05", "2012-07-06", "2012-07-07", "2012-07-08", "2012-07-09", "2012-07-10", "2012-07-11", "2012-07-12", "2012-07-13", "2012-07-14", "2012-07-15", "2012-07-16", "2012-07-17", "2012-07-18", "2012-07-19", "2012-07-20", "2012-07-21", "2012-07-22", "2012-07-23", "2012-07-24", "2012-07-25", "2012-07-26", "2012-07-27", "2012-07-28", "2012-07-29", "2012-07-30", "2012-07-31", "2012-08-01", "2012-08-02", "2012-08-03", "2012-08-04", "2012-08-05", "2012-08-06", "2012-08-07", "2012-08-08", "2012-08-09", "2012-08-10", "2012-08-11", "2012-08-12", "2012-08-13", "2012-08-14", "2012-08-15", "2012-08-16", "2012-08-17", "2012-08-18", "2012-08-19", "2012-08-20", "2012-08-21", "2012-08-22", "2012-08-23", "2012-08-24", "2012-08-25", "2012-08-26", "2012-08-27", "2012-08-28", "2012-08-29", "2012-08-30", "2012-08-31", "2012-09-02", "2012-09-04", "2012-09-05", "2012-09-06", "2012-09-07", "2012-09-08", "2012-09-09", "2012-09-10", "2012-09-11", "2012-09-12", "2012-09-13", "2012-09-14", "2012-09-15", "2012-09-17", "2012-09-18", "2012-09-19", "2012-09-20", "2012-09-21", "2012-09-22", "2012-09-24", "2012-09-25", "2012-09-26", "2012-09-27", "2012-09-28", "2012-09-29", "2012-09-30", "2012-10-01", "2012-10-02", "2012-10-03", "2012-10-04", "2012-10-05", "2012-10-06", "2012-10-07", "2012-10-08", "2012-10-09", "2012-10-10", "2012-10-11", "2012-10-12", "2012-10-13", "2012-10-14", "2012-10-15", "2012-10-16", "2012-10-17", "2012-10-18", "2012-10-21", "2012-10-22", "2012-10-23", "2012-10-24", "2012-10-25", "2012-10-26", "2012-10-27", "2012-10-29", "2012-10-31", "2012-11-01", "2012-11-02", "2012-11-04", "2012-11-05", "2012-11-06", "2012-11-07", "2012-11-08", "2012-11-09", "2012-11-11", "2012-11-12", "2012-11-13", "2012-11-14", "2012-11-15", "2012-11-16", "2012-11-17", "2012-11-18", "2012-11-19", "2012-11-20", "2012-11-21", "2012-11-24", "2012-11-26", "2012-11-27", "2012-11-28", "2012-11-29", "2012-11-30", "2012-12-03", "2012-12-05", "2012-12-06", "2012-12-07", "2012-12-08", "2012-12-09", "2012-12-10", "2012-12-11", "2012-12-12", "2012-12-13", "2012-12-14", "2012-12-15", "2012-12-17", "2012-12-18", "2012-12-19", "2012-12-20", "2012-12-21", "2012-12-23", "2012-12-24", "2012-12-25", "2012-12-26", "2012-12-27", "2012-12-28", "2012-12-29", "2012-12-31", "2013-01-01", "2013-01-04", "2013-01-07", "2013-01-08", "2013-01-09", "2013-01-10", "2013-01-15", "2013-01-16", "2013-01-17", "2013-01-18", 
"2013-01-19", "2013-01-20", "2013-01-21", "2013-01-22", "2013-01-23", "2013-01-24", "2013-01-25", "2013-01-26", "2013-01-27", "2013-01-28", "2013-01-29", "2013-01-30", "2013-01-31", "2013-02-01", "2013-02-02", "2013-02-03", "2013-02-04", "2013-02-05", "2013-02-09", "2013-02-10", "2013-02-11", "2013-02-12", "2013-02-13", "2013-02-14", "2013-02-15", "2013-02-16", "2013-02-17", "2013-02-19", "2013-02-20", "2013-02-21", "2013-02-22", "2013-02-23", "2013-02-24", "2013-02-25", "2013-02-26", "2013-02-27", "2013-02-28", "2013-03-01", "2013-03-02", "2013-03-03", "2013-03-04", "2013-03-05", "2013-03-06", "2013-03-07", "2013-03-08", "2013-03-09", "2013-03-10", "2013-03-11", "2013-03-12", "2013-03-13", "2013-03-14", "2013-03-15", "2013-03-16", "2013-03-18", "2013-03-19", "2013-03-20", "2013-03-21", "2013-03-22", "2013-03-23", "2013-03-24", "2013-03-25", "2013-03-26", "2013-03-27", "2013-03-28", "2013-03-29", "2013-03-30", "2013-03-31", "2013-04-01", "2013-04-02", "2013-04-03", "2013-04-04", "2013-04-05", "2013-04-07", "2013-04-08", "2013-04-09", "2013-04-10", "2013-04-11", "2013-04-12", "2013-04-13", "2013-04-14", "2013-04-15", "2013-04-16", "2013-04-17", "2013-04-18", "2013-04-19", "2013-04-20", "2013-04-21", "2013-04-22", "2013-04-23", "2013-04-24", "2013-04-25", "2013-04-26", "2013-04-27", "2013-04-28", "2013-04-29", "2013-04-30", "2013-05-01", "2013-05-02", "2013-05-03", "2013-05-04", "2013-05-06", "2013-05-07", "2013-05-08", "2013-05-09", "2013-05-10", "2013-05-11", "2013-05-12", "2013-05-13", "2013-05-14", "2013-05-15", "2013-05-16", "2013-05-17", "2013-05-18", "2013-05-19", "2013-05-20", "2013-05-21", "2013-05-22", "2013-05-23", "2013-05-24", "2013-05-25", "2013-05-28", "2013-05-29", "2013-05-30", "2013-05-31", "2013-06-01", "2013-06-02", "2013-06-03", "2013-06-04", "2013-06-05", "2013-06-06", "2013-06-07", "2013-06-08", "2013-06-09", "2013-06-10", "2013-06-11", "2013-06-12", "2013-06-13", "2013-06-14", "2013-06-15", "2013-06-16", "2013-06-17", "2013-06-18", "2013-06-19", "2013-06-20", "2013-06-21", "2013-06-22", "2013-06-23", "2013-06-24", "2013-06-25", "2013-06-26", "2013-06-27", "2013-06-28", "2013-06-30", "2013-07-01", "2013-07-02", "2013-07-03", "2013-07-04", "2013-07-05", "2013-07-06", "2013-07-07", "2013-07-08", "2013-07-09", "2013-07-10", "2013-07-11", "2013-07-12", "2013-07-13", "2013-07-14", "2013-07-15", "2013-07-16", "2013-07-17", "2013-07-18", "2013-07-19", "2013-07-20", "2013-07-21", "2013-07-22", "2013-07-23", "2013-07-24", "2013-07-25", "2013-07-26", "2013-07-27", "2013-07-28", "2013-07-29", "2013-07-30", "2013-07-31", "2013-08-01", "2013-08-02", "2013-08-03", "2013-08-05", "2013-08-06", "2013-08-07", "2013-08-08", "2013-08-09", "2013-08-10", "2013-08-12", "2013-08-13", "2013-08-14", "2013-08-15", "2013-08-16", "2013-08-17", "2013-08-18", "2013-08-19", "2013-08-20", "2013-08-21", "2013-08-22", "2013-08-23", "2013-08-25", "2013-08-26", "2013-08-27", "2013-08-28", "2013-08-29", "2013-08-30", "2013-08-31", "2013-09-01", "2013-09-02", "2013-09-03", "2013-09-04", "2013-09-05", "2013-09-06", "2013-09-07", "2013-09-09", "2013-09-10", "2013-09-11", "2013-09-12", "2013-09-13", "2013-09-14", "2013-09-15", "2013-09-16", "2013-09-17", "2013-09-18", "2013-09-19", "2013-09-20", "2013-09-21", "2013-09-22", "2013-09-23", "2013-09-24", "2013-09-25", "2013-09-26", "2013-09-27", "2013-09-28", "2013-09-29", "2013-09-30", "2013-10-01", "2013-10-02", "2013-10-03", "2013-10-04", "2013-10-05", "2013-10-06", "2013-10-07", "2013-10-08", "2013-10-09", "2013-10-10", "2013-10-11", "2013-10-12", 
"2013-10-14", "2013-10-15", "2013-10-16", "2013-10-17", "2013-10-18", "2013-10-19", "2013-10-20", "2013-10-21", "2013-10-22", "2013-10-23", "2013-10-24", "2013-10-25", "2013-10-26", "2013-10-27", "2013-10-28", "2013-10-29", "2013-10-30", "2013-10-31", "2013-11-01", "2013-11-02", "2013-11-03", "2013-11-04", "2013-11-05", "2013-11-06", "2013-11-07", "2013-11-08", "2013-11-09", "2013-11-10", "2013-11-11", "2013-11-12", "2013-11-13", "2013-11-14", "2013-11-15", "2013-11-16", "2013-11-17", "2013-11-18", "2013-11-19", "2013-11-20", "2013-11-21", "2013-11-22", "2013-11-23", "2013-11-24", "2013-11-25", "2013-11-26", "2013-11-27", "2013-11-28", "2013-11-29", "2013-11-30", "2013-12-01", "2013-12-02", "2013-12-03", "2013-12-04", "2013-12-05", "2013-12-06", "2013-12-07", "2013-12-08", "2013-12-09", "2013-12-10", "2013-12-11", "2013-12-12", "2013-12-13", "2013-12-14", "2013-12-15", "2013-12-16", "2013-12-17", "2013-12-18", "2013-12-19", "2013-12-20", "2013-12-21", "2013-12-22", "2013-12-23", "2013-12-24", "2013-12-25", "2013-12-26", "2013-12-27", "2013-12-28", "2013-12-29", "2013-12-30", "2013-12-31", "2014-01-01", "2014-01-02", "2014-01-03", "2014-01-04", "2014-01-05", "2014-01-06", "2014-01-07", "2014-01-08", "2014-01-09", "2014-01-10", "2014-01-11", "2014-01-12", "2014-01-13", "2014-01-14", "2014-01-15", "2014-01-16", "2014-01-17", "2014-01-18", "2014-01-19", "2014-01-20", "2014-01-21", "2014-01-22", "2014-01-23", "2014-01-24", "2014-01-25", "2014-01-26", "2014-01-27", "2014-01-28", "2014-01-29", "2014-01-30", "2014-01-31", "2014-02-01", "2014-02-02", "2014-02-03", "2014-02-04", "2014-02-05", "2014-02-06", "2014-02-07", "2014-02-08", "2014-02-09", "2014-02-10", "2014-02-11", "2014-02-12", "2014-02-13", "2014-02-14", "2014-02-15", "2014-02-16", "2014-02-17", "2014-02-18", "2014-02-19", "2014-02-20", "2014-02-21", "2014-02-22", "2014-02-23", "2014-02-24", "2014-02-25", "2014-02-26", "2014-02-27", "2014-02-28", "2014-03-01", "2014-03-02", "2014-03-03", "2014-03-04", "2014-03-05", "2014-03-06", "2014-03-07", "2014-03-08", "2014-03-09", "2014-03-10", "2014-03-11", "2014-03-12", "2014-03-13", "2014-03-14", "2014-03-15", "2014-03-16", "2014-03-17", "2014-03-18", "2014-03-19", "2014-03-20", "2014-03-21", "2014-03-22", "2014-03-23", "2014-03-24", "2014-03-25", "2014-03-26", "2014-03-27", "2014-03-28", "2014-03-29", "2014-03-30", "2014-03-31", "2014-04-01", "2014-04-02", "2014-04-03", "2014-04-04", "2014-04-05", "2014-04-06", "2014-04-07", "2014-04-08", "2014-04-09", "2014-04-10", "2014-04-11", "2014-04-12", "2014-04-13", "2014-04-14", "2014-04-15", "2014-04-16", "2014-04-17", "2014-04-18", "2014-04-19", "2014-04-20", "2014-04-21", "2014-04-22", "2014-04-23", "2014-04-24", "2014-04-25", "2014-04-26", "2014-04-27", "2014-04-28", "2014-04-29", "2014-04-30", "2014-05-01", "2014-05-02", "2014-05-03", "2014-05-04", "2014-05-05", "2014-05-06", "2014-05-07", "2014-05-08", "2014-05-09", "2014-05-10", "2014-05-11", "2014-05-12", "2014-05-13", "2014-05-14", "2014-05-15", "2014-05-16", "2014-05-17", "2014-05-18", "2014-05-19", "2014-05-20", "2014-05-21", "2014-05-22", "2014-05-23", "2014-05-24", "2014-05-25", "2014-05-26", "2014-05-27", "2014-05-28", "2014-05-29", "2014-05-30", "2014-05-31", "2014-06-01", "2014-06-02", "2014-06-03", "2014-06-04", "2014-06-05", "2014-06-06", "2014-06-07", "2014-06-08", "2014-06-09", "2014-06-10", "2014-06-11", "2014-06-12", "2014-06-13", "2014-06-14", "2014-06-15", "2014-06-16", "2014-06-17", "2014-06-18", "2014-06-19", "2014-06-20", "2014-06-21", "2014-06-22", "2014-06-23", 
"2014-06-24", "2014-06-25", "2014-06-26", "2014-06-27", "2014-06-28", "2014-06-29", "2014-06-30", "2014-07-01", "2014-07-02", "2014-07-03", "2014-07-04", "2014-07-05", "2014-07-06", "2014-07-07", "2014-07-08", "2014-07-09", "2014-07-10", "2014-07-11", "2014-07-12", "2014-07-13", "2014-07-14", "2014-07-15", "2014-07-16", "2014-07-17", "2014-07-18", "2014-07-19", "2014-07-20", "2014-07-21", "2014-07-22", "2014-07-23", "2014-07-24", "2014-07-25", "2014-07-26", "2014-07-27", "2014-07-28", "2014-07-29", "2014-07-30", "2014-07-31", "2014-08-01", "2014-08-02", "2014-08-04", "2014-08-05", "2014-08-06", "2014-08-07", "2014-08-08", "2014-08-09", "2014-08-10", "2014-08-11", "2014-08-12", "2014-08-13", "2014-08-14", "2014-08-15", "2014-08-16", "2014-08-17", "2014-08-18", "2014-08-19", "2014-08-20", "2014-08-21", "2014-08-22", "2014-08-23", "2014-08-24", "2014-08-25", "2014-08-26", "2014-08-27", "2014-08-28", "2014-08-29", "2014-08-30", "2014-08-31", "2014-09-01", "2014-09-02", "2014-09-03", "2014-09-04", "2014-09-05", "2014-09-06", "2014-09-07", "2014-09-08", "2014-09-09", "2014-09-10", "2014-09-11", "2014-09-12", "2014-09-13", "2014-09-14", "2014-09-15", "2014-09-16", "2014-09-17", "2014-09-18", "2014-09-19", "2014-09-20", "2014-09-21", "2014-09-22", "2014-09-23", "2014-09-24", "2014-09-25", "2014-09-26", "2014-09-27", "2014-09-28", "2014-09-29", "2014-09-30", "2014-10-01", "2014-10-02", "2014-10-03", "2014-10-04", "2014-10-05", "2014-10-06", "2014-10-07", "2014-10-08", "2014-10-09", "2014-10-10", "2014-10-11", "2014-10-12", "2014-10-13", "2014-10-14", "2014-10-15", "2014-10-16", "2014-10-17", "2014-10-18", "2014-10-19", "2014-10-20", "2014-10-21", "2014-10-22", "2014-10-23", "2014-10-24", "2014-10-25", "2014-10-26", "2014-10-27", "2014-10-28", "2014-10-29", "2014-10-30", "2014-10-31", "2014-11-01", "2014-11-02", "2014-11-03", "2014-11-04", "2014-11-05", "2014-11-06", "2014-11-07", "2014-11-08", "2014-11-09", "2014-11-10", "2014-11-11", "2014-11-12", "2014-11-13", "2014-11-14", "2014-11-15", "2014-11-16", "2014-11-17", "2014-11-18", "2014-11-19", "2014-11-20", "2014-11-21", "2014-11-22", "2014-11-23", "2014-11-24", "2014-11-25", "2014-11-26", "2014-11-27", "2014-11-28", "2014-11-29", "2014-11-30", "2014-12-01", "2014-12-02", "2014-12-03", "2014-12-04", "2014-12-05", "2014-12-06", "2014-12-07", "2014-12-08", "2014-12-09", "2014-12-10", "2014-12-11", "2014-12-12", "2014-12-13", "2014-12-14", "2014-12-15", "2014-12-16", "2014-12-17", "2014-12-18", "2014-12-19", "2014-12-20", "2014-12-21", "2014-12-22", "2014-12-23", "2014-12-24", "2014-12-25", "2014-12-26", "2014-12-27", "2014-12-28", "2014-12-29", "2014-12-30", "2014-12-31", "2015-01-01", "2015-01-02", "2015-01-03", "2015-01-04", "2015-01-05", "2015-01-06", "2015-01-07", "2015-01-08", "2015-01-09", "2015-01-10", "2015-01-11", "2015-01-12", "2015-01-13", "2015-01-14", "2015-01-15", "2015-01-16", "2015-01-17", "2015-01-18", "2015-01-19", "2015-01-20", "2015-01-21", "2015-01-22", "2015-01-23", "2015-01-24", "2015-01-25", "2015-01-26", "2015-01-27", "2015-01-28", "2015-01-29", "2015-01-30", "2015-01-31", "2015-02-01", "2015-02-02", "2015-02-03", "2015-02-04", "2015-02-05", "2015-02-06", "2015-02-07", "2015-02-08", "2015-02-09", "2015-02-10", "2015-02-11", "2015-02-12", "2015-02-13", "2015-02-14", "2015-02-15", "2015-02-16", "2015-02-17", "2015-02-18", "2015-02-19", "2015-02-20", "2015-02-21", "2015-02-22", "2015-02-23", "2015-02-24", "2015-02-25", "2015-02-26", "2015-02-27", "2015-02-28", "2015-03-01", "2015-03-02", "2015-03-03", "2015-03-04", 
"2015-03-05", "2015-03-06", "2015-03-07", "2015-03-08", "2015-03-09", "2015-03-10", "2015-03-11", "2015-03-12", "2015-03-13", "2015-03-14", "2015-03-15", "2015-03-16", "2015-03-17", "2015-03-18", "2015-03-19", "2015-03-20", "2015-03-21", "2015-03-22", "2015-03-23", "2015-03-24", "2015-03-25", "2015-03-26", "2015-03-27", "2015-03-28", "2015-03-29", "2015-03-30", "2015-03-31", "2015-04-01", "2015-04-02", "2015-04-03", "2015-04-04", "2015-04-05", "2015-04-06", "2015-04-07", "2015-04-08", "2015-04-09", "2015-04-10", "2015-04-11", "2015-04-12", "2015-04-13", "2015-04-14", "2015-04-15", "2015-04-16", "2015-04-17", "2015-04-18", "2015-04-19", "2015-04-20", "2015-04-21", "2015-04-22", "2015-04-23", "2015-04-24", "2015-04-25", "2015-04-26", "2015-04-27", "2015-04-28", "2015-04-29", "2015-04-30", "2015-05-01", "2015-05-02", "2015-05-03", "2015-05-04", "2015-05-05", "2015-05-06", "2015-05-07", "2015-05-08", "2015-05-09", "2015-05-10", "2015-05-11", "2015-05-12", "2015-05-13", "2015-05-14", "2015-05-15", "2015-05-16", "2015-05-17", "2015-05-18", "2015-05-19", "2015-05-20", "2015-05-21", "2015-05-22", "2015-05-23", "2015-05-24", "2015-05-25", "2015-05-26", "2015-05-27", "2015-05-28", "2015-05-29", "2015-05-30", "2015-05-31", "2015-06-01", "2015-06-02", "2015-06-03", "2015-06-04", "2015-06-05", "2015-06-06", "2015-06-07", "2015-06-08", "2015-06-09", "2015-06-10", "2015-06-11", "2015-06-12", "2015-06-13", "2015-06-14", "2015-06-15", "2015-06-16", "2015-06-17", "2015-06-18", "2015-06-19", "2015-06-20", "2015-06-21", "2015-06-22", "2015-06-23", "2015-06-24", "2015-06-25", "2015-06-26", "2015-06-27", "2015-06-28", "2015-06-29", "2015-06-30", "2015-07-01", "2015-07-02", "2015-07-03", "2015-07-04", "2015-07-05", "2015-07-06", "2015-07-07", "2015-07-08", "2015-07-09", "2015-07-10", "2015-07-11", "2015-07-12", "2015-07-13", "2015-07-14", "2015-07-15", "2015-07-16", "2015-07-17", "2015-07-18", "2015-07-19", "2015-07-20", "2015-07-21", "2015-07-22", "2015-07-23", "2015-07-24", "2015-07-25", "2015-07-26", "2015-07-27", "2015-07-28", "2015-07-29", "2015-07-30", "2015-07-31", "2015-08-01", "2015-08-02", "2015-08-03", "2015-08-04", "2015-08-05", "2015-08-06", "2015-08-07", "2015-08-08", "2015-08-09", "2015-08-10", "2015-08-11", "2015-08-12", "2015-08-13", "2015-08-14", "2015-08-15", "2015-08-16", "2015-08-17", "2015-08-18", "2015-08-19", "2015-08-20", "2015-08-21", "2015-08-22", "2015-08-23", "2015-08-24", "2015-08-25", "2015-08-26", "2015-08-27", "2015-08-28", "2015-08-29", "2015-08-30", "2015-08-31", "2015-09-01", "2015-09-02", "2015-09-03", "2015-09-04", "2015-09-05", "2015-09-06", "2015-09-07", "2015-09-08", "2015-09-09", "2015-09-10", "2015-09-11", "2015-09-12", "2015-09-13", "2015-09-14", "2015-09-15", "2015-09-16", "2015-09-17", "2015-09-18", "2015-09-19", "2015-09-20", "2015-09-21", "2015-09-22", "2015-09-23", "2015-09-24", "2015-09-25", "2015-09-26", "2015-09-27", "2015-09-28", "2015-09-29", "2015-09-30", "2015-10-01", "2015-10-02", "2015-10-03", "2015-10-04", "2015-10-05", "2015-10-06", "2015-10-07", "2015-10-08", "2015-10-09", "2015-10-10", "2015-10-11", "2015-10-12", "2015-10-13", "2015-10-14", "2015-10-15", "2015-10-16", "2015-10-17", "2015-10-18", "2015-10-19", "2015-10-20", "2015-10-21", "2015-10-22", "2015-10-23", "2015-10-24", "2015-10-25", "2015-10-26", "2015-10-27", "2015-10-28", "2015-10-29", "2015-10-30", "2015-10-31", "2015-11-01", "2015-11-02", "2015-11-03", "2015-11-04", "2015-11-05", "2015-11-06", "2015-11-07", "2015-11-08", "2015-11-09", "2015-11-10", "2015-11-11", "2015-11-12", 
"2015-11-13", "2015-11-14", "2015-11-15", "2015-11-16", "2015-11-17", "2015-11-18", "2015-11-19", "2015-11-20", "2015-11-21", "2015-11-22", "2015-11-23", "2015-11-24", "2015-11-25", "2015-11-26", "2015-11-27", "2015-11-28", "2015-11-29", "2015-11-30", "2015-12-01", "2015-12-02", "2015-12-03", "2015-12-04", "2015-12-05", "2015-12-06", "2015-12-07", "2015-12-08", "2015-12-09", "2015-12-10", "2015-12-11", "2015-12-12", "2015-12-13", "2015-12-14", "2015-12-15", "2015-12-16", "2015-12-17", "2015-12-18", "2015-12-19", "2015-12-20", "2015-12-21", "2015-12-22", "2015-12-23", "2015-12-24", "2015-12-25", "2015-12-26", "2015-12-27", "2015-12-28", "2015-12-29", "2015-12-30", "2015-12-31", "2016-01-01", "2016-01-02", "2016-01-03", "2016-01-04", "2016-01-05", "2016-01-06", "2016-01-07", "2016-01-08", "2016-01-09", "2016-01-10", "2016-01-11", "2016-01-12", "2016-01-13", "2016-01-14", "2016-01-15", "2016-01-16", "2016-01-17", "2016-01-18", "2016-01-19", "2016-01-20", "2016-01-21", "2016-01-22", "2016-01-23", "2016-01-24", "2016-01-25", "2016-01-26", "2016-01-27", "2016-01-28", "2016-01-29", "2016-01-30", "2016-01-31", "2016-02-01", "2016-02-02", "2016-02-03", "2016-02-04", "2016-02-05", "2016-02-06", "2016-02-07", "2016-02-08", "2016-02-09", "2016-02-10", "2016-02-11", "2016-02-12", "2016-02-13", "2016-02-14", "2016-02-15", "2016-02-16", "2016-02-17", "2016-02-18", "2016-02-19", "2016-02-20", "2016-02-21", "2016-02-22", "2016-02-23", "2016-02-24", "2016-02-25", "2016-02-26", "2016-02-27", "2016-02-28", "2016-02-29", "2016-03-01", "2016-03-02", "2016-03-03", "2016-03-04", "2016-03-05", "2016-03-06", "2016-03-07", "2016-03-08", "2016-03-09", "2016-03-10", "2016-03-11", "2016-03-12", "2016-03-13", "2016-03-14", "2016-03-15", "2016-03-16", "2016-03-17", "2016-03-18", "2016-03-19", "2016-03-20", "2016-03-21", "2016-03-22", "2016-03-23", "2016-03-24", "2016-03-25", "2016-03-26", "2016-03-27", "2016-03-28", "2016-03-29", "2016-03-30", "2016-03-31", "2016-04-01", "2016-04-02", "2016-04-03", "2016-04-04", "2016-04-05", "2016-04-06", "2016-04-07", "2016-04-08", "2016-04-09", "2016-04-10", "2016-04-11", "2016-04-12", "2016-04-13", "2016-04-14", "2016-04-15", "2016-04-16", "2016-04-17", "2016-04-18", "2016-04-19", "2016-04-20", "2016-04-21", "2016-04-22", "2016-04-23", "2016-04-24", "2016-04-25", "2016-04-26", "2016-04-27", "2016-04-28", "2016-04-29", "2016-04-30", "2016-05-01", "2016-05-02", "2016-05-03", "2016-05-04", "2016-05-05", "2016-05-06", "2016-05-07", "2016-05-08", "2016-05-09", "2016-05-10", "2016-05-11", "2016-05-12", "2016-05-13", "2016-05-14", "2016-05-15", "2016-05-16", "2016-05-17", "2016-05-18", "2016-05-19", "2016-05-20", "2016-05-21", "2016-05-22", "2016-05-23", "2016-05-24", "2016-05-25", "2016-05-26", "2016-05-27", "2016-05-28", "2016-05-29", "2016-05-30", "2016-05-31", "2016-06-01", "2016-06-02", "2016-06-03", "2016-06-04", "2016-06-05", "2016-06-06", "2016-06-07", "2016-06-08", "2016-06-09", "2016-06-10", "2016-06-11", "2016-06-12", "2016-06-13", "2016-06-14", "2016-06-15", "2016-06-16", "2016-06-17", "2016-06-18", "2016-06-19", "2016-06-20", "2016-06-21", "2016-06-22", "2016-06-23", "2016-06-24", "2016-06-25", "2016-06-26", "2016-06-27", "2016-06-28", "2016-06-29", "2016-06-30", "2016-07-01", "2016-07-02", "2016-07-03", "2016-07-04", "2016-07-05", "2016-07-06", "2016-07-07", "2016-07-08", "2016-07-09", "2016-07-10", "2016-07-11", "2016-07-12", "2016-07-13", "2016-07-14", "2016-07-15", "2016-07-16", "2016-07-17", "2016-07-18", "2016-07-19", "2016-07-20", "2016-07-21", "2016-07-22", 
"2016-07-23", "2016-07-24", "2016-07-25", "2016-07-26", "2016-07-27", "2016-07-28", "2016-07-29", "2016-07-30", "2016-07-31", "2016-08-01", "2016-08-02", "2016-08-03", "2016-08-04", "2016-08-05", "2016-08-06", "2016-08-07", "2016-08-08", "2016-08-09", "2016-08-10", "2016-08-11", "2016-08-12", "2016-08-13", "2016-08-14", "2016-08-15", "2016-08-16", "2016-08-17", "2016-08-18", "2016-08-19", "2016-08-20", "2016-08-21", "2016-08-22", "2016-08-23", "2016-08-24", "2016-08-25", "2016-08-26", "2016-08-27", "2016-08-28", "2016-08-29", "2016-08-30", "2016-08-31", "2016-09-01", "2016-09-02", "2016-09-03", "2016-09-04", "2016-09-05", "2016-09-06", "2016-09-07", "2016-09-08", "2016-09-09", "2016-09-10", "2016-09-11", "2016-09-12", "2016-09-13", "2016-09-14", "2016-09-15", "2016-09-16", "2016-09-17", "2016-09-18", "2016-09-19", "2016-09-20", "2016-09-21", "2016-09-22", "2016-09-23", "2016-09-24", "2016-09-25", "2016-09-26", "2016-09-27", "2016-09-28", "2016-09-29", "2016-09-30", "2016-10-01", "2016-10-02", "2016-10-03", "2016-10-04", "2016-10-05", "2016-10-06", "2016-10-07", "2016-10-08", "2016-10-09", "2016-10-10", "2016-10-11", "2016-10-12", "2016-10-13", "2016-10-14", "2016-10-15", "2016-10-16", "2016-10-17", "2016-10-18", "2016-10-19", "2016-10-20", "2016-10-21", "2016-10-22", "2016-10-23", "2016-10-24", "2016-10-25", "2016-10-26", "2016-10-27", "2016-10-28", "2016-10-29", "2016-10-30", "2016-10-31", "2016-11-01", "2016-11-02", "2016-11-03", "2016-11-04", "2016-11-05", "2016-11-06", "2016-11-07", "2016-11-08", "2016-11-09", "2016-11-10", "2016-11-11", "2016-11-12", "2016-11-13", "2016-11-14", "2016-11-15", "2016-11-16", "2016-11-17", "2016-11-18", "2016-11-19", "2016-11-20", "2016-11-21", "2016-11-22", "2016-11-23", "2016-11-24", "2016-11-25", "2016-11-26", "2016-11-27", "2016-11-28", "2016-11-29", "2016-11-30", "2016-12-01", "2016-12-02", "2016-12-03", "2016-12-04", "2016-12-05", "2016-12-06", "2016-12-07", "2016-12-08", "2016-12-09", "2016-12-10", "2016-12-11", "2016-12-12", "2016-12-13", "2016-12-14", "2016-12-15", "2016-12-16", "2016-12-17", "2016-12-18", "2016-12-19", "2016-12-20", "2016-12-21", "2016-12-22", "2016-12-23", "2016-12-24", "2016-12-25", "2016-12-26", "2016-12-27", "2016-12-28", "2016-12-29", "2016-12-30", "2016-12-31", "2017-01-01", "2017-01-02", "2017-01-03", "2017-01-04", "2017-01-05", "2017-01-06", "2017-01-07", "2017-01-08", "2017-01-09", "2017-01-10", "2017-01-11", "2017-01-12", "2017-01-13", "2017-01-14", "2017-01-15", "2017-01-16", "2017-01-17", "2017-01-18", "2017-01-19", "2017-01-20", "2017-01-21", "2017-01-22", "2017-01-23", "2017-01-24", "2017-01-25", "2017-01-26", "2017-01-27", "2017-01-28", "2017-01-29", "2017-01-30", "2017-01-31", "2017-02-01", "2017-02-02", "2017-02-03", "2017-02-04", "2017-02-05", "2017-02-06", "2017-02-07", "2017-02-08", "2017-02-09", "2017-02-10", "2017-02-11", "2017-02-12", "2017-02-13", "2017-02-14", "2017-02-15", "2017-02-16", "2017-02-17", "2017-02-18", "2017-02-19", "2017-02-20", "2017-02-21", "2017-02-22", "2017-02-23", "2017-02-24", "2017-02-25", "2017-02-26", "2017-02-27", "2017-02-28", "2017-03-01", "2017-03-02", "2017-03-03", "2017-03-04", "2017-03-05", "2017-03-06", "2017-03-07", "2017-03-08", "2017-03-09", "2017-03-10", "2017-03-11", "2017-03-12", "2017-03-13", "2017-03-14", "2017-03-15", "2017-03-16", "2017-03-17", "2017-03-18", "2017-03-19", "2017-03-20", "2017-03-21", "2017-03-22", "2017-03-23", "2017-03-24", "2017-03-25", "2017-03-26", "2017-03-27", "2017-03-28", "2017-03-29", "2017-03-30", "2017-03-31", "2017-04-01", 
"2017-04-02", "2017-04-03", "2017-04-04", "2017-04-05", "2017-04-06", "2017-04-07", "2017-04-08", "2017-04-09", "2017-04-10", "2017-04-11", "2017-04-12", "2017-04-13", "2017-04-14", "2017-04-15", "2017-04-16", "2017-04-17", "2017-04-18", "2017-04-19", "2017-04-20", "2017-04-21", "2017-04-22", "2017-04-23", "2017-04-24", "2017-04-25", "2017-04-26", "2017-04-27", "2017-04-28", "2017-04-29", "2017-04-30", "2017-05-01", "2017-05-02", "2017-05-03", "2017-05-04", "2017-05-05", "2017-05-06", "2017-05-07", "2017-05-08", "2017-05-09", "2017-05-10", "2017-05-11", "2017-05-12", "2017-05-13", "2017-05-14", "2017-05-15", "2017-05-16", "2017-05-17", "2017-05-18", "2017-05-19", "2017-05-20", "2017-05-21", "2017-05-22", "2017-05-23", "2017-05-24", "2017-05-25", "2017-05-26", "2017-05-27", "2017-05-28", "2017-05-29", "2017-05-30", "2017-05-31", "2017-06-01", "2017-06-02", "2017-06-03", "2017-06-04", "2017-06-05", "2017-06-06", "2017-06-07", "2017-06-08", "2017-06-09", "2017-06-10", "2017-06-11", "2017-06-12", "2017-06-13", "2017-06-14", "2017-06-15", "2017-06-16", "2017-06-17", "2017-06-18", "2017-06-19", "2017-06-20", "2017-06-21", "2017-06-22", "2017-06-23", "2017-06-24", "2017-06-25", "2017-06-26", "2017-06-27", "2017-06-28", "2017-06-29", "2017-06-30", "2017-07-01", "2017-07-02", "2017-07-03", "2017-07-04", "2017-07-05", "2017-07-06", "2017-07-07", "2017-07-08", "2017-07-09", "2017-07-10", "2017-07-11", "2017-07-12", "2017-07-13", "2017-07-14", "2017-07-15", "2017-07-16", "2017-07-17", "2017-07-18", "2017-07-19", "2017-07-20", "2017-07-21", "2017-07-22", "2017-07-23", "2017-07-24", "2017-07-25", "2017-07-26", "2017-07-27", "2017-07-28", "2017-07-29", "2017-07-30", "2017-07-31", "2017-08-01", "2017-08-02", "2017-08-03", "2017-08-04", "2017-08-05", "2017-08-06", "2017-08-07", "2017-08-08", "2017-08-09", "2017-08-10", "2017-08-11", "2017-08-12", "2017-08-13", "2017-08-14", "2017-08-15", "2017-08-16", "2017-08-17", "2017-08-18", "2017-08-19", "2017-08-20", "2017-08-21", "2017-08-22", "2017-08-23", "2017-08-24", "2017-08-25", "2017-08-26", "2017-08-27", "2017-08-28", "2017-08-29", "2017-08-30", "2017-08-31", "2017-09-01", "2017-09-02", "2017-09-03", "2017-09-04", "2017-09-05", "2017-09-06", "2017-09-07", "2017-09-08", "2017-09-09", "2017-09-10", "2017-09-11", "2017-09-12", "2017-09-13", "2017-09-14", "2017-09-15", "2017-09-16", "2017-09-17", "2017-09-18", "2017-09-19", "2017-09-20", "2017-09-21", "2017-09-22", "2017-09-23", "2017-09-24", "2017-09-25", "2017-09-26", "2017-09-27", "2017-09-28", "2017-09-29", "2017-09-30", "2017-10-01", "2017-10-02", "2017-10-03", "2017-10-04", "2017-10-05", "2017-10-06", "2017-10-07", "2017-10-08", "2017-10-09", "2017-10-10", "2017-10-11", "2017-10-12", "2017-10-13", "2017-10-14", "2017-10-15", "2017-10-16", "2017-10-17", "2017-10-18", "2017-10-19", "2017-10-20", "2017-10-21", "2017-10-22", "2017-10-23", "2017-10-24", "2017-10-25", "2017-10-26", "2017-10-27", "2017-10-28", "2017-10-29", "2017-10-30", "2017-10-31", "2017-11-01", "2017-11-02", "2017-11-03", "2017-11-04", "2017-11-05", "2017-11-06", "2017-11-07", "2017-11-08", "2017-11-09", "2017-11-10", "2017-11-11", "2017-11-12", "2017-11-13", "2017-11-14", "2017-11-15", "2017-11-16", "2017-11-17", "2017-11-18", "2017-11-19", "2017-11-20", "2017-11-21", "2017-11-22", "2017-11-23", "2017-11-24", "2017-11-25", "2017-11-26", "2017-11-27", "2017-11-28", "2017-11-29", "2017-11-30", "2017-12-01", "2017-12-02", "2017-12-03", "2017-12-04", "2017-12-05", "2017-12-06", "2017-12-07", "2017-12-08", "2017-12-09", "2017-12-10", 
"2017-12-11", "2017-12-12", "2017-12-13", "2017-12-14", "2017-12-15", "2017-12-16", "2017-12-17", "2017-12-18", "2017-12-19", "2017-12-20", "2017-12-21", "2017-12-22", "2017-12-23", "2017-12-24", "2017-12-25", "2017-12-26", "2017-12-27", "2017-12-28", "2017-12-29", "2017-12-30", "2017-12-31", "2018-01-01", "2018-01-02", "2018-01-03", "2018-01-04", "2018-01-05", "2018-01-06", "2018-01-07", "2018-01-08", "2018-01-09", "2018-01-10", "2018-01-11", "2018-01-12", "2018-01-13", "2018-01-14", "2018-01-15", "2018-01-16", "2018-01-17", "2018-01-18", "2018-01-19", "2018-01-20", "2018-01-21", "2018-01-22", "2018-01-23", "2018-01-24", "2018-01-25", "2018-01-26", "2018-01-27", "2018-01-28", "2018-01-29", "2018-01-30", "2018-01-31", "2018-02-01", "2018-02-02", "2018-02-03", "2018-02-04", "2018-02-05", "2018-02-06", "2018-02-07", "2018-02-08", "2018-02-09", "2018-02-10", "2018-02-11", "2018-02-12", "2018-02-13", "2018-02-14", "2018-02-15", "2018-02-16", "2018-02-17", "2018-02-18", "2018-02-19", "2018-02-20", "2018-02-21", "2018-02-22", "2018-02-23", "2018-02-24", "2018-02-25", "2018-02-26", "2018-02-27", "2018-02-28", "2018-03-01", "2018-03-02", "2018-03-03", "2018-03-04", "2018-03-05", "2018-03-06", "2018-03-07", "2018-03-08", "2018-03-09", "2018-03-10", "2018-03-11", "2018-03-12", "2018-03-13", "2018-03-14", "2018-03-15", "2018-03-16", "2018-03-17", "2018-03-18", "2018-03-19", "2018-03-20", "2018-03-21", "2018-03-22", "2018-03-23", "2018-03-24", "2018-03-25", "2018-03-26", "2018-03-27", "2018-03-28", "2018-03-29", "2018-03-30", "2018-03-31", "2018-04-01", "2018-04-02", "2018-04-03", "2018-04-04", "2018-04-05", "2018-04-06", "2018-04-07", "2018-04-08", "2018-04-09", "2018-04-10", "2018-04-11", "2018-04-12", "2018-04-13", "2018-04-14", "2018-04-15", "2018-04-16", "2018-04-17", "2018-04-18", "2018-04-19", "2018-04-20", "2018-04-21", "2018-04-22", "2018-04-23", "2018-04-24", "2018-04-25", "2018-04-26", "2018-04-27", "2018-04-28", "2018-04-29", "2018-04-30", "2018-05-01", "2018-05-02", "2018-05-03", "2018-05-04", "2018-05-05", "2018-05-06", "2018-05-07", "2018-05-08", "2018-05-09", "2018-05-10", "2018-05-11", "2018-05-12", "2018-05-13", "2018-05-14", "2018-05-15", "2018-05-16", "2018-05-17", "2018-05-18", "2018-05-19", "2018-05-20", "2018-05-21", "2018-05-22", "2018-05-23", "2018-05-24", "2018-05-25", "2018-05-26", "2018-05-27", "2018-05-28", "2018-05-29", "2018-05-30", "2018-05-31", "2018-06-01", "2018-06-02", "2018-06-03", "2018-06-04", "2018-06-05", "2018-06-06", "2018-06-07", "2018-06-08", "2018-06-09", "2018-06-10", "2018-06-11", "2018-06-12", "2018-06-13", "2018-06-14", "2018-06-15", "2018-06-16", "2018-06-17", "2018-06-18", "2018-06-19", "2018-06-20", "2018-06-21", "2018-06-22", "2018-06-23", "2018-06-24", "2018-06-25", "2018-06-26", "2018-06-27", "2018-06-28", "2018-06-29", "2018-06-30", "2018-07-01", "2018-07-02", "2018-07-03", "2018-07-04", "2018-07-05", "2018-07-06", "2018-07-07", "2018-07-08", "2018-07-09", "2018-07-10", "2018-07-11", "2018-07-12", "2018-07-13", "2018-07-14", "2018-07-15", "2018-07-16", "2018-07-17", "2018-07-18", "2018-07-19", "2018-07-20", "2018-07-21", "2018-07-22", "2018-07-23", "2018-07-24", "2018-07-25", "2018-07-26", "2018-07-27", "2018-07-28", "2018-07-29", "2018-07-30", "2018-07-31", "2018-08-01", "2018-08-02", "2018-08-03", "2018-08-04", "2018-08-05", "2018-08-06", "2018-08-07", "2018-08-08", "2018-08-09", "2018-08-10", "2018-08-11", "2018-08-12", "2018-08-13", "2018-08-14", "2018-08-15", "2018-08-16", "2018-08-17", "2018-08-18", "2018-08-19", "2018-08-20", 
"2018-08-21", "2018-08-22", "2018-08-23", "2018-08-24", "2018-08-25", "2018-08-26", "2018-08-27", "2018-08-28", "2018-08-29", "2018-08-30", "2018-08-31", "2018-09-01", "2018-09-02", "2018-09-03", "2018-09-04", "2018-09-05", "2018-09-06", "2018-09-07", "2018-09-08", "2018-09-09", "2018-09-10", "2018-09-11", "2018-09-12", "2018-09-13", "2018-09-14", "2018-09-15", "2018-09-16", "2018-09-17", "2018-09-18", "2018-09-19", "2018-09-20", "2018-09-21", "2018-09-22", "2018-09-23", "2018-09-24", "2018-09-25", "2018-09-26", "2018-09-27", "2018-09-28", "2018-09-29", "2018-09-30", "2018-10-01", "2018-10-02", "2018-10-03", "2018-10-04", "2018-10-05", "2018-10-06", "2018-10-07", "2018-10-08", "2018-10-09", "2018-10-10", "2018-10-11", "2018-10-12", "2018-10-13", "2018-10-14", "2018-10-15", "2018-10-16", "2018-10-17", "2018-10-18", "2018-10-19", "2018-10-20", "2018-10-21", "2018-10-22", "2018-10-23", "2018-10-24", "2018-10-25", "2018-10-26", "2018-10-27", "2018-10-28", "2018-10-29", "2018-10-30", "2018-10-31"]
--------------------------------------------------------------------------------