├── .gitignore
├── .travis.yml
├── Dockerfile
├── LISCENSE_MATPLOTLIB.txt
├── docker-compose.yml.example
├── docker
    ├── create_admin.py
    ├── crontab_docker.txt
    ├── env.example
    ├── supervisord.conf
    └── wait-and-run.sh
├── history
    ├── __init__.py
    ├── admin.py
    ├── apps.py
    ├── management
    │   ├── __init__.py
    │   └── commands
    │   │   ├── __init__.py
    │   │   ├── alert_fail_cases.py
    │   │   ├── compare_perf.py
    │   │   ├── predict_many_sk.py
    │   │   ├── predict_many_v2.py
    │   │   ├── pull_balance.py
    │   │   ├── pull_bitcointalk.py
    │   │   ├── pull_deposits.py
    │   │   ├── pull_prices.py
    │   │   ├── pull_reddit.py
    │   │   ├── pull_twitter.py
    │   │   ├── scheduled_trades.py
    │   │   └── trade.py
    ├── migrations
    │   ├── 0001_initial.py
    │   ├── 0002_auto_20160330_1854.py
    │   ├── 0003_auto_20160330_1920.py
    │   ├── 0004_socialnetworkmention.py
    │   ├── 0005_socialnetworkmention_network_created_on.py
    │   ├── 0006_auto_20160416_1305.py
    │   ├── 0007_socialnetworkmention_sentiment_polarity.py
    │   ├── 0008_auto_20160416_1920.py
    │   ├── 0009_auto_20160417_1332.py
    │   └── __init__.py
    ├── models.py
    ├── poloniex.py
    ├── predict.py
    ├── technical_indicators.py
    ├── templates
    │   ├── __init__.py
    │   ├── admin
    │   │   ├── __init__.py
    │   │   ├── auth
    │   │   │   └── user
    │   │   │   │   ├── add_form.html
    │   │   │   │   └── change_password.html
    │   │   ├── edit_inline
    │   │   │   ├── stacked.html
    │   │   │   └── tabular.html
    │   │   ├── includes
    │   │   │   ├── fieldset.html
    │   │   │   └── object_delete_summary.html
    │   │   └── index.html
    │   ├── c_chart.html
    │   ├── chart.html
    │   ├── notfound.html
    │   ├── optimize.html
    │   └── profit.html
    ├── tests.py
    ├── tools.py
    └── views.py
├── howto_trade.md
├── license.txt
├── manage.py
├── pypolo
    ├── __init__.py
    ├── local_settings.py.example
    ├── settings.py
    ├── urls.py
    └── wsgi.py
├── readme.md
├── requirements.txt
├── research
    ├── 0224_jeff_thinktopic.txt
    └── 0225_kerry.txt
├── scripts
    ├── back_backup.sh
    ├── bash_profile
    ├── crontab.txt
    ├── deploy.sh
    ├── load_newest_data.sh
    ├── make_backup.sh
    ├── newdb.sh
    ├── restore_backup.sh
    ├── screens.sh
    └── server_setup
├── static
    └── .gitkeep
└── tox.ini
/.gitignore:
--------------------------------------------------------------------------------
1 | db.sqlite3
2 | pypolo/local_settings.py
3 | data.json
4 | db.sqlite3-journal
5 | docker/env
6 | docker-compose.yml
7 | static/*
8 | .DS_Store
9 | *.pyc
10 | *.log
11 | *.swp
12 | 
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 | services:
3 |   - docker
4 | 
5 | language: ruby
6 | 
7 | env:
8 |   DOCKER_COMPOSE_VERSION: 1.6.2
9 | 
10 | before_install:
11 |   - export DEBIAN_FRONTEND=noninteractive
12 |   - sudo apt-get update
13 |   - sudo apt-get -o Dpkg::Options::="--force-confnew" -y install docker-engine
14 |   - sudo rm /usr/local/bin/docker-compose
15 |   - curl -L https://github.com/docker/compose/releases/download/1.6.2/docker-compose-`uname -s`-`uname -m` > docker-compose
16 |   - chmod +x docker-compose
17 |   - sudo mv docker-compose /usr/local/bin
18 |   - sudo pip install flake8
19 | 
20 | script:
21 |   - flake8 .
22 | - cp docker/env.example docker/env 23 | - cp docker-compose.yml.example docker-compose.yml 24 | - cp pypolo/local_settings.py.example pypolo/local_settings.py 25 | - sed -i 's/POLONIEX_API_KEY=/POLONIEX_API_KEY=dummy/' docker/env 26 | - sed -i 's/POLONIEX_API_SECRET=/POLONIEX_API_SECRET=dummy/' docker/env 27 | - docker-compose build 28 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:2.7 2 | 3 | RUN apt-get update && apt-get install -y \ 4 | gcc \ 5 | gettext \ 6 | mysql-client libmysqlclient-dev \ 7 | postgresql-client libpq-dev \ 8 | sqlite3 \ 9 | libblas-dev \ 10 | liblapack-dev \ 11 | libatlas-base-dev \ 12 | gfortran \ 13 | cron \ 14 | swig \ 15 | --no-install-recommends && rm -rf /var/lib/apt/lists/* 16 | 17 | RUN pip install numpy==1.7.1 18 | RUN pip install scipy==0.13.3 19 | RUN pip install matplotlib==1.3.1 20 | 21 | COPY requirements.txt /requirements.txt 22 | RUN pip install --no-cache-dir -r /requirements.txt 23 | 24 | RUN git clone git://github.com/bayerj/arac.git /root/arac && \ 25 | cd /root/arac/ && \ 26 | sed -i "s/.*test.*//i" SConstruct && \ 27 | scons && \ 28 | cp libarac.so /usr/lib/ && \ 29 | cd /root/ 30 | 31 | ENV PYTHONPATH=/root/arac/src/python 32 | -------------------------------------------------------------------------------- /LISCENSE_MATPLOTLIB.txt: -------------------------------------------------------------------------------- 1 | License agreement for matplotlib versions 1.3.0 and later 2 | ========================================================= 3 | 4 | 1. This LICENSE AGREEMENT is between the Matplotlib Development Team 5 | ("MDT"), and the Individual or Organization ("Licensee") accessing and 6 | otherwise using matplotlib software in source or binary form and its 7 | associated documentation. 8 | 9 | 2. Subject to the terms and conditions of this License Agreement, MDT 10 | hereby grants Licensee a nonexclusive, royalty-free, world-wide license 11 | to reproduce, analyze, test, perform and/or display publicly, prepare 12 | derivative works, distribute, and otherwise use matplotlib 13 | alone or in any derivative version, provided, however, that MDT's 14 | License Agreement and MDT's notice of copyright, i.e., "Copyright (c) 15 | 2012- Matplotlib Development Team; All Rights Reserved" are retained in 16 | matplotlib alone or in any derivative version prepared by 17 | Licensee. 18 | 19 | 3. In the event Licensee prepares a derivative work that is based on or 20 | incorporates matplotlib or any part thereof, and wants to 21 | make the derivative work available to others as provided herein, then 22 | Licensee hereby agrees to include in any such work a brief summary of 23 | the changes made to matplotlib . 24 | 25 | 4. MDT is making matplotlib available to Licensee on an "AS 26 | IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR 27 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND 28 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS 29 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 30 | WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 31 | 32 | 5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 33 | FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR 34 | LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING 35 | MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF 36 | THE POSSIBILITY THEREOF. 37 | 38 | 6. 
This License Agreement will automatically terminate upon a material 39 | breach of its terms and conditions. 40 | 41 | 7. Nothing in this License Agreement shall be deemed to create any 42 | relationship of agency, partnership, or joint venture between MDT and 43 | Licensee. This License Agreement does not grant permission to use MDT 44 | trademarks or trade name in a trademark sense to endorse or promote 45 | products or services of Licensee, or any third party. 46 | 47 | 8. By copying, installing or otherwise using matplotlib , 48 | Licensee agrees to be bound by the terms and conditions of this License 49 | Agreement. 50 | 51 | License agreement for matplotlib versions prior to 1.3.0 52 | ======================================================== 53 | 54 | 1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the 55 | Individual or Organization ("Licensee") accessing and otherwise using 56 | matplotlib software in source or binary form and its associated 57 | documentation. 58 | 59 | 2. Subject to the terms and conditions of this License Agreement, JDH 60 | hereby grants Licensee a nonexclusive, royalty-free, world-wide license 61 | to reproduce, analyze, test, perform and/or display publicly, prepare 62 | derivative works, distribute, and otherwise use matplotlib 63 | alone or in any derivative version, provided, however, that JDH's 64 | License Agreement and JDH's notice of copyright, i.e., "Copyright (c) 65 | 2002-2011 John D. Hunter; All Rights Reserved" are retained in 66 | matplotlib alone or in any derivative version prepared by 67 | Licensee. 68 | 69 | 3. In the event Licensee prepares a derivative work that is based on or 70 | incorporates matplotlib or any part thereof, and wants to 71 | make the derivative work available to others as provided herein, then 72 | Licensee hereby agrees to include in any such work a brief summary of 73 | the changes made to matplotlib. 74 | 75 | 4. JDH is making matplotlib available to Licensee on an "AS 76 | IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR 77 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND 78 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS 79 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 80 | WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 81 | 82 | 5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 83 | FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR 84 | LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING 85 | MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF 86 | THE POSSIBILITY THEREOF. 87 | 88 | 6. This License Agreement will automatically terminate upon a material 89 | breach of its terms and conditions. 90 | 91 | 7. Nothing in this License Agreement shall be deemed to create any 92 | relationship of agency, partnership, or joint venture between JDH and 93 | Licensee. This License Agreement does not grant permission to use JDH 94 | trademarks or trade name in a trademark sense to endorse or promote 95 | products or services of Licensee, or any third party. 96 | 97 | 8. By copying, installing or otherwise using matplotlib, 98 | Licensee agrees to be bound by the terms and conditions of this License 99 | Agreement. 
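A note on configuration before the Docker files that follow: .travis.yml above copies docker/env.example to docker/env and docker-compose.yml.example to docker-compose.yml, then seds dummy Poloniex keys into docker/env; docker-compose hands that file to both containers via env_file, and wait-and-run.sh dumps the environment to /tmp/.app.env for the cron jobs. The management commands ultimately read settings.API_KEY and settings.API_SECRET. A minimal sketch of that last hop, assuming the settings module pulls the names defined in docker/env.example out of the environment (the shipped pypolo/local_settings.py.example is not reproduced in this dump, so everything other than the env variable names is an assumption):

import os

# Poloniex credentials as injected via docker/env (see env.example below).
API_KEY = os.environ.get('POLONIEX_API_KEY', '')
API_SECRET = os.environ.get('POLONIEX_API_SECRET', '')

# Assumption for illustration only: never place real orders without credentials.
MAKE_TRADES = bool(API_KEY and API_SECRET)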
-------------------------------------------------------------------------------- /docker-compose.yml.example: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | db: 4 | image: postgres 5 | env_file: docker/env 6 | volumes: 7 | - 'db-data:/var/lib/postgresql/data' 8 | web: 9 | image: t0mk/pytrader-dev 10 | build: ./ 11 | command: /root/pytrader/docker/wait-and-run.sh db 12 | ports: 13 | - "8000:8000" 14 | depends_on: 15 | - db 16 | env_file: docker/env 17 | volumes: 18 | - './:/root/pytrader' 19 | volumes: 20 | db-data: 21 | 22 | -------------------------------------------------------------------------------- /docker/create_admin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from django.contrib.auth.models import User 3 | import os 4 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pypolo.settings") 5 | 6 | username = os.environ.get("PYTRADER_USER", 'trader') 7 | password = os.environ.get("PYTRADER_PASSWORD", 'trader') 8 | if not User.objects.filter(username=username).exists(): 9 | User.objects.create_superuser(username, '', password) 10 | -------------------------------------------------------------------------------- /docker/crontab_docker.txt: -------------------------------------------------------------------------------- 1 | # m h dom mon dow command 2 | * * * * * #source /tmp/.app.env; cd pytrader; ./manage.py runserver $IP_ADDRESS:80 #KO NOTES -- This is done in a while loop in a screen now 3 | 1 1 * * * #source /tmp/.app.env; cd pytrader; ./manage.py predict_many_v2 #KO NOTES -- This is done in a while loop in a screen now 4 | 1 1 * * * #source /tmp/.app.env; cd pytrader; ./manage.py predict_many_sk #KO NOTES -- This is done in a while loop in a screen now 5 | */30 * * * * source /tmp/.app.env; cd pytrader; ./manage.py alert_fail_cases 6 | * * * * * source /tmp/.app.env; cd pytrader; ./manage.py pull_prices 7 | 1 */6 * * * source /tmp/.app.env; cd pytrader; ./manage.py pull_deposits 8 | */5 * * * * source /tmp/.app.env; cd pytrader; ./manage.py pull_balance 9 | */5 * * * * source /tmp/.app.env; cd pytrader; ./manage.py scheduled_trades 10 | 1,11,21,31,41,51 * * * * source /tmp/.app.env; cd pytrader; ./manage.py compare_perf #TODO: change me when granularity changes 11 | #1 1 * * * cd pytrader; sh scripts/make_backup.sh -------------------------------------------------------------------------------- /docker/env.example: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=trader 2 | POSTGRES_PASSWORD=trader 3 | POSTGRES_DB=trader 4 | 5 | POLONIEX_API_KEY= 6 | POLONIEX_API_SECRET= 7 | 8 | PYTRADER_USER=trader 9 | PYTRADER_PASSWORD=trader -------------------------------------------------------------------------------- /docker/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | logfile=/tmp/supervisord.log ; (main log file;default $CWD/supervisord.log) 3 | logfile_maxbytes=50MB ; (max main logfile bytes b4 rotation;default 50MB) 4 | logfile_backups=10 ; (num of main logfile rotation backups;default 10) 5 | loglevel=info ; (log level;default info; others: debug,warn,trace) 6 | pidfile=/tmp/supervisord.pid ; (supervisord pidfile;default supervisord.pid) 7 | nodaemon=true ; (start in foreground if true;default false) 8 | minfds=1024 ; (min. avail startup file descriptors;default 1024) 9 | minprocs=200 ; (min. 
avail process descriptors;default 200) 10 | 11 | [program:django] 12 | command = ./manage.py runserver 0.0.0.0:8000 13 | directory = /root/pytrader 14 | 15 | [program:cron] 16 | command = /usr/sbin/cron -f 17 | stdout_logfile = /%(program_name)s.log 18 | stderr_logfile = /%(program_name)s.log 19 | autorestart = true 20 | -------------------------------------------------------------------------------- /docker/wait-and-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | host="$1" 6 | export PGPASSWORD=$POSTGRES_PASSWORD 7 | until psql -h "$host" -U $POSTGRES_USER $POSTGRES_DB -c '\l'; do 8 | >&2 echo "Postgres is unavailable - sleeping" 9 | sleep 1 10 | done 11 | 12 | >&2 echo "Postgres is up - doing Django magic" 13 | cd /root/pytrader 14 | 15 | ./manage.py syncdb --noinput 16 | # For some reason I can't make it run from the subdir. I dont know Django too 17 | # well. 18 | cp ./docker/create_admin.py ./ 19 | ./create_admin.py 20 | ./manage.py migrate --noinput 21 | crontab /root/pytrader/docker/crontab_docker.txt 22 | env > /tmp/.app.env 23 | exec supervisord -n -c /root/pytrader/docker/supervisord.conf 24 | -------------------------------------------------------------------------------- /history/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/__init__.py -------------------------------------------------------------------------------- /history/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from history.models import ( 3 | Price, PredictionTest, Trade, TradeRecommendation, Balance, PerformanceComp, 4 | Deposit, ClassifierTest, SocialNetworkMention 5 | ) 6 | 7 | 8 | class BalanceAdmin(admin.ModelAdmin): 9 | ordering = ['-id'] 10 | search_fields = ['symbol'] 11 | list_display = ['pk', 'created_on', 'symbol', 'coin_balance', 'btc_balance', 'usd_balance'] 12 | 13 | admin.site.register(Balance, BalanceAdmin) 14 | 15 | 16 | class TradeAdmin(admin.ModelAdmin): 17 | ordering = ['-created_on'] 18 | search_fields = ['type', 'symbol'] 19 | list_display = ['pk', 'price', 'status', 'created_on_str', 'symbol', 'type', 'amount'] 20 | readonly_fields = ['recommendation', 'algo'] 21 | 22 | def recommendation(self, obj): 23 | trs = TradeRecommendation.objects.filter(trade=obj) 24 | return ",".join(["Trade Rec {}". 
25 | format(tr.pk, tr.pk) for tr in trs]) 26 | 27 | recommendation.allow_tags = True 28 | 29 | def algo(self, obj): 30 | trs = TradeRecommendation.objects.filter(trade=obj) 31 | html = "" 32 | if trs.count: 33 | tr = trs[0] 34 | if tr.clf: 35 | html += "{}".format(tr.clf.pk, tr.clf) 36 | if tr.made_by: 37 | html += "{}".format(tr.made_by.pk, tr.made_by) 38 | return html 39 | 40 | algo.allow_tags = True 41 | 42 | admin.site.register(Trade, TradeAdmin) 43 | 44 | 45 | class PriceAdmin(admin.ModelAdmin): 46 | ordering = ['-id'] 47 | search_fields = ['price', 'symbol'] 48 | list_display = ['pk', 'price', 'created_on', 'symbol'] 49 | 50 | 51 | admin.site.register(Price, PriceAdmin) 52 | 53 | 54 | class PredictionTestAdmin(admin.ModelAdmin): 55 | ordering = ['-id'] 56 | search_fields = ['symbol', 'output'] 57 | list_display = ['pk', 'type', 'symbol', 'created_on', 'percent_correct', 'profitloss', 'prediction_size'] 58 | 59 | admin.site.register(PredictionTest, PredictionTestAdmin) 60 | 61 | 62 | class PerformanceCompAdmin(admin.ModelAdmin): 63 | ordering = ['-id'] 64 | search_fields = ['symbol'] 65 | list_display = ['pk', 'created_on', 'symbol', 'nn_rec', 'actual_movement', 'delta'] 66 | 67 | admin.site.register(PerformanceComp, PerformanceCompAdmin) 68 | 69 | 70 | class TradeRecommendationAdmin(admin.ModelAdmin): 71 | ordering = ['-id'] 72 | search_fields = ['symbol', 'recommendation'] 73 | list_display = ['pk', 'created_on', 'symbol', 'recommendation', 'confidence'] 74 | 75 | admin.site.register(TradeRecommendation, TradeRecommendationAdmin) 76 | 77 | 78 | class DepositAdmin(admin.ModelAdmin): 79 | ordering = ['-id'] 80 | search_fields = ['symbol', 'amount', 'status'] 81 | list_display = ['pk', 'symbol', 'amount', 'status'] 82 | 83 | admin.site.register(Deposit, DepositAdmin) 84 | 85 | 86 | class ClassifierTestAdmin(admin.ModelAdmin): 87 | def view_link(obj): 88 | return u"View".format(obj.graph_url()) 89 | view_link.short_description = '' 90 | view_link.allow_tags = True 91 | 92 | ordering = ['-id'] 93 | search_fields = ['symbol', 'output'] 94 | list_display = ['pk', 'type', 'symbol', 'name', 'created_on', 95 | 'percent_correct', 'score', 'prediction_size', view_link] 96 | 97 | admin.site.register(ClassifierTest, ClassifierTestAdmin) 98 | 99 | 100 | class SocialNetworkMentionAdmin(admin.ModelAdmin): 101 | ordering = ['-id'] 102 | search_fields = ['symbol', 'network_name', 'network_id'] 103 | list_display = ['symbol', 'network_name', 'network_id'] 104 | 105 | admin.site.register(SocialNetworkMention, SocialNetworkMentionAdmin) 106 | -------------------------------------------------------------------------------- /history/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class HistoryConfig(AppConfig): 5 | name = 'history' 6 | -------------------------------------------------------------------------------- /history/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/management/__init__.py -------------------------------------------------------------------------------- /history/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/management/commands/__init__.py 
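In TradeAdmin above, the computed recommendation and algo columns (and ClassifierTestAdmin.view_link further down) are declared with allow_tags = True, but their anchor markup was stripped when this file was flattened, which is why the format strings receive two arguments yet show only one placeholder. Below is a sketch of what such a linked admin column conventionally looks like; the admin:history_traderecommendation_change reverse name follows Django's standard admin URL pattern and is an assumption, not the repository's original markup.

from django.contrib import admin
from django.core.urlresolvers import reverse  # django.urls.reverse on Django >= 2.0
from django.utils.html import format_html_join

from history.models import TradeRecommendation


class TradeAdminSketch(admin.ModelAdmin):
    readonly_fields = ['recommendation']

    def recommendation(self, obj):
        # One link per TradeRecommendation pointing at its admin change page.
        trs = TradeRecommendation.objects.filter(trade=obj)
        return format_html_join(
            ', ',
            '<a href="{}">Trade Rec {}</a>',
            ((reverse('admin:history_traderecommendation_change', args=[tr.pk]), tr.pk)
             for tr in trs))

    recommendation.allow_tags = True  # needed on the Django 1.x series this project targets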
-------------------------------------------------------------------------------- /history/management/commands/alert_fail_cases.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from django.conf import settings 3 | from django.core.management.base import BaseCommand 4 | from history.models import PredictionTest, TradeRecommendation, get_time 5 | 6 | 7 | class Command(BaseCommand): 8 | 9 | help = 'sends email if a fail condition is met' 10 | 11 | def alert_email(self, fail_message): 12 | import smtplib 13 | sender = [settings.ALERT_EMAIL] 14 | receivers = [settings.ALERT_EMAIL] 15 | 16 | message = 'From: {0}\nTo: {0}\nSubject: Fail case\n\n{1}\n'.format( 17 | settings.ALERT_EMAIL, 18 | fail_message 19 | ) 20 | 21 | try: 22 | smtpObj = smtplib.SMTP(settings.SMTP_HOST, 587) 23 | smtpObj.login(settings.SMTP_USERNAME, settings.SMTP_PASSWORD) 24 | smtpObj.sendmail(sender, receivers, message) 25 | smtpObj.quit() 26 | print("Successfully sent email") 27 | except Exception as e: 28 | print("Error: unable to send email") 29 | print(e) 30 | 31 | def handle(self, *args, **options): 32 | last_pt = PredictionTest.objects.filter(type='mock').order_by('-created_on').first() 33 | last_trade = TradeRecommendation.objects.order_by('-created_on').first() 34 | 35 | print(last_pt.created_on) 36 | print(last_trade.created_on) 37 | 38 | # 7 hours thing is a hack for MST vs UTC timezone issues 39 | is_trader_running = last_trade.created_on > ( 40 | get_time() - datetime.timedelta(hours=int(7)) - datetime.timedelta(minutes=int(15))) 41 | is_trainer_running = last_pt.created_on > (get_time() - datetime.timedelta(hours=int(7)) - 42 | datetime.timedelta(minutes=int(15))) 43 | 44 | if not is_trader_running: 45 | self.alert_email("not is_trader_running") 46 | if not is_trainer_running: 47 | self.alert_email("not is_trainer_running") 48 | -------------------------------------------------------------------------------- /history/management/commands/compare_perf.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from django.conf import settings 3 | from history.tools import get_fee_amount 4 | from history.models import Price, TradeRecommendation, PerformanceComp 5 | import datetime 6 | 7 | 8 | class Command(BaseCommand): 9 | 10 | help = 'compares market perf vs nn_perf' 11 | 12 | def handle(self, *args, **options): 13 | # setup 14 | buffer_between_prediction_and_this_script_mins = datetime.datetime.now().minute % 10 15 | granularity_mins = settings.TRADER_GRANULARITY_MINS 16 | ticker = 'BTC_ETH' 17 | 18 | # get data 19 | date_of_timerange_we_care_about_predictions_start = datetime.datetime.now() - datetime.timedelta( 20 | seconds=((granularity_mins) * 60 + (60 * (1 + buffer_between_prediction_and_this_script_mins)))) 21 | date_of_timerange_we_care_about_predictions_end = datetime.datetime.now() - datetime.timedelta( 22 | seconds=((granularity_mins) * 60)) 23 | tr_timerange_end = TradeRecommendation.objects.filter( 24 | symbol=ticker, created_on__gte=date_of_timerange_we_care_about_predictions_start, 25 | created_on__lte=date_of_timerange_we_care_about_predictions_end).order_by('-created_on').first().created_on 26 | tr_timerange_start = tr_timerange_end - datetime.timedelta(seconds=120) 27 | price_timerange_start = tr_timerange_end 28 | price_timerange_end = tr_timerange_end + datetime.timedelta(seconds=(granularity_mins * 60)) 29 | trs = 
TradeRecommendation.objects.filter(created_on__gte=tr_timerange_start, created_on__lte=tr_timerange_end) 30 | price_now = Price.objects.filter(symbol=ticker, created_on__lte=price_timerange_end 31 | ).order_by('-created_on').first().price 32 | price_then = Price.objects.filter(symbol=ticker, created_on__lte=price_timerange_start 33 | ).order_by('-created_on').first().price 34 | 35 | # nn attributes 36 | pct_buy = round(1.0 * sum(tr.recommendation == 'BUY' for tr in trs) / len(trs), 2) 37 | pct_sell = round(1.0 * sum(tr.recommendation == 'SELL' for tr in trs) / len(trs), 2) 38 | pct_hold = round(1.0 * sum(tr.recommendation == 'HOLD' for tr in trs) / len(trs), 2) 39 | price_diff = price_now - price_then 40 | price_pct = price_diff / price_then 41 | # -1 = sell, 0 = hold, 1 = wait 42 | price_buy_hold_sell = 0 if abs(price_pct) < get_fee_amount() else (1 if price_pct > 0 else -1) 43 | avg_nn_rec = 1.0 * sum(tr.net_amount for tr in trs) / len(trs) 44 | weighted_avg_nn_rec = 1.0 * sum(tr.net_amount * (tr.confidence / 100.0) for tr in trs) / len(trs) 45 | directionally_same = ((avg_nn_rec > 0 and price_buy_hold_sell > 0) or 46 | (avg_nn_rec < 0 and price_buy_hold_sell < 0)) 47 | delta = abs(abs(avg_nn_rec) - abs(price_buy_hold_sell)) * (1 if directionally_same else -1) 48 | 49 | pc = PerformanceComp(symbol=ticker, 50 | price_timerange_start=price_timerange_start, 51 | price_timerange_end=price_timerange_end, 52 | tr_timerange_start=tr_timerange_start, 53 | tr_timerange_end=tr_timerange_end, 54 | nn_rec=avg_nn_rec, 55 | actual_movement=price_buy_hold_sell, 56 | delta=delta, 57 | pct_buy=pct_buy, 58 | pct_sell=pct_sell, 59 | pct_hold=pct_hold, 60 | rec_count=trs.count(), 61 | weighted_avg_nn_rec=weighted_avg_nn_rec, 62 | directionally_same=directionally_same, 63 | directionally_same_int=1 if directionally_same else 0, 64 | created_on_str=(tr_timerange_end - datetime.timedelta(hours=7)).strftime('%Y-%m-%d %H:%M')) 65 | pc.save() 66 | -------------------------------------------------------------------------------- /history/management/commands/predict_many_sk.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from history.models import ClassifierTest 3 | from django.conf import settings 4 | from history.tools import print_and_log 5 | from multiprocessing import Pool 6 | 7 | 8 | def do_classifier_test(name, ticker, data_set_inputs, granularity, min_back, timedelta_back): 9 | try: 10 | ct = ClassifierTest(name=name, 11 | type='mock', 12 | symbol=ticker, 13 | datasetinputs=data_set_inputs, 14 | granularity=granularity, 15 | minutes_back=min_back, 16 | timedelta_back_in_granularity_increments=timedelta_back) 17 | ct.get_classifier() 18 | ct.save() 19 | return_data = "(ct) {} {} {} {} {} {} returned {}% correct ".format(name, ticker, data_set_inputs, 20 | granularity, 21 | min_back, 22 | timedelta_back, 23 | ct.percent_correct) 24 | 25 | print_and_log(return_data) 26 | # Hack to only graph successful charts, until we figure out this warning 27 | # http://bits.owocki.com/010Z1M3d170p/Image%202016-03-02%20at%208.30.17%20AM.png 28 | if ct.percent_correct > 60 or not settings.MAKE_TRADES: 29 | ct.graph(ct.graph_url()) 30 | return return_data 31 | except Exception as e: 32 | return "Exception in {} {} {} {} {} {}: {}".format(name, ticker, data_set_inputs, 33 | granularity, 34 | min_back, 35 | timedelta_back, 36 | str(e)) 37 | 38 | 39 | class Command(BaseCommand): 40 | 41 | help = 'tests various settings that could make 
the NN more accurate' 42 | result_list = [] 43 | 44 | def _log_results(self, result): 45 | self.result_list.append(result) 46 | 47 | def handle(self, *args, **options): 48 | 49 | pool = Pool(settings.NUM_THREADS) 50 | conf = settings.TRAINER_CURRENCY_CONFIG['classifiers'] 51 | print("Starting SK run") 52 | for ticker in conf['ticker']: 53 | for min_back in conf['min_back']: 54 | for granularity in conf['granularity']: 55 | for datasetinputs in conf['datasetinputs']: 56 | for timedelta_back_in_granularity_increments in \ 57 | conf['timedelta_back_in_granularity_increments']: 58 | for name in conf['name']: 59 | pool.apply_async(do_classifier_test, args=( 60 | name, 61 | ticker, 62 | datasetinputs, 63 | granularity, 64 | min_back, 65 | timedelta_back_in_granularity_increments 66 | ), callback=self._log_results) 67 | print("All SK jobs queued") 68 | pool.close() 69 | pool.join() 70 | print("SK run complete") 71 | for result in self.result_list: 72 | print(result) 73 | -------------------------------------------------------------------------------- /history/management/commands/predict_many_v2.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from history.predict import predict_v2 3 | from django.conf import settings 4 | from history.tools import print_and_log 5 | from multiprocessing import Pool 6 | 7 | 8 | def do_prediction_test(ticker, hidden_layers, min_back, epochs, granularity, datasetinputs, 9 | learningrate, bias, momentum, recurrent, weightdecay, 10 | timedelta_back_in_granularity_increments): 11 | try: 12 | predict_v2(ticker, 13 | hidden_layers=hidden_layers, 14 | NUM_MINUTES_BACK=min_back, 15 | NUM_EPOCHS=epochs, 16 | granularity_minutes=granularity, 17 | datasetinputs=datasetinputs, 18 | learningrate=learningrate, 19 | bias=bias, 20 | momentum=momentum, 21 | recurrent=recurrent, 22 | weightdecay=weightdecay, 23 | timedelta_back_in_granularity_increments=timedelta_back_in_granularity_increments) 24 | except Exception as e: 25 | print_and_log("(p)" + str(e)) 26 | 27 | 28 | class Command(BaseCommand): 29 | 30 | help = 'tests various settings that could make the NN more accurate' 31 | 32 | def handle(self, *args, **options): 33 | 34 | pool = Pool(settings.NUM_THREADS) 35 | conf = settings.TRAINER_CURRENCY_CONFIG['supervised_nn'] 36 | 37 | print("Starting V2 run") 38 | for ticker in conf['ticker']: 39 | for hidden_layers in conf['hidden_layers']: 40 | for min_back in conf['min_back']: 41 | for epochs in conf['epochs']: 42 | for granularity in conf['granularity']: 43 | for datasetinputs in conf['datasetinputs']: 44 | for bias in conf['bias']: 45 | for momentum in conf['momentum']: 46 | for learningrate in conf['learningrate']: 47 | for weightdecay in conf['weightdecay']: 48 | for recurrent in conf['recurrent']: 49 | for timedelta_back_in_granularity_increments in \ 50 | conf['timedelta_back_in_granularity_increments']: 51 | pool.apply_async(do_prediction_test, args=( 52 | ticker, hidden_layers, min_back, epochs, granularity, 53 | datasetinputs, 54 | learningrate, bias, momentum, recurrent, weightdecay, 55 | timedelta_back_in_granularity_increments 56 | )) 57 | print("All V2 jobs queued") 58 | pool.close() 59 | pool.join() 60 | print("V2 run complete") 61 | -------------------------------------------------------------------------------- /history/management/commands/pull_balance.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base 
import BaseCommand 2 | from django.conf import settings 3 | from history.tools import get_exchange_rate_to_btc, get_exchange_rate_btc_to_usd, get_deposit_balance 4 | from history.models import Balance, Trade 5 | import datetime 6 | from django.db import transaction 7 | 8 | import warnings 9 | warnings.filterwarnings("ignore", category=DeprecationWarning) 10 | warnings.filterwarnings("ignore", category=RuntimeWarning) 11 | 12 | 13 | class Command(BaseCommand): 14 | 15 | help = 'pulls balances and stores them in a DB' 16 | 17 | def handle(self, *args, **options): 18 | from history.poloniex import poloniex 19 | 20 | # hit API 21 | poo = poloniex(settings.API_KEY, settings.API_SECRET) 22 | balances = poo.returnBalances() 23 | 24 | # record balances 25 | deposited_amount_btc, deposited_amount_usd = get_deposit_balance() 26 | with transaction.atomic(): 27 | for ticker in balances: 28 | val = float(balances[ticker]['available']) + float(balances[ticker]['onOrders']) 29 | if val > 0.0001: 30 | 31 | exchange_rate_coin_to_btc = get_exchange_rate_to_btc(ticker) 32 | exchange_rate_btc_to_usd = get_exchange_rate_btc_to_usd() 33 | btc_val = exchange_rate_coin_to_btc * val 34 | usd_val = exchange_rate_btc_to_usd * btc_val 35 | b = Balance(symbol=ticker, coin_balance=val, btc_balance=btc_val, 36 | exchange_to_btc_rate=exchange_rate_coin_to_btc, usd_balance=usd_val, 37 | exchange_to_usd_rate=exchange_rate_coin_to_btc, 38 | deposited_amount_btc=deposited_amount_btc if ticker == 'BTC' else 0.00, 39 | deposited_amount_usd=deposited_amount_usd if ticker == 'BTC' else 0.00) 40 | b.save() 41 | 42 | for b in Balance.objects.filter(date_str='0'): 43 | # django timezone stuff , FML 44 | b.date_str = datetime.datetime.strftime(b.created_on - datetime.timedelta(hours=int(7)), '%Y-%m-%d %H:%M') 45 | b.save() 46 | 47 | # normalize trade recommendations too. 
merp 48 | for tr in Trade.objects.filter(created_on_str=''): 49 | # django timezone stuff , FML 50 | tr.created_on_str = datetime.datetime.strftime( 51 | tr.created_on - datetime.timedelta(hours=int(7)), '%Y-%m-%d %H:%M') 52 | tr.save() 53 | -------------------------------------------------------------------------------- /history/management/commands/pull_bitcointalk.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | import datetime 4 | 5 | from django.core.management.base import BaseCommand 6 | from django.conf import settings 7 | from history.models import SocialNetworkMention 8 | 9 | import xml.etree.ElementTree as ET 10 | from BeautifulSoup import BeautifulSoup 11 | 12 | 13 | def get_message_id(link): 14 | tmp = link.split('#msg') 15 | tmp.reverse() 16 | return tmp[0] 17 | 18 | 19 | class Command(BaseCommand): 20 | 21 | help = 'pulls bitcointalk mentions and stores them in a DB' 22 | 23 | def handle(self, *args, **options): 24 | 25 | rss_url = 'https://bitcointalk.org/index.php?type=rss;action=.xml' 26 | response = requests.get(rss_url) 27 | root = ET.fromstring(response.text.encode('utf-8')) 28 | for item in root.iter('item'): 29 | children = {child.tag: child.text.encode('utf-8') for child in item} 30 | post_link = children['link'] 31 | response = requests.get(post_link) 32 | response_body = response.text.encode('utf-8') 33 | parsed_html = BeautifulSoup(response_body) 34 | post_body = parsed_html.find('div', attrs={'class': 'post'}).text 35 | message_id = get_message_id(children['guid']) 36 | # Sat, 16 Apr 2016 18:23:51 GMT 37 | network_created_on = datetime.datetime.strptime(children['pubDate'], 38 | "%a, %d %b %Y %X %Z") 39 | if SocialNetworkMention.objects.filter(network_name='bitcointalk', network_id=message_id).count() == 0: 40 | for currency_symbol in settings.SOCIAL_NETWORK_SENTIMENT_CONFIG['bitcointalk']: 41 | if currency_symbol.lower() in post_body.lower(): 42 | snm = SocialNetworkMention.objects.create( 43 | network_name='bitcointalk', 44 | network_id=message_id, 45 | network_created_on=network_created_on, 46 | text=post_body, 47 | symbol=currency_symbol, 48 | ) 49 | 50 | snm.set_sentiment() 51 | snm.save() 52 | 53 | print('saving {}'.format(currency_symbol)) 54 | -------------------------------------------------------------------------------- /history/management/commands/pull_deposits.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from django.conf import settings 3 | import datetime 4 | from history.tools import get_utc_unixtime 5 | from history.models import Deposit 6 | 7 | 8 | class Command(BaseCommand): 9 | 10 | help = 'pulls balances and stores them in a DB' 11 | 12 | def handle(self, *args, **options): 13 | from history.poloniex import poloniex 14 | 15 | poo = poloniex(settings.API_KEY, settings.API_SECRET) 16 | now = get_utc_unixtime() 17 | r = poo.returnDepositHistory(0, now) 18 | deposits = r['deposits'] + r['withdrawals'] 19 | for d in deposits: 20 | print(d) 21 | currency = d['currency'] 22 | amount = float(d['amount']) * (-1 if 'withdrawalNumber' in d.keys() else 1) 23 | timestamp = d['timestamp'] 24 | txid = d['withdrawalNumber'] if 'withdrawalNumber' in d.keys() else d['txid'] 25 | status = d['status'] 26 | created_on = datetime.datetime.fromtimestamp(timestamp) 27 | try: 28 | d = Deposit.objects.get(txid=txid) 29 | except: 30 | d = Deposit() 31 | d.symbol = currency 32 | d.amount = amount 33 | d.txid = 
txid 34 | d.type = 'deposit' if amount > 0 else 'withdrawal' 35 | d.status = status 36 | d.created_on = created_on 37 | d.modified_on = created_on 38 | d.created_on_str = datetime.datetime.strftime( 39 | created_on - datetime.timedelta(hours=int(7)), '%Y-%m-%d %H:%M') 40 | d.save() 41 | -------------------------------------------------------------------------------- /history/management/commands/pull_prices.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from django.conf import settings 3 | 4 | 5 | class Command(BaseCommand): 6 | 7 | help = 'pulls prices and stores them in a DB' 8 | 9 | def handle(self, *args, **options): 10 | from history.poloniex import poloniex 11 | from history.models import Price 12 | import time 13 | 14 | poo = poloniex(settings.API_KEY, settings.API_SECRET) 15 | price = poo.returnTicker() 16 | 17 | for ticker in price.keys(): 18 | this_price = price[ticker]['last'] 19 | this_volume = price[ticker]['quoteVolume'] 20 | the_str = ticker + ',' + str(time.time()) + ',' + this_price + ", " + this_volume 21 | print("(pp)"+the_str) 22 | p = Price() 23 | p.price = this_price 24 | p.volume = this_volume 25 | p.lowestask = price[ticker]['lowestAsk'] 26 | p.highestbid = price[ticker]['highestBid'] 27 | p.symbol = ticker 28 | p.created_on_str = str(p.created_on) 29 | p.save() 30 | -------------------------------------------------------------------------------- /history/management/commands/pull_reddit.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import praw 4 | 5 | from django.core.management.base import BaseCommand 6 | from django.conf import settings 7 | from history.models import SocialNetworkMention 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | help = 'pulls reddit mentions and stores them in a DB' 13 | 14 | def handle(self, *args, **options): 15 | 16 | r = praw.Reddit(user_agent='pytrader') 17 | limit = 20 18 | 19 | for subreddit_name, currencies in settings.SOCIAL_NETWORK_SENTIMENT_CONFIG['reddit'].items(): 20 | print(subreddit_name) 21 | subreddit = r.get_subreddit(subreddit_name) 22 | submission_set = [ 23 | subreddit.get_hot(limit=limit), 24 | subreddit.get_new(limit=limit), 25 | subreddit.get_rising(limit=limit), 26 | ] 27 | for submissions in submission_set: 28 | for x in submissions: 29 | network_created_on = datetime.datetime.fromtimestamp(x.created_utc) 30 | if SocialNetworkMention.objects.filter(network_name='reddit', network_id=x.id).count() == 0: 31 | for currency_symbol in currencies: 32 | snm = SocialNetworkMention.objects.create( 33 | network_name='reddit', 34 | network_id=x.id, 35 | network_created_on=network_created_on, 36 | network_username=str(x.author), 37 | text=x.selftext, 38 | symbol=currency_symbol, 39 | ) 40 | snm.set_sentiment() 41 | snm.save() 42 | 43 | print('saving {}'.format(currency_symbol)) 44 | -------------------------------------------------------------------------------- /history/management/commands/pull_twitter.py: -------------------------------------------------------------------------------- 1 | import twitter 2 | 3 | import datetime 4 | 5 | from django.core.management.base import BaseCommand 6 | from django.conf import settings 7 | from history.models import SocialNetworkMention 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | help = 'pulls twitter mentions and stores them in a DB' 13 | 14 | def handle(self, *args, **options): 15 | 16 | api = 
twitter.Api(consumer_key=settings.TWITTER_CONSUMER_KEY, 17 | consumer_secret=settings.TWITTER_CONSUMER_SECRET, 18 | access_token_key=settings.TWITTER_ACCESS_TOKEN_KEY, 19 | access_token_secret=settings.TWITTER_ACCESS_TOKEN_SECRET) 20 | 21 | for currency_symbol in settings.SOCIAL_NETWORK_SENTIMENT_CONFIG['twitter']: 22 | print(currency_symbol) 23 | results = api.GetSearch("$" + currency_symbol, count=200) 24 | for tweet in results: 25 | 26 | if SocialNetworkMention.objects.filter(network_name='twitter', network_id=tweet.id).count() == 0: 27 | snm = SocialNetworkMention.objects.create( 28 | network_name='twitter', 29 | network_id=tweet.id, 30 | network_username=tweet.user.screen_name, 31 | network_created_on=datetime.datetime.fromtimestamp(tweet.GetCreatedAtInSeconds()), 32 | text=tweet.text, 33 | symbol=currency_symbol, 34 | ) 35 | snm.set_sentiment() 36 | snm.save() 37 | -------------------------------------------------------------------------------- /history/management/commands/scheduled_trades.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from django.conf import settings 3 | from history.models import Trade 4 | from history.tools import print_and_log 5 | import datetime 6 | 7 | import warnings 8 | warnings.filterwarnings("ignore", category=DeprecationWarning) 9 | warnings.filterwarnings("ignore", category=RuntimeWarning) 10 | 11 | 12 | class Command(BaseCommand): 13 | 14 | help = 'executes scheduled trades' 15 | 16 | def handle(self, *args, **options): 17 | from history.poloniex import poloniex 18 | from history.models import Price 19 | import time 20 | 21 | poo = poloniex(settings.API_KEY, settings.API_SECRET) 22 | 23 | if settings.MAKE_TRADES: 24 | time.sleep(40) 25 | 26 | for t in Trade.objects.filter(created_on__lt=datetime.datetime.now(), status='scheduled'): 27 | 28 | # bid right below the lowest ask, or right above the highest bid so that our orders get filled 29 | action = t.type 30 | price = Price.objects.filter(symbol=t.symbol).order_by('-created_on').first() 31 | if action == 'sell': 32 | rate = price.lowestask * 0.999 33 | else: 34 | rate = price.highestbid * 1.001 35 | 36 | t.price = rate 37 | 38 | if action == 'buy': 39 | try: 40 | response = {} if not settings.MAKE_TRADES else poo.buy(t.symbol, rate, t.amount) 41 | except Exception as e: 42 | print_and_log('(st)act_upon_recommendation:buy: ' + str(e)) 43 | elif action == 'sell': 44 | try: 45 | response = {} if not settings.MAKE_TRADES else poo.sell(t.symbol, rate, t.amount) 46 | except Exception as e: 47 | print_and_log('(st)act_upon_recommendation:sell: ' + str(e)) 48 | 49 | t.response = response 50 | t.orderNumber = response.get('orderNumber', '') 51 | t.status = 'error' if response.get('error', False) else 'open' 52 | t.calculatefees() 53 | t.calculate_exchange_rates() 54 | t.save() 55 | 56 | ot = t.opposite_trade 57 | ot.opposite_price = rate 58 | ot.net_profit = ((rate * t.amount) - (ot.price * ot.amount) if action == 'sell' else 59 | (ot.price * ot.amount) - (rate * t.amount)) - ot.fee_amount - t.fee_amount 60 | ot.calculate_profitability_exchange_rates() 61 | ot.save() 62 | -------------------------------------------------------------------------------- /history/management/commands/trade.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from history.models import Price, PredictionTest, Trade, TradeRecommendation, Balance, 
get_time, ClassifierTest 3 | from history.tools import get_utc_unixtime, print_and_log 4 | import datetime 5 | import time 6 | from history.poloniex import poloniex 7 | from django.conf import settings 8 | from django.utils import timezone 9 | 10 | import warnings 11 | warnings.filterwarnings("ignore", category=DeprecationWarning) 12 | warnings.filterwarnings("ignore", category=RuntimeWarning) 13 | 14 | 15 | class Command(BaseCommand): 16 | 17 | help = 'mocks trading' 18 | 19 | def setup(self): 20 | # setup 21 | self.predictor_configs = settings.TRADER_CURRENCY_CONFIG 22 | 23 | def handle_open_orders(self): 24 | 25 | tickers = list(set([o['symbol'] for o in self.predictor_configs])) 26 | 27 | for ticker in tickers: 28 | try: 29 | # cancel any filled open orders 30 | open_orders = [] if not settings.MAKE_TRADES else self.poo.returnOpenOrders(ticker) 31 | for order in open_orders: 32 | orderNumber = order['orderNumber'] 33 | rate = order['rate'] 34 | self.poo.cancel(ticker, orderNumber) 35 | print_and_log('(t) -- handle_open_orders -- canceled stale order {} at rate {}'. 36 | format(orderNumber, rate)) 37 | for t in Trade.objects.filter(symbol=ticker, orderNumber=orderNumber): 38 | t.status = 'canceled' 39 | t.save() 40 | 41 | # update trade history 42 | trade_history = [] if not settings.MAKE_TRADES else self.poo.returnTradeHistory(ticker) 43 | orderNumbers = [th['orderNumber'] for th in trade_history] 44 | for t in Trade.objects.filter(symbol=ticker, orderNumber__in=orderNumbers): 45 | t.status = 'fill' 46 | t.save() 47 | except Exception as e: 48 | print_and_log('(t)handle_open_orders: ' + str(e)) 49 | 50 | def decide_trade_amount(self, recommendation, i): 51 | config = self.predictor_configs[i] 52 | # TODO: set an amount in USD, then figure out base_amount in currency based upon that, not a hardcoded value. 53 | symbol = config['symbol'] 54 | if symbol == 'USDT_BTC': 55 | amount = 0.01 56 | else: 57 | amount = 1 58 | amount = config['weight'] * amount 59 | confidence_median = 52.0 60 | confidence_leverage = 90.0 61 | 62 | # meter up & down our bet depending upon confidence 63 | this_confidence = self.confidence[i] 64 | rec = recommendation 65 | rec = rec.lower() 66 | confidence = this_confidence 67 | confidence_diff = confidence - confidence_median 68 | confidence_diff_positive = confidence_diff > 0 69 | # hey, look! 
an XOR 70 | if rec != 'hold': 71 | multiplier = 1 + (abs(confidence_diff) / confidence_leverage) 72 | if confidence_diff_positive: 73 | amount = amount * multiplier 74 | elif not confidence_diff_positive: 75 | amount = amount / multiplier 76 | 77 | # debugging info 78 | """ 79 | print_and_log('(t)---- decide trade amount --- amount: {}, multiplier: {}, confidence_diff: {},\ 80 | confidence: {} confidence_diff_positive: {} '.\ 81 | format(round(amount,2),round(multiplier,2),round(confidence_diff, 2), 82 | int(confidence),confidence_diff_positive)) 83 | """ 84 | 85 | return amount 86 | 87 | def get_portfolio_breakdown(self): 88 | bs = Balance.objects.filter(created_on__gt=(timezone.now() - datetime.timedelta(minutes=int(5)))) 89 | if len(bs) == 0: 90 | bs = Balance.objects.filter(created_on__gt=(timezone.now() - datetime.timedelta(minutes=int(10)))) 91 | return [(b.symbol, b.btc_balance) for b in bs] 92 | 93 | def get_portfolio_breakdown_pct(self): 94 | balances_btc = self.get_portfolio_breakdown() 95 | portfolio_value = sum([b[1] for b in balances_btc]) 96 | return [(b[0], round(100.0 * b[1] / portfolio_value, 1)) for b in balances_btc] 97 | 98 | def act_upon_recommendation(self, i, recommendation): 99 | # setup 100 | config = self.predictor_configs[i] 101 | currencyPair = config['symbol'] 102 | 103 | # bid right below the lowest ask, or right above the highest bid so that our orders get filled 104 | price = Price.objects.filter(symbol=currencyPair).order_by('-created_on').first() 105 | if recommendation == 'sell': 106 | rate = price.lowestask * 0.999 107 | else: 108 | rate = price.highestbid * 1.001 109 | 110 | # decide action 111 | action = recommendation.lower() 112 | amount = 0.00 113 | if action in ['buy', 'sell']: 114 | amount = self.decide_trade_amount(action, i) 115 | 116 | # do items 117 | response = {} 118 | if action == 'buy': 119 | try: 120 | response = {} if not settings.MAKE_TRADES else self.poo.buy(currencyPair, rate, amount) 121 | except Exception as e: 122 | print_and_log('(t)act_upon_recommendation:buy: ' + str(e)) 123 | elif action == 'sell': 124 | try: 125 | response = {} if not settings.MAKE_TRADES else self.poo.sell(currencyPair, rate, amount) 126 | except Exception as e: 127 | print_and_log('(t)act_upon_recommendation:sell: ' + str(e)) 128 | else: 129 | print_and_log('(t)---- act_upon_recommendation declining to act. NNs not decisive') 130 | 131 | if response or not settings.MAKE_TRADES: 132 | print_and_log('(t)---- act_upon_recommendation performing {} for {} units. response from api: {}'. 
133 | format(action, amount, response)) 134 | 135 | # make this trade now 136 | t = Trade(type=action, 137 | symbol=currencyPair, 138 | price=rate, 139 | amount=amount, 140 | response=response, 141 | orderNumber=response.get('orderNumber', ''), 142 | status='error' if response.get('error', False) else 'open', 143 | net_amount=((1 if action == 'buy' else -1) * amount)) 144 | t.calculatefees() 145 | t.calculate_exchange_rates() 146 | t.save() 147 | self.trs[i].trade = t 148 | self.trs[i].save() 149 | 150 | if not response.get('error', False): 151 | 152 | # make opposite trade in {granularity} minutes 153 | ot = Trade(type='buy' if action == 'sell' else 'sell', 154 | symbol=currencyPair, 155 | price=0, 156 | amount=amount, 157 | response='', 158 | orderNumber='', 159 | status='scheduled', 160 | net_amount=((1 if action == 'sell' else -1) * amount), 161 | created_on=(datetime.datetime.now() + datetime.timedelta(minutes=config['granularity']))) 162 | ot.save() 163 | 164 | # make this trade now 165 | t.opposite_trade = ot 166 | ot.opposite_trade = t 167 | t.save() 168 | ot.save() 169 | 170 | def run_predictor(self, nn_index): 171 | predictor = self.predictors[nn_index] 172 | config = self.predictor_configs[nn_index] 173 | normalize = config['type'] == 'nn' 174 | prices = predictor.get_latest_prices(normalize=normalize) 175 | prices = prices[(len(prices) - predictor.datasetinputs):(len(prices) + 1)] 176 | recommend, nn_price, last_sample, projected_change_pct = predictor.predict(prices) 177 | confidence = predictor.confidence() 178 | if config['type'] == 'nn': 179 | clf = None 180 | made_by = predictor 181 | else: 182 | clf = predictor 183 | made_by = None 184 | 185 | print_and_log("(t)({})---- ({} w. {}% conf) ---- price from {} => {}({}% change); ". 186 | format(nn_index, recommend, round(confidence, 0), round(last_sample, 4), 187 | round(nn_price, 4), int(projected_change_pct * 100.0))) 188 | tr = TradeRecommendation(symbol=config['symbol'], 189 | made_on=str(prices), 190 | made_by=made_by, 191 | clf=clf, 192 | confidence=confidence, 193 | recommendation=recommend, 194 | net_amount=-1 if recommend == 'SELL' else (1 if recommend == 'BUY' else 0), 195 | created_on_str=str(get_time().strftime('%Y-%m-%d %H:%M'))) 196 | tr.save() 197 | self.trs[nn_index] = tr 198 | return recommend 199 | 200 | def get_traders(self): 201 | predictors = {} 202 | self.confidence = {} 203 | self.trs = {} 204 | self.predictor_configs.reverse() 205 | for i in range(0, len(self.predictor_configs)): 206 | config = self.predictor_configs[i] 207 | if config['type'] == 'nn': 208 | pt = PredictionTest() 209 | pt.type = 'real' 210 | pt.symbol = config['symbol'] 211 | pt.datasetinputs = config['datasetinputs'] 212 | pt.hiddenneurons = 5 213 | pt.minutes_back = 100 214 | pt.epochs = 1000 215 | pt.momentum = 0.1 216 | pt.granularity = config['granularity'] 217 | pt.bias = True 218 | pt.learningrate = 0.05 219 | pt.weightdecay = 0.0 220 | pt.recurrent = True 221 | pt.timedelta_back_in_granularity_increments = 0 222 | pt.save() 223 | predict_runtime = pt.predict_runtime() 224 | predict_confidence = pt.confidence() 225 | print_and_log("(t)predicted trainingtime for nn #{} {}: {}s, predicted confidence: {}%". 
226 | format(i, config['name'], round(predict_runtime, 1), int(predict_confidence))) 227 | pt.get_nn(train=settings.MAKE_TRADES) 228 | print_and_log("(t)done training") 229 | predictors[i] = pt 230 | self.confidence[i] = predict_confidence 231 | else: 232 | ct = ClassifierTest(name=config['name'], 233 | type='real', 234 | symbol=config['symbol'], 235 | datasetinputs=config['datasetinputs'], 236 | granularity=config['granularity'], 237 | minutes_back=config['minutes_back'], 238 | timedelta_back_in_granularity_increments=0) 239 | predict_runtime = ct.predict_runtime() 240 | predict_confidence = ct.confidence() 241 | print_and_log("(t)predicted trainingtime for nn #{} {}: {}s, predicted confidence: {}%". 242 | format(i, config['name'], round(predict_runtime, 1), int(predict_confidence))) 243 | ct.get_classifier(test=False) 244 | print_and_log("(t)done training") 245 | predictors[i] = ct 246 | self.confidence[i] = predict_confidence 247 | ct.save() 248 | 249 | self.predictors = predictors 250 | return self.predictors 251 | 252 | def handle(self, *args, **options): 253 | # setup 254 | self.poo = poloniex(settings.API_KEY, settings.API_SECRET) 255 | self.setup() 256 | print_and_log("(t){} ---- ****** STARTING TRAINERS ******* ".format(str(datetime.datetime.now()))) 257 | self.get_traders() 258 | print_and_log("(t){} ---- ****** DONE TRAINING ALL TRAINERS ******* ".format(str(datetime.datetime.now()))) 259 | 260 | while True: 261 | 262 | # TLDR -- which NNs should run at this granularity? 263 | should_run = [] 264 | recommendations = dict.fromkeys(range(0, len(self.predictors))) 265 | 266 | for i in range(0, len(self.predictor_configs)): 267 | config = self.predictor_configs[i] 268 | if (int(get_utc_unixtime() / 60) % config['granularity'] == 0 and datetime.datetime.now().second < 1): 269 | should_run.append(i) 270 | 271 | # TLDR -- update open orders bfore placing new ones 272 | if len(should_run) > 0: 273 | self.handle_open_orders() 274 | 275 | # TLDR -- run the NNs specified at this granularity 276 | for i in should_run: 277 | config = self.predictor_configs[i] 278 | recommend = self.run_predictor(i) 279 | recommendations[i] = recommend 280 | time.sleep(1) 281 | 282 | # TLDR - act upon recommendations 283 | for i in range(0, len(recommendations)): 284 | recommendation = recommendations[i] 285 | config = self.predictor_configs[i] 286 | if recommendation is not None: 287 | print_and_log("(t)recommendation {} - {} : {}".format(i, str(config['name']), recommendation)) 288 | self.act_upon_recommendation(i, recommendation) 289 | 290 | # TLDR - cleanup and stats 291 | if len(should_run) > 0: 292 | pct_buy = round(100.0 * sum(recommendations[i] == 'BUY' for 293 | i in recommendations) / len(recommendations)) 294 | pct_sell = round(100.0 * sum(recommendations[i] == 'SELL' for 295 | i in recommendations) / len(recommendations)) 296 | print_and_log("(t)TLDR - {}% buy & {}% sell: {}".format(pct_buy, pct_sell, recommendations)) 297 | print_and_log("(t) ******************************************************************************* ") 298 | print_and_log("(t) portfolio is {}".format(self.get_portfolio_breakdown_pct())) 299 | print_and_log("(t) ******************************************************************************* ") 300 | print_and_log("(t) {} ..... waiting again ..... 
".format(str(datetime.datetime.now()))) 301 | print_and_log("(t) ******************************************************************************* ") 302 | 303 | time.sleep(1) 304 | -------------------------------------------------------------------------------- /history/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import history.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='Balance', 16 | fields=[ 17 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 18 | ('created_on', models.DateTimeField(default=history.models.get_time)), 19 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 20 | ('symbol', models.CharField(max_length=30)), 21 | ('coin_balance', models.FloatField()), 22 | ('usd_balance', models.FloatField(null=True)), 23 | ('btc_balance', models.FloatField()), 24 | ('exchange_to_btc_rate', models.FloatField()), 25 | ('exchange_to_usd_rate', models.FloatField(null=True)), 26 | ('deposited_amount_usd', models.FloatField(default=0.0)), 27 | ('deposited_amount_btc', models.FloatField(default=0.0)), 28 | ('date_str', models.CharField(default='0', max_length=20)), 29 | ], 30 | options={ 31 | 'abstract': False, 32 | }, 33 | ), 34 | migrations.CreateModel( 35 | name='ClassifierTest', 36 | fields=[ 37 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 38 | ('created_on', models.DateTimeField(default=history.models.get_time)), 39 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 40 | ('type', models.CharField(default='mock', max_length=30)), 41 | ('symbol', models.CharField(max_length=30)), 42 | ('name', models.CharField(default='', max_length=100)), 43 | ('datasetinputs', models.IntegerField()), 44 | ('granularity', models.IntegerField()), 45 | ('minutes_back', models.IntegerField(default=0)), 46 | ('timedelta_back_in_granularity_increments', models.IntegerField(default=0)), 47 | ('time', models.IntegerField(default=0)), 48 | ('prediction_size', models.IntegerField(default=0)), 49 | ('score', models.IntegerField(default=0)), 50 | ('output', models.TextField()), 51 | ('percent_correct', models.FloatField(null=True)), 52 | ], 53 | options={ 54 | 'abstract': False, 55 | }, 56 | ), 57 | migrations.CreateModel( 58 | name='Deposit', 59 | fields=[ 60 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 61 | ('created_on', models.DateTimeField(default=history.models.get_time)), 62 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 63 | ('symbol', models.CharField(max_length=30)), 64 | ('amount', models.FloatField(null=True)), 65 | ('type', models.CharField(max_length=10)), 66 | ('txid', models.CharField(default='', max_length=500)), 67 | ('status', models.CharField(default='none', max_length=100)), 68 | ('created_on_str', models.CharField(default='', max_length=50)), 69 | ], 70 | options={ 71 | 'abstract': False, 72 | }, 73 | ), 74 | migrations.CreateModel( 75 | name='PerformanceComp', 76 | fields=[ 77 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 78 | ('created_on', models.DateTimeField(default=history.models.get_time)), 79 | ('modified_on', 
models.DateTimeField(default=history.models.get_time)), 80 | ('symbol', models.CharField(max_length=30)), 81 | ('nn_rec', models.FloatField()), 82 | ('actual_movement', models.FloatField()), 83 | ('delta', models.FloatField()), 84 | ('created_on_str', models.CharField(max_length=30)), 85 | ('directionally_same', models.BooleanField(default=False)), 86 | ('directionally_same_int', models.IntegerField(default=0)), 87 | ('weighted_avg_nn_rec', models.FloatField(default=0)), 88 | ('pct_buy', models.FloatField(default=0)), 89 | ('pct_hold', models.FloatField(default=0)), 90 | ('pct_sell', models.FloatField(default=0)), 91 | ('rec_count', models.IntegerField(default=0)), 92 | ('price_timerange_start', models.DateTimeField(default=None, null=True)), 93 | ('price_timerange_end', models.DateTimeField(default=None, null=True)), 94 | ('tr_timerange_start', models.DateTimeField(default=None, null=True)), 95 | ('tr_timerange_end', models.DateTimeField(default=None, null=True)), 96 | ], 97 | options={ 98 | 'abstract': False, 99 | }, 100 | ), 101 | migrations.CreateModel( 102 | name='PredictionTest', 103 | fields=[ 104 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 105 | ('created_on', models.DateTimeField(default=history.models.get_time)), 106 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 107 | ('type', models.CharField(default='mock', max_length=30)), 108 | ('symbol', models.CharField(max_length=30)), 109 | ('percent_correct', models.FloatField(null=True)), 110 | ('avg_diff', models.FloatField(null=True)), 111 | ('datasetinputs', models.IntegerField()), 112 | ('hiddenneurons', models.IntegerField()), 113 | ('granularity', models.IntegerField()), 114 | ('minutes_back', models.IntegerField(default=0)), 115 | ('time', models.IntegerField(default=0)), 116 | ('epochs', models.IntegerField(default=0)), 117 | ('prediction_size', models.IntegerField(default=0)), 118 | ('learningrate', models.FloatField(default=0)), 119 | ('momentum', models.FloatField(default=0)), 120 | ('weightdecay', models.FloatField(default=0)), 121 | ('bias', models.BooleanField(default=False)), 122 | ('bias_chart', models.IntegerField(default=-1)), 123 | ('recurrent', models.BooleanField(default=False)), 124 | ('recurrent_chart', models.IntegerField(default=-1)), 125 | ('profitloss', models.FloatField(default=0)), 126 | ('profitloss_int', models.IntegerField(default=0)), 127 | ('timedelta_back_in_granularity_increments', models.IntegerField(default=0)), 128 | ('output', models.TextField()), 129 | ], 130 | options={ 131 | 'abstract': False, 132 | }, 133 | ), 134 | migrations.CreateModel( 135 | name='Price', 136 | fields=[ 137 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 138 | ('created_on', models.DateTimeField(default=history.models.get_time)), 139 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 140 | ('symbol', models.CharField(max_length=30)), 141 | ('price', models.FloatField()), 142 | ('volume', models.FloatField(null=True)), 143 | ('lowestask', models.FloatField(null=True)), 144 | ('highestbid', models.FloatField(null=True)), 145 | ('created_on_str', models.CharField(default='', max_length=50)), 146 | ], 147 | options={ 148 | 'abstract': False, 149 | }, 150 | ), 151 | migrations.CreateModel( 152 | name='Trade', 153 | fields=[ 154 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 155 | ('created_on', 
models.DateTimeField(default=history.models.get_time)), 156 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 157 | ('symbol', models.CharField(max_length=30)), 158 | ('price', models.FloatField()), 159 | ('amount', models.FloatField(null=True)), 160 | ('type', models.CharField(max_length=10)), 161 | ('response', models.TextField()), 162 | ('orderNumber', models.CharField(default='', max_length=50)), 163 | ('status', models.CharField(default='none', max_length=10)), 164 | ('net_amount', models.FloatField(null=True)), 165 | ('created_on_str', models.CharField(default='', max_length=50)), 166 | ('fee_amount', models.FloatField(null=True)), 167 | ('btc_amount', models.FloatField(null=True)), 168 | ('usd_amount', models.FloatField(null=True)), 169 | ('btc_fee_amount', models.FloatField(null=True)), 170 | ('usd_fee_amount', models.FloatField(null=True)), 171 | ('opposite_price', models.FloatField(null=True)), 172 | ('net_profit', models.FloatField(null=True)), 173 | ('btc_net_profit', models.FloatField(null=True)), 174 | ('usd_net_profit', models.FloatField(null=True)), 175 | ('opposite_trade', models.ForeignKey(to='history.Trade', null=True)), 176 | ], 177 | options={ 178 | 'abstract': False, 179 | }, 180 | ), 181 | migrations.CreateModel( 182 | name='TradeRecommendation', 183 | fields=[ 184 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 185 | ('created_on', models.DateTimeField(default=history.models.get_time)), 186 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 187 | ('symbol', models.CharField(max_length=30)), 188 | ('made_on', models.TextField(max_length=30)), 189 | ('recommendation', models.CharField(max_length=30)), 190 | ('confidence', models.FloatField()), 191 | ('created_on_str', models.CharField(default='', max_length=30)), 192 | ('net_amount', models.FloatField(default=0)), 193 | ('clf', models.ForeignKey(to='history.ClassifierTest', null=True)), 194 | ('made_by', models.ForeignKey(to='history.PredictionTest', null=True)), 195 | ('trade', models.ForeignKey(to='history.Trade', null=True)), 196 | ], 197 | options={ 198 | 'abstract': False, 199 | }, 200 | ), 201 | ] 202 | -------------------------------------------------------------------------------- /history/migrations/0002_auto_20160330_1854.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import history.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('history', '0001_initial'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='balance', 17 | name='created_on', 18 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 19 | ), 20 | migrations.AlterField( 21 | model_name='balance', 22 | name='date_str', 23 | field=models.CharField(default='0', max_length=20, db_index=True), 24 | ), 25 | migrations.AlterField( 26 | model_name='classifiertest', 27 | name='type', 28 | field=models.CharField(default='mock', max_length=30, db_index=True), 29 | ), 30 | migrations.AlterField( 31 | model_name='deposit', 32 | name='created_on', 33 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 34 | ), 35 | migrations.AlterField( 36 | model_name='performancecomp', 37 | name='created_on', 38 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 39 | ), 40 | 
migrations.AlterField( 41 | model_name='performancecomp', 42 | name='price_timerange_end', 43 | field=models.DateTimeField(default=None, null=True, db_index=True), 44 | ), 45 | migrations.AlterField( 46 | model_name='performancecomp', 47 | name='price_timerange_start', 48 | field=models.DateTimeField(default=None, null=True, db_index=True), 49 | ), 50 | migrations.AlterField( 51 | model_name='predictiontest', 52 | name='type', 53 | field=models.CharField(default='mock', max_length=30, db_index=True), 54 | ), 55 | migrations.AlterField( 56 | model_name='price', 57 | name='created_on', 58 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 59 | ), 60 | migrations.AlterField( 61 | model_name='price', 62 | name='symbol', 63 | field=models.CharField(max_length=30, db_index=True), 64 | ), 65 | migrations.AlterField( 66 | model_name='trade', 67 | name='created_on', 68 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 69 | ), 70 | migrations.AlterField( 71 | model_name='traderecommendation', 72 | name='created_on', 73 | field=models.DateTimeField(default=history.models.get_time, db_index=True), 74 | ), 75 | ] 76 | -------------------------------------------------------------------------------- /history/migrations/0003_auto_20160330_1920.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('history', '0002_auto_20160330_1854'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='classifiertest', 16 | name='symbol', 17 | field=models.CharField(max_length=30, db_index=True), 18 | ), 19 | migrations.AlterField( 20 | model_name='predictiontest', 21 | name='symbol', 22 | field=models.CharField(max_length=30, db_index=True), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /history/migrations/0004_socialnetworkmention.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import history.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('history', '0003_auto_20160330_1920'), 12 | ] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name='SocialNetworkMention', 17 | fields=[ 18 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 19 | ('created_on', models.DateTimeField(default=history.models.get_time)), 20 | ('modified_on', models.DateTimeField(default=history.models.get_time)), 21 | ('network_name', models.CharField(max_length=30, db_index=True)), 22 | ('network_username', models.CharField(max_length=100)), 23 | ('network_id', models.BigIntegerField(default=0)), 24 | ('symbol', models.CharField(max_length=30, db_index=True)), 25 | ('text', models.TextField()), 26 | ], 27 | options={ 28 | 'abstract': False, 29 | }, 30 | ), 31 | ] 32 | -------------------------------------------------------------------------------- /history/migrations/0005_socialnetworkmention_network_created_on.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import 
history.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('history', '0004_socialnetworkmention'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AddField( 16 | model_name='socialnetworkmention', 17 | name='network_created_on', 18 | field=models.DateTimeField(default=history.models.get_time), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /history/migrations/0006_auto_20160416_1305.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('history', '0005_socialnetworkmention_network_created_on'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='socialnetworkmention', 16 | name='network_id', 17 | field=models.CharField(default=0, max_length=100, db_index=True), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /history/migrations/0007_socialnetworkmention_sentiment_polarity.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('history', '0006_auto_20160416_1305'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AddField( 15 | model_name='socialnetworkmention', 16 | name='sentiment_polarity', 17 | field=models.FloatField(default=0.0), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /history/migrations/0008_auto_20160416_1920.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('history', '0007_socialnetworkmention_sentiment_polarity'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AddField( 15 | model_name='socialnetworkmention', 16 | name='sentiment_subjectivity', 17 | field=models.FloatField(default=0.0), 18 | ), 19 | migrations.AlterField( 20 | model_name='socialnetworkmention', 21 | name='network_created_on', 22 | field=models.DateTimeField(auto_now_add=True), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /history/migrations/0009_auto_20160417_1332.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('history', '0008_auto_20160416_1920'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='socialnetworkmention', 16 | name='network_created_on', 17 | field=models.DateTimeField(), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /history/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/migrations/__init__.py 
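A note on the migration chain above: 0001 creates the core tables (Balance, ClassifierTest, Deposit, PerformanceComp, PredictionTest, Price, Trade, TradeRecommendation), 0002 and 0003 add db_index flags to frequently filtered columns, and 0004 through 0009 introduce and then refine SocialNetworkMention. The chain is normally applied with python manage.py migrate history; as a rough programmatic sketch (assuming pypolo.settings is the active settings module, as manage.py would configure it), the same thing can be done with Django's call_command:

import os
import django

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pypolo.settings')  # settings module shipped in this repo
django.setup()

from django.core.management import call_command

call_command('migrate', 'history')         # applies 0001_initial through 0009_auto_20160417_1332 in order
call_command('showmigrations', 'history')  # prints which of the migrations above have been applied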
-------------------------------------------------------------------------------- /history/models.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | from django.utils import timezone 3 | import datetime 4 | from pybrain.datasets import SupervisedDataSet 5 | from pybrain.tools.shortcuts import buildNetwork 6 | from pybrain.supervised.trainers import BackpropTrainer 7 | from django.db import models 8 | from history.tools import create_sample_row, get_fee_amount 9 | from django.utils.timezone import localtime 10 | from django.conf import settings 11 | from django.core.urlresolvers import reverse 12 | import cgi 13 | import time 14 | import numpy as np 15 | import matplotlib 16 | import textblob 17 | import matplotlib.pyplot as plt 18 | from matplotlib.colors import ListedColormap 19 | from sklearn.cross_validation import train_test_split 20 | from sklearn.preprocessing import StandardScaler 21 | from sklearn.neighbors import KNeighborsClassifier 22 | from sklearn.svm import SVC 23 | from sklearn.tree import DecisionTreeClassifier 24 | from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier 25 | from sklearn.naive_bayes import GaussianNB 26 | from sklearn.discriminant_analysis import LinearDiscriminantAnalysis 27 | from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis 28 | 29 | import warnings 30 | warnings.filterwarnings("ignore", category=DeprecationWarning) 31 | 32 | matplotlib.use('Agg') 33 | np.random.seed(0) 34 | 35 | 36 | def get_time(): 37 | return localtime(timezone.now()) 38 | 39 | 40 | class TimeStampedModel(models.Model): 41 | created_on = models.DateTimeField(null=False, default=get_time, db_index=True) 42 | modified_on = models.DateTimeField(null=False, default=get_time) 43 | 44 | def get_readonly_fields(self, request, obj=None): 45 | return [f.name for f in self._meta.get_fields()] 46 | 47 | def has_add_permission(self, request, obj=None): 48 | return False 49 | 50 | def has_delete_permission(self, request, obj=None): 51 | return False 52 | 53 | def save(self, *args, **kwargs): 54 | self.modified_on = get_time() 55 | return super(TimeStampedModel, self).save(*args, **kwargs) 56 | 57 | class Meta: 58 | abstract = True 59 | 60 | def url_to_edit_object(self): 61 | url = reverse('admin:{0}_{1}_change'.format(self._meta.app_label, self._meta.model_name), args=[self.id]) 62 | return 'Edit {1}'.format(url, cgi.escape(str(self))) 63 | 64 | 65 | class AbstractedTesterClass(models.Model): 66 | created_on = models.DateTimeField(null=False, default=get_time) 67 | modified_on = models.DateTimeField(null=False, default=get_time) 68 | 69 | def get_readonly_fields(self, request, obj=None): 70 | return [f.name for f in self._meta.get_fields()] 71 | 72 | def has_add_permission(self, request, obj=None): 73 | return False 74 | 75 | def has_delete_permission(self, request, obj=None): 76 | return False 77 | 78 | def save(self, *args, **kwargs): 79 | self.modified_on = get_time() 80 | return super(AbstractedTesterClass, self).save(*args, **kwargs) 81 | 82 | class Meta: 83 | abstract = True 84 | 85 | def url_to_edit_object(self): 86 | url = reverse('admin:{0}_{1}_change'.format(self._meta.app_label, self._meta.model_name), args=[self.id]) 87 | return 'Edit {1}'.format(url, cgi.escape(str(self))) 88 | 89 | def confidence(self): 90 | related = self.related_mocks() 91 | related.exclude(percent_correct__isnull=True) 92 | related = [rel.percent_correct for rel in related] 93 | if len(related) 
== 0: 94 | return 0 95 | return sum(related) / len(related) 96 | 97 | def predict_runtime(self): 98 | related = self.related_mocks() 99 | related.exclude(time=0.0) 100 | related = [rel.time for rel in related] 101 | if len(related) == 0: 102 | return 0.00 103 | return sum(related) / len(related) 104 | 105 | def get_train_and_test_data(self): 106 | data = self.get_latest_prices() 107 | 108 | if self.timedelta_back_in_granularity_increments == 0: 109 | return data, [] 110 | 111 | sample_data = data[0:(-1 * self.timedelta_back_in_granularity_increments)] 112 | test_data = data[len(sample_data):] 113 | return sample_data, test_data 114 | 115 | def get_latest_prices(self, normalize=True): 116 | from history.tools import normalization, filter_by_mins 117 | splice_point = self.minutes_back + self.timedelta_back_in_granularity_increments 118 | 119 | prices = Price.objects.filter(symbol=self.symbol).order_by('-created_on') 120 | prices = filter_by_mins(prices, self.granularity) 121 | prices = [price.price for price in prices] 122 | prices = list(prices[0:splice_point]) 123 | if normalize: 124 | prices = normalization(prices) 125 | prices.reverse() 126 | return prices 127 | 128 | 129 | class Deposit(TimeStampedModel): 130 | symbol = models.CharField(max_length=30) 131 | amount = models.FloatField(null=True) 132 | type = models.CharField(max_length=10) 133 | txid = models.CharField(max_length=500, default='') 134 | status = models.CharField(max_length=100, default='none') 135 | created_on_str = models.CharField(max_length=50, default='') 136 | 137 | 138 | class Trade(TimeStampedModel): 139 | symbol = models.CharField(max_length=30) 140 | price = models.FloatField() 141 | amount = models.FloatField(null=True) 142 | type = models.CharField(max_length=10) 143 | response = models.TextField() 144 | orderNumber = models.CharField(max_length=50, default='') 145 | status = models.CharField(max_length=10, default='none') 146 | net_amount = models.FloatField(null=True) 147 | created_on_str = models.CharField(max_length=50, default='') 148 | fee_amount = models.FloatField(null=True) 149 | btc_amount = models.FloatField(null=True) 150 | usd_amount = models.FloatField(null=True) 151 | btc_fee_amount = models.FloatField(null=True) 152 | usd_fee_amount = models.FloatField(null=True) 153 | opposite_trade = models.ForeignKey('Trade', null=True) 154 | opposite_price = models.FloatField(null=True) 155 | net_profit = models.FloatField(null=True) 156 | btc_net_profit = models.FloatField(null=True) 157 | usd_net_profit = models.FloatField(null=True) 158 | 159 | def calculatefees(self): 160 | self.fee_amount = self.amount * get_fee_amount() 161 | 162 | def calculate_exchange_rates(self): 163 | from history.tools import get_exchange_rate_to_btc, get_exchange_rate_btc_to_usd 164 | ticker = '' 165 | for this_ticker in self.symbol.split('_'): 166 | if this_ticker != 'BTC': 167 | ticker = this_ticker 168 | exchange_rate_coin_to_btc = get_exchange_rate_to_btc(ticker) 169 | exchange_rate_btc_to_usd = get_exchange_rate_btc_to_usd() 170 | self.btc_amount = exchange_rate_coin_to_btc * self.amount 171 | self.usd_amount = exchange_rate_btc_to_usd * self.btc_amount 172 | self.btc_fee_amount = exchange_rate_coin_to_btc * self.fee_amount 173 | self.usd_fee_amount = exchange_rate_btc_to_usd * self.btc_fee_amount 174 | 175 | def calculate_profitability_exchange_rates(self): 176 | from history.tools import get_exchange_rate_to_btc, get_exchange_rate_btc_to_usd 177 | ticker = '' 178 | for this_ticker in self.symbol.split('_'): 179 | if 
this_ticker != 'BTC': 180 | ticker = this_ticker 181 | exchange_rate_coin_to_btc = get_exchange_rate_to_btc(ticker) 182 | exchange_rate_btc_to_usd = get_exchange_rate_btc_to_usd() 183 | self.btc_net_profit = exchange_rate_coin_to_btc * self.net_profit 184 | self.usd_net_profit = exchange_rate_btc_to_usd * self.btc_net_profit 185 | 186 | def __str__(self): 187 | return 'Trade {}'.format(self.pk) 188 | 189 | 190 | class Price(TimeStampedModel): 191 | symbol = models.CharField(max_length=30, db_index=True) 192 | price = models.FloatField() 193 | volume = models.FloatField(null=True) 194 | lowestask = models.FloatField(null=True) 195 | highestbid = models.FloatField(null=True) 196 | created_on_str = models.CharField(max_length=50, default='') 197 | 198 | 199 | class Balance(TimeStampedModel): 200 | symbol = models.CharField(max_length=30) 201 | coin_balance = models.FloatField() 202 | usd_balance = models.FloatField(null=True) 203 | btc_balance = models.FloatField() 204 | exchange_to_btc_rate = models.FloatField() 205 | exchange_to_usd_rate = models.FloatField(null=True) 206 | deposited_amount_usd = models.FloatField(default=0.00) 207 | deposited_amount_btc = models.FloatField(default=0.00) 208 | date_str = models.CharField(max_length=20, default='0', db_index=True) 209 | 210 | 211 | class PerformanceComp(TimeStampedModel): 212 | symbol = models.CharField(max_length=30) 213 | nn_rec = models.FloatField() 214 | actual_movement = models.FloatField() 215 | delta = models.FloatField() 216 | created_on_str = models.CharField(max_length=30) 217 | directionally_same = models.BooleanField(default=False) 218 | directionally_same_int = models.IntegerField(default=0) 219 | weighted_avg_nn_rec = models.FloatField(default=0) 220 | pct_buy = models.FloatField(default=0) 221 | pct_hold = models.FloatField(default=0) 222 | pct_sell = models.FloatField(default=0) 223 | rec_count = models.IntegerField(default=0) 224 | price_timerange_start = models.DateTimeField(null=True, default=None, db_index=True) 225 | price_timerange_end = models.DateTimeField(null=True, default=None, db_index=True) 226 | tr_timerange_start = models.DateTimeField(null=True, default=None) 227 | tr_timerange_end = models.DateTimeField(null=True, default=None) 228 | 229 | 230 | class TradeRecommendation(TimeStampedModel): 231 | symbol = models.CharField(max_length=30) 232 | made_by = models.ForeignKey('PredictionTest', null=True) 233 | clf = models.ForeignKey('ClassifierTest', null=True) 234 | made_on = models.TextField(max_length=30) 235 | recommendation = models.CharField(max_length=30) 236 | confidence = models.FloatField() 237 | created_on_str = models.CharField(max_length=30, default='') 238 | net_amount = models.FloatField(default=0) 239 | trade = models.ForeignKey('Trade', null=True, db_index=True) 240 | 241 | 242 | class ClassifierTest(AbstractedTesterClass): 243 | 244 | BUY = 1 245 | SELL = 0 246 | HOLD = -1 247 | 248 | type = models.CharField(max_length=30, default='mock', db_index=True) 249 | symbol = models.CharField(max_length=30, db_index=True) 250 | name = models.CharField(max_length=100, default='') 251 | datasetinputs = models.IntegerField() 252 | granularity = models.IntegerField() 253 | minutes_back = models.IntegerField(default=0) 254 | timedelta_back_in_granularity_increments = models.IntegerField(default=0) 255 | ############### 256 | time = models.IntegerField(default=0) 257 | prediction_size = models.IntegerField(default=0) 258 | score = models.IntegerField(default=0) 259 | output = models.TextField() 260 | 
percent_correct = models.FloatField(null=True) 261 | 262 | def __str__(self): 263 | return self.name + " on " + str(self.created_on) 264 | 265 | def rerun(self, keep_new_obj=False): 266 | pass # todo 267 | 268 | def related_mocks(self): 269 | days_ago = 2 270 | return ClassifierTest.objects.filter(created_on__gt=(timezone.now() - datetime.timedelta(days=int(days_ago))), 271 | symbol=self.symbol, 272 | minutes_back=self.minutes_back, 273 | granularity=self.granularity, 274 | datasetinputs=self.datasetinputs, 275 | name=self.name, 276 | type='mock') 277 | 278 | def get_classifier(self, train=True, test=True): 279 | 280 | all_output = "" 281 | h = .02 # step size in the mesh 282 | self.names = ["Nearest Neighbors", "Linear SVM", "RBF SVM", "Decision Tree", 283 | "Random Forest", "AdaBoost", "Naive Bayes", "Linear Discriminant Analysis", 284 | "Quadratic Discriminant Analysis"] 285 | classifiers = [ 286 | KNeighborsClassifier(3), 287 | SVC(kernel="linear", C=0.025), 288 | SVC(gamma=2, C=1), 289 | DecisionTreeClassifier(max_depth=5), 290 | RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1), 291 | AdaBoostClassifier(), 292 | GaussianNB(), 293 | LinearDiscriminantAnalysis(), 294 | QuadraticDiscriminantAnalysis()] 295 | 296 | for i in range(0, len(self.names)): 297 | if self.names[i] == self.name: 298 | clf = classifiers[i] 299 | 300 | if train: 301 | start_time = int(time.time()) 302 | data = self.get_latest_prices(normalize=False) 303 | price_datasets = [[], []] 304 | for i, val in enumerate(data): 305 | try: 306 | # get classifier projection 307 | sample = create_sample_row(data, i, self.datasetinputs) 308 | last_price = data[i + self.datasetinputs - 1] 309 | next_price = data[i + self.datasetinputs] 310 | change = next_price - last_price 311 | pct_change = change / last_price 312 | fee_pct = get_fee_amount() 313 | fee_pct = fee_pct * 2 # fee x 2 since we'd need to clear both buy and sell fees to be profitable 314 | fee_pct = fee_pct * settings.FEE_MANAGEMENT_STRATEGY # see desc in settings.py 315 | do_buy = ClassifierTest.HOLD if abs(pct_change) < fee_pct else ( 316 | ClassifierTest.BUY if change > 0 else ClassifierTest.SELL) 317 | price_datasets[0].append(sample) 318 | price_datasets[1].append(do_buy) 319 | except Exception: 320 | pass 321 | 322 | data = price_datasets 323 | if self.timedelta_back_in_granularity_increments == 0: 324 | train_data = data 325 | test_data = [[], []] 326 | else: 327 | train_data = [data[0][0:(-1 * self.timedelta_back_in_granularity_increments)], 328 | data[1][0:(-1 * self.timedelta_back_in_granularity_increments)]] 329 | test_data = [data[0][len(train_data[0]):], data[1][len(train_data[1]):]] 330 | self.datasets = train_data 331 | 332 | X, y = train_data 333 | X = StandardScaler().fit_transform(X) 334 | self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=.4) 335 | 336 | self.min = {} 337 | self.max = {} 338 | self.xz = () 339 | mesh_args = [] 340 | for i in range(0, self.datasetinputs): 341 | self.min[i], self.max[i] = X[:, i].min() - .5, X[:, i].max() + .5 342 | mesh_args.append(np.arange(self.min[i], self.max[i], h)) 343 | self.xz = np.meshgrid(*mesh_args) 344 | 345 | clf.fit(self.X_train, self.y_train) 346 | score = clf.score(self.X_test, self.y_test) 347 | 348 | # Plot the decision boundary. For that, we will assign a color to each 349 | # point in the mesh [self.x_min, m_max]x[self.y_min, self.y_max]. 
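# How the boundary is evaluated: np.meshgrid(*mesh_args) above returned one grid array per
# input feature; each grid is flattened with ravel() below and the flattened columns are
# stacked by np.column_stack() into a (num_grid_points x datasetinputs) matrix, so the
# fitted classifier can score every grid point in a single call (decision_function() when
# the estimator provides it, otherwise column 1 of predict_proba()).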
350 | 351 | self.ravel_args = [] 352 | for i in range(0, self.datasetinputs): 353 | self.ravel_args.append(self.xz[i].ravel()) 354 | 355 | self._input = np.column_stack(self.ravel_args) 356 | 357 | if hasattr(clf, "decision_function"): 358 | self.Z = clf.decision_function(self._input) 359 | else: 360 | self.Z = clf.predict_proba(self._input)[:, 1] 361 | 362 | if test and len(test_data) > 0: 363 | stats = {'r': 0, 'w': 0, 'p': {0: 0, 1: 0, -1: 0}, 'a': {0: 0, 1: 0, -1: 0}} 364 | ds = test_data 365 | for i in range(0, len(ds[0])): 366 | sample = ds[0][i] 367 | actual = ds[1][i] 368 | sample = StandardScaler().fit_transform(sample) 369 | prediction = clf.predict(sample) 370 | self.prediction = prediction 371 | stats['p'][prediction[0]] += 1 372 | stats['a'][actual] += 1 373 | stats['r' if actual == prediction[0] else 'w'] = stats['r' if actual == prediction[0] else 'w'] + 1 374 | pct_correct = (1.0 * stats['r'] / (stats['r'] + stats['w'])) 375 | all_output = all_output + str(('stats', self.name, round(pct_correct, 2))) 376 | all_output = all_output + str(('stats_debug', stats)) 377 | self.percent_correct = int(pct_correct * 100) 378 | self.prediction_size = len(test_data[0]) 379 | 380 | all_output = all_output + str((self.name, round(score * 100))) 381 | self.score = score * 100 382 | end_time = int(time.time()) 383 | self.time = end_time - start_time 384 | self.output = all_output 385 | 386 | self.clf = clf 387 | 388 | return clf 389 | 390 | def predict(self, sample): 391 | last_sample = sample[-1] 392 | nn_price = 0.00 393 | sample = StandardScaler().fit_transform(sample) 394 | recommend = self.clf.predict(sample) 395 | recommend_str = 'HOLD' if recommend[0] == ClassifierTest.HOLD else ( 396 | 'BUY' if recommend[0] == ClassifierTest.BUY else 'SELL') 397 | projected_change_pct = 0.00 398 | return recommend_str, nn_price, last_sample, projected_change_pct 399 | 400 | def graph_url(self): 401 | return '/static/' + str(self.pk) + '.png' 402 | 403 | def graph_link(self): 404 | return 'graph'.format(self.graph_url()) 405 | 406 | def graph(self, filename): 407 | figure = plt.figure(figsize=(27, 9)) 408 | figure.max_num_figures = 5 409 | matplotlib.figure.max_num_figures = 5 410 | i = 0 411 | cm = plt.cm.RdBu 412 | cm_bright = ListedColormap(['#00FF00', '#FF0000', '#0000FF']) 413 | ax = plt.subplot(1, 1, i) 414 | # Plot the training points 415 | ax.scatter(self.X_train[:, 0], self.X_train[:, 1], c=self.y_train, cmap=cm_bright) 416 | # and testing points 417 | ax.scatter(self.X_test[:, 0], self.X_test[:, 1], c=self.y_test, cmap=cm_bright, alpha=0.6) 418 | ax.set_xlim(self.xz[0].min(), self.xz[0].max()) 419 | ax.set_ylim(self.xz[1].min(), self.xz[1].max()) 420 | ax.set_xticks(()) 421 | ax.set_yticks(()) 422 | 423 | self.Z = self.clf.predict(self._input) 424 | self.Z = self.Z.reshape(self.xz[0].shape) 425 | ax.contourf(self.xz[0], self.xz[1], self.Z, cmap=cm, alpha=.8) 426 | 427 | # Plot also the training points 428 | ax.scatter(self.X_train[:, 0], self.X_train[:, 1], c=self.y_train, cmap=cm_bright) 429 | # and testing points 430 | ax.scatter(self.X_test[:, 0], self.X_test[:, 1], c=self.y_test, cmap=cm_bright, 431 | alpha=0.6) 432 | 433 | ax.set_xlim(self.xz[0].min(), self.xz[0].max()) 434 | ax.set_ylim(self.xz[1].min(), self.xz[1].max()) 435 | ax.set_xticks(()) 436 | ax.set_yticks(()) 437 | ax.set_title("(" + self.name + ")") 438 | text = ('%.2f' % self.score).lstrip('0') 439 | ax.text(self.xz[0].max() - .3, self.xz[1].min() + .3, text, 440 | size=15, horizontalalignment='right') 441 | i += 1 442 | 
filepath = settings.BASE_DIR + filename 443 | figure.subplots_adjust(left=.02, right=.98) 444 | figure.savefig(filepath, dpi=100) 445 | 446 | 447 | class PredictionTest(AbstractedTesterClass): 448 | type = models.CharField(max_length=30, default='mock', db_index=True) 449 | symbol = models.CharField(max_length=30, db_index=True) 450 | percent_correct = models.FloatField(null=True) 451 | avg_diff = models.FloatField(null=True) 452 | datasetinputs = models.IntegerField() 453 | hiddenneurons = models.IntegerField() 454 | granularity = models.IntegerField() 455 | minutes_back = models.IntegerField(default=0) 456 | time = models.IntegerField(default=0) 457 | epochs = models.IntegerField(default=0) 458 | prediction_size = models.IntegerField(default=0) 459 | learningrate = models.FloatField(default=0) 460 | momentum = models.FloatField(default=0) 461 | weightdecay = models.FloatField(default=0) 462 | bias = models.BooleanField(default=False) 463 | bias_chart = models.IntegerField(default=-1) 464 | recurrent = models.BooleanField(default=False) 465 | recurrent_chart = models.IntegerField(default=-1) 466 | profitloss = models.FloatField(default=0) 467 | profitloss_int = models.IntegerField(default=0) 468 | timedelta_back_in_granularity_increments = models.IntegerField(default=0) 469 | output = models.TextField() 470 | 471 | def __str__(self): 472 | return self.symbol + " on " + str(self.created_on) 473 | 474 | def rerun(self, keep_new_obj=False): 475 | from history.predict import predict_v2 476 | try: 477 | pk = predict_v2(self.symbol, 478 | hidden_layers=self.hiddenneurons, 479 | NUM_MINUTES_BACK=self.minutes_back, 480 | NUM_EPOCHS=self.epochs, 481 | granularity_minutes=self.granularity, 482 | datasetinputs=self.datasetinputs, 483 | learningrate=self.learningrate, 484 | bias=self.bias, 485 | momentum=self.momentum, 486 | weightdecay=self.weightdecay, 487 | recurrent=self.recurrent) 488 | pt = PredictionTest.objects.get(pk=pk) 489 | if not keep_new_obj: 490 | pt.delete() 491 | else: 492 | return pt.pk 493 | except Exception as e: 494 | print(e) 495 | 496 | def related_mocks(self): 497 | days_ago = 2 498 | return PredictionTest.objects.filter(created_on__gt=(timezone.now() - datetime.timedelta(days=int(days_ago))), 499 | symbol=self.symbol, 500 | hiddenneurons=self.hiddenneurons, 501 | minutes_back=self.minutes_back, 502 | epochs=self.epochs, 503 | granularity=self.granularity, 504 | datasetinputs=self.datasetinputs, 505 | learningrate=self.learningrate, 506 | bias=self.bias, 507 | momentum=self.momentum, 508 | weightdecay=self.weightdecay, 509 | recurrent=self.recurrent, 510 | type='mock') 511 | 512 | def create_DS(self, data): 513 | size = self.datasetinputs 514 | DS = SupervisedDataSet(size, 1) 515 | try: 516 | for i, val in enumerate(data): 517 | sample = create_sample_row(data, i, size) 518 | target = data[i + size] 519 | DS.addSample(sample, (target,)) 520 | except Exception as e: 521 | if "list index out of range" not in str(e): 522 | print(e) 523 | return DS 524 | 525 | def get_nn(self, train=True): 526 | 527 | train_data, results_data = self.get_train_and_test_data() 528 | DS = self.create_DS(train_data) 529 | 530 | try: 531 | import arac # noqa 532 | print("ARAC Available, using fast mode network builder!") 533 | FNN = buildNetwork(DS.indim, self.hiddenneurons, DS.outdim, bias=self.bias, recurrent=self.recurrent, 534 | fast=True) 535 | except ImportError: 536 | FNN = buildNetwork(DS.indim, self.hiddenneurons, DS.outdim, bias=self.bias, recurrent=self.recurrent) 537 | FNN.randomize() 538 | 
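# The BackpropTrainer constructed below trains the network built above against the
# SupervisedDataSet DS, using the hyperparameters stored on this PredictionTest row
# (learningrate, momentum, weightdecay); when train=True, TRAINER.train() is run once
# per configured epoch before the fitted network is cached on self.nn and returned.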
539 | TRAINER = BackpropTrainer(FNN, dataset=DS, learningrate=self.learningrate, 540 | momentum=self.momentum, verbose=False, weightdecay=self.weightdecay) 541 | 542 | if train: 543 | for i in range(self.epochs): 544 | TRAINER.train() 545 | 546 | self.nn = FNN 547 | return FNN 548 | 549 | def recommend_trade(self, nn_price, last_sample, fee_amount=get_fee_amount()): 550 | fee_amount = fee_amount * 2 # fee x 2 since we'd need to clear both buy and sell fees to be profitable 551 | fee_amount = fee_amount * settings.FEE_MANAGEMENT_STRATEGY # see desc in settings.py 552 | anticipated_percent_increase = (nn_price - last_sample) / last_sample 553 | if abs(anticipated_percent_increase) < fee_amount: 554 | should_trade = 'HOLD' 555 | elif anticipated_percent_increase > fee_amount: 556 | should_trade = 'BUY' 557 | elif anticipated_percent_increase < fee_amount: 558 | should_trade = 'SELL' 559 | return should_trade 560 | 561 | def predict(self, sample): 562 | last_sample = sample[-1] 563 | nn_price = self.nn.activate(sample) 564 | recommend = self.recommend_trade(nn_price, last_sample) 565 | projected_change_pct = (nn_price - last_sample) / last_sample 566 | return recommend, nn_price, last_sample, projected_change_pct 567 | 568 | 569 | class SocialNetworkMention(AbstractedTesterClass): 570 | network_name = models.CharField(max_length=30, db_index=True) 571 | network_username = models.CharField(max_length=100) 572 | network_id = models.CharField(default=0, max_length=100, db_index=True) 573 | network_created_on = models.DateTimeField() 574 | symbol = models.CharField(max_length=30, db_index=True) 575 | text = models.TextField() 576 | sentiment_polarity = models.FloatField(default=0.00) 577 | sentiment_subjectivity = models.FloatField(default=0.00) 578 | 579 | def set_sentiment(self): 580 | sentiment = textblob.TextBlob(self.text).sentiment 581 | self.sentiment_polarity = sentiment.polarity 582 | self.sentiment_subjectivity = sentiment.subjectivity 583 | -------------------------------------------------------------------------------- /history/poloniex.py: -------------------------------------------------------------------------------- 1 | import urllib 2 | import urllib2 3 | import json 4 | import time 5 | import hmac 6 | import hashlib 7 | 8 | 9 | def createTimeStamp(datestr, format="%Y-%m-%d %H:%M:%S"): 10 | return time.mktime(time.strptime(datestr, format)) 11 | 12 | 13 | class poloniex: 14 | def __init__(self, APIKey, Secret): 15 | self.APIKey = APIKey 16 | self.Secret = Secret 17 | 18 | def post_process(self, before): 19 | after = before 20 | 21 | # Add timestamps if there isnt one but is a datetime 22 | if('return' in after): 23 | if(isinstance(after['return'], list)): 24 | for x in xrange(0, len(after['return'])): 25 | if(isinstance(after['return'][x], dict)): 26 | if('datetime' in after['return'][x] and 'timestamp' not in after['return'][x]): 27 | after['return'][x]['timestamp'] = float(createTimeStamp(after['return'][x]['datetime'])) 28 | 29 | return after 30 | 31 | def api_query(self, command, req={}): 32 | 33 | if(command == "returnTicker" or command == "return24Volume"): 34 | ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/public?command=' + command)) 35 | return json.loads(ret.read()) 36 | elif(command == "returnOrderBook"): 37 | ret = urllib2.urlopen(urllib2.Request( 38 | 'http://poloniex.com/public?command=' + command + '¤cyPair=' + str(req['currencyPair']))) 39 | return json.loads(ret.read()) 40 | elif(command == "returnMarketTradeHistory"): 41 | ret = 
urllib2.urlopen(urllib2.Request('http://poloniex.com/public?command=' + 42 | "returnTradeHistory" + '¤cyPair=' + str(req['currencyPair']))) 43 | return json.loads(ret.read()) 44 | else: 45 | req['command'] = command 46 | req['nonce'] = int(time.time()*1000) 47 | post_data = urllib.urlencode(req) 48 | 49 | sign = hmac.new(self.Secret, post_data, hashlib.sha512).hexdigest() 50 | headers = { 51 | 'Sign': sign, 52 | 'Key': self.APIKey 53 | } 54 | 55 | ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/tradingApi', post_data, headers)) 56 | jsonRet = json.loads(ret.read()) 57 | return self.post_process(jsonRet) 58 | 59 | def returnTicker(self): 60 | return self.api_query("returnTicker") 61 | 62 | def return24Volume(self): 63 | return self.api_query("return24Volume") 64 | 65 | def returnOrderBook(self, currencyPair): 66 | return self.api_query("returnOrderBook", {'currencyPair': currencyPair}) 67 | 68 | def returnMarketTradeHistory(self, currencyPair): 69 | return self.api_query("returnMarketTradeHistory", {'currencyPair': currencyPair}) 70 | 71 | # Returns all of your balances. 72 | # Outputs: 73 | # {"BTC":"0.59098578","LTC":"3.31117268", ... } 74 | def returnBalances(self): 75 | return self.api_query('returnCompleteBalances') 76 | 77 | # Returns your open orders for a given market, specified by the "currencyPair" POST parameter, e.g. "BTC_XCP" 78 | # Inputs: 79 | # currencyPair The currency pair e.g. "BTC_XCP" 80 | # Outputs: 81 | # orderNumber The order number 82 | # type sell or buy 83 | # rate Price the order is selling or buying at 84 | # Amount Quantity of order 85 | # total Total value of order (price * quantity) 86 | def returnOpenOrders(self, currencyPair): 87 | return self.api_query('returnOpenOrders', {"currencyPair": currencyPair}) 88 | 89 | # Returns your trade history for a given market, specified by the "currencyPair" POST parameter 90 | # Inputs: 91 | # currencyPair The currency pair e.g. "BTC_XCP" 92 | # Outputs: 93 | # date Date in the form: "2014-02-19 03:44:59" 94 | # rate Price the order is selling or buying at 95 | # amount Quantity of order 96 | # total Total value of order (price * quantity) 97 | # type sell or buy 98 | def returnTradeHistory(self, currencyPair): 99 | return self.api_query('returnTradeHistory', {"currencyPair": currencyPair}) 100 | 101 | # Places a buy order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". 102 | # If successful, the method will return the order number. 103 | # Inputs: 104 | # currencyPair The curreny pair 105 | # rate price the order is buying at 106 | # amount Amount of coins to buy 107 | # Outputs: 108 | # orderNumber The order number 109 | def buy(self, currencyPair, rate, amount): 110 | return self.api_query('buy', {"currencyPair": currencyPair, "rate": rate, "amount": amount}) 111 | 112 | # Places a sell order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". 113 | # If successful, the method will return the order number. 114 | # Inputs: 115 | # currencyPair The curreny pair 116 | # rate price the order is selling at 117 | # amount Amount of coins to sell 118 | # Outputs: 119 | # orderNumber The order number 120 | def sell(self, currencyPair, rate, amount): 121 | return self.api_query('sell', {"currencyPair": currencyPair, "rate": rate, "amount": amount}) 122 | 123 | # Cancels an order you have placed in a given market. Required POST parameters are "currencyPair" and "orderNumber". 
124 | # Inputs: 125 | # currencyPair The curreny pair 126 | # orderNumber The order number to cancel 127 | # Outputs: 128 | # succes 1 or 0 129 | def cancel(self, currencyPair, orderNumber): 130 | return self.api_query('cancelOrder', {"currencyPair": currencyPair, "orderNumber": orderNumber}) 131 | 132 | # Immediately places a withdrawal for a given currency, with no email confirmation. 133 | # In order to use this method, the withdrawal privilege must be enabled for your API key. 134 | # Required POST parameters are "currency", "amount", and "address". Sample output: {"response":"Withdrew 2398 NXT."} 135 | # Inputs: 136 | # currency The currency to withdraw 137 | # amount The amount of this coin to withdraw 138 | # address The withdrawal address 139 | # Outputs: 140 | # response Text containing message about the withdrawal 141 | def withdraw(self, currency, amount, address): 142 | return self.api_query('withdraw', {"currency": currency, "amount": amount, "address": address}) 143 | 144 | def returnDepositHistory(self, start, end): 145 | return self.api_query('returnDepositsWithdrawals', {"start": start, "end": end}) 146 | -------------------------------------------------------------------------------- /history/predict.py: -------------------------------------------------------------------------------- 1 | from history.tools import create_sample_row 2 | from history.models import PredictionTest 3 | import time 4 | from history.tools import print_and_log 5 | 6 | 7 | def predict_v2(ticker, hidden_layers=15, NUM_MINUTES_BACK=1000, NUM_EPOCHS=1000, granularity_minutes=15, 8 | datasetinputs=5, learningrate=0.005, bias=False, momentum=0.1, weightdecay=0.0, recurrent=False, 9 | timedelta_back_in_granularity_increments=0): 10 | 11 | # setup 12 | print_and_log("(p)starting ticker:{} hidden:{} min:{} epoch:{} gran:{} dsinputs:{} learningrate:{} bias:{} momentum:{} weightdecay:{}\ 13 | recurrent:{}, timedelta_back_in_granularity_increments:{} ".format( 14 | ticker, hidden_layers, NUM_MINUTES_BACK, NUM_EPOCHS, granularity_minutes, datasetinputs, 15 | learningrate, bias, momentum, weightdecay, recurrent, timedelta_back_in_granularity_increments)) 16 | pt = PredictionTest() 17 | pt.type = 'mock' 18 | pt.symbol = ticker 19 | pt.datasetinputs = datasetinputs 20 | pt.hiddenneurons = hidden_layers 21 | pt.minutes_back = NUM_MINUTES_BACK 22 | pt.epochs = NUM_EPOCHS 23 | pt.momentum = momentum 24 | pt.granularity = granularity_minutes 25 | pt.bias = bias 26 | pt.bias_chart = -1 if pt.bias is None else (1 if pt.bias else 0) 27 | pt.learningrate = learningrate 28 | pt.weightdecay = weightdecay 29 | pt.recurrent = recurrent 30 | pt.recurrent_chart = -1 if pt.recurrent is None else (1 if pt.recurrent else 0) 31 | pt.timedelta_back_in_granularity_increments = timedelta_back_in_granularity_increments 32 | all_output = "" 33 | start_time = int(time.time()) 34 | 35 | # get neural network & data 36 | pt.get_nn() 37 | sample_data, test_data = pt.get_train_and_test_data() 38 | 39 | # output / testing 40 | round_to = 2 41 | num_times_directionally_correct = 0 42 | num_times = 0 43 | diffs = [] 44 | profitloss_pct = [] 45 | for i, val in enumerate(test_data): 46 | try: 47 | # get NN projection 48 | sample = create_sample_row(test_data, i, datasetinputs) 49 | recommend, nn_price, last_sample, projected_change_pct = pt.predict(sample) 50 | 51 | # calculate profitability 52 | actual_price = test_data[i+datasetinputs] 53 | diff = nn_price - actual_price 54 | diff_pct = 100 * diff / actual_price 55 | directionally_correct = 
((actual_price - last_sample) > 0 and (nn_price - last_sample) > 0) \ 56 | or ((actual_price - last_sample) < 0 and (nn_price - last_sample) < 0) 57 | if recommend != 'HOLD': 58 | profitloss_pct = profitloss_pct + [abs((actual_price - last_sample) / last_sample) * 59 | (1 if directionally_correct else -1)] 60 | if directionally_correct: 61 | num_times_directionally_correct = num_times_directionally_correct + 1 62 | num_times = num_times + 1 63 | diffs.append(diff) 64 | output = "{}) seq ending in {} => {} (act {}, {}/{} pct off); Recommend: {}; Was Directionally Correct:{}\ 65 | ".format(i, round(actual_price, round_to), round(nn_price, round_to), 66 | round(actual_price, round_to), round(diff, round_to), round(diff_pct, 1), 67 | recommend, directionally_correct) 68 | all_output = all_output + "\n" + output 69 | except Exception as e: 70 | if "list index out of range" not in str(e): 71 | print_and_log("(p)"+str(e)) 72 | pass 73 | 74 | avg_diff = sum([abs(diff[0]) for diff in diffs]) / num_times # noqa 75 | pct_correct = 100 * num_times_directionally_correct / num_times 76 | modeled_profit_loss = sum(profitloss_pct) / len(profitloss_pct) 77 | output = 'directionally correct {} of {} times. {}%. avg diff={}, profit={}'.format( 78 | num_times_directionally_correct, num_times, round(pct_correct, 0), round(avg_diff, 4), 79 | round(modeled_profit_loss, 3)) 80 | print_and_log("(p)"+output) 81 | all_output = all_output + "\n" + output 82 | 83 | end_time = int(time.time()) 84 | pt.time = end_time - start_time 85 | pt.prediction_size = len(diffs) 86 | pt.output = all_output 87 | pt.percent_correct = pct_correct 88 | pt.avg_diff = avg_diff 89 | pt.profitloss = modeled_profit_loss 90 | pt.profitloss_int = int(pt.profitloss * 100) 91 | pt.save() 92 | 93 | return pt.pk 94 | -------------------------------------------------------------------------------- /history/technical_indicators.py: -------------------------------------------------------------------------------- 1 | # from https://github.com/matplotlib/matplotlib/blob/master/examples/pylab_examples/finance_work2.py 2 | # SEE ALSO LISCENSE_MATPLOTLIB 3 | 4 | import numpy as np 5 | import pandas as pd 6 | 7 | 8 | def moving_average(x, n, type='simple'): 9 | """ 10 | compute an n period moving average. 11 | 12 | type is 'simple' | 'exponential' 13 | 14 | """ 15 | x = np.asarray(x) 16 | if type == 'simple': 17 | weights = np.ones(n) 18 | else: 19 | weights = np.exp(np.linspace(-1., 0., n)) 20 | 21 | weights /= weights.sum() 22 | 23 | a = np.convolve(x, weights, mode='full')[:len(x)] 24 | a[:n] = a[n] 25 | return a 26 | 27 | 28 | def relative_strength(prices, n=14): 29 | """ 30 | compute the n period relative strength indicator 31 | http://stockcharts.com/school/doku.php?id=chart_school:glossary_r#relativestrengthindex 32 | http://www.investopedia.com/terms/r/rsi.asp 33 | """ 34 | 35 | deltas = np.diff(prices) 36 | seed = deltas[:n + 1] 37 | up = seed[seed >= 0].sum() / n 38 | down = -seed[seed < 0].sum() / n 39 | rs = up / down 40 | rsi = np.zeros_like(prices) 41 | rsi[:n] = 100. - 100. / (1. + rs) 42 | 43 | for i in range(n, len(prices)): 44 | delta = deltas[i - 1] # cause the diff is 1 shorter 45 | 46 | if delta > 0: 47 | upval = delta 48 | downval = 0. 49 | else: 50 | upval = 0. 51 | downval = -delta 52 | 53 | up = (up * (n - 1) + upval) / n 54 | down = (down * (n - 1) + downval) / n 55 | 56 | rs = up / down 57 | rsi[i] = 100. - 100. / (1. 
+ rs) 58 | 59 | return rsi 60 | 61 | 62 | def moving_average_convergence(x, nslow=26, nfast=12): 63 | """ 64 | compute the MACD (Moving Average Convergence/Divergence) using a fast and slow exponential moving avg' 65 | return value is emaslow, emafast, macd which are len(x) arrays 66 | """ 67 | emaslow = moving_average(x, nslow, type='exponential') 68 | emafast = moving_average(x, nfast, type='exponential') 69 | return emaslow, emafast, emafast - emaslow 70 | 71 | 72 | # thanks http://stackoverflow.com/questions/28477222/python-pandas-calculate-ichimoku-chart-components 73 | def ichimoku(price_objs): 74 | """ 75 | computes the ichimoku cloud for price_objs 76 | """ 77 | 78 | dates = [pd.to_datetime(str(obj.created_on)) for obj in price_objs] 79 | prices = [obj.price for obj in price_objs] 80 | 81 | d = {'date': dates, 82 | 'price': prices} 83 | _prices = pd.DataFrame(d) 84 | 85 | # Tenkan-sen (Conversion Line): (9-period high + 9-period low)/2)) 86 | period9_high = pd.rolling_max(_prices['price'], window=9) 87 | period9_low = pd.rolling_min(_prices['price'], window=9) 88 | tenkan_sen = (period9_high + period9_low) / 2 89 | 90 | # Kijun-sen (Base Line): (26-period high + 26-period low)/2)) 91 | period26_high = pd.rolling_max(_prices['price'], window=26) 92 | period26_low = pd.rolling_min(_prices['price'], window=26) 93 | kijun_sen = (period26_high + period26_low) / 2 94 | 95 | # Senkou Span A (Leading Span A): (Conversion Line + Base Line)/2)) 96 | senkou_span_a = ((tenkan_sen + kijun_sen) / 2).shift(26) 97 | 98 | # Senkou Span B (Leading Span B): (52-period high + 52-period low)/2)) 99 | period52_high = pd.rolling_max(_prices['price'], window=52) 100 | period52_low = pd.rolling_min(_prices['price'], window=52) 101 | senkou_span_b = ((period52_high + period52_low) / 2).shift(26) 102 | 103 | # The most current closing price plotted 22 time periods behind (optional) 104 | chikou_span = _prices.shift(-22) # 22 according to investopedia 105 | 106 | return { 107 | 'tenkan_sen': tenkan_sen, 108 | 'kijun_sen': kijun_sen, 109 | 'senkou_span_a': senkou_span_a, 110 | 'senkou_span_b': senkou_span_b, 111 | 'chikou_span': chikou_span, 112 | } 113 | -------------------------------------------------------------------------------- /history/templates/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/templates/__init__.py -------------------------------------------------------------------------------- /history/templates/admin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/owocki/pytrader/17d2f233ffa65808c21af2ce4e0c2cf4a292e73a/history/templates/admin/__init__.py -------------------------------------------------------------------------------- /history/templates/admin/auth/user/add_form.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/change_form.html" %} 2 | {% load i18n %} 3 | 4 | {% block form_top %} 5 | {% if not is_popup %} 6 |
{% trans "First, enter a username and password. Then, you'll be able to edit more user options." %}
7 | {% else %} 8 |{% trans "Enter a username and password." %}
9 | {% endif %} 10 | {% endblock %} 11 | 12 | {% block after_field_sets %} 13 | 14 | {% endblock %} 15 | -------------------------------------------------------------------------------- /history/templates/admin/auth/user/change_password.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/base_site.html" %} 2 | {% load i18n admin_static %} 3 | {% load admin_urls %} 4 | 5 | {% block extrahead %}{{ block.super }} 6 | 7 | {% endblock %} 8 | {% block extrastyle %}{{ block.super }}{% endblock %} 9 | {% block bodyclass %}{{ block.super }} {{ opts.app_label }}-{{ opts.model_name }} change-form{% endblock %} 10 | {% if not is_popup %} 11 | {% block breadcrumbs %} 12 | 19 | {% endblock %} 20 | {% endif %} 21 | {% block content %}{{ model.name }} | 26 | {% else %} 27 |{{ model.name }} | 28 | {% endif %} 29 | 30 | {% if model.add_url %} 31 |{% trans 'Add' %} | 32 | {% else %} 33 |34 | {% endif %} 35 | 36 | {% if model.admin_url %} 37 | | {% trans 'Change' %} | 38 | {% else %} 39 |40 | {% endif %} 41 | |
---|
{% trans "You don't have permission to edit anything." %}
48 | {% endif %} 49 |151 | Link 152 | | 153 |154 | Created 155 | | 156 |157 | Accuracy 158 | | 159 |
---|---|---|
163 | ClassifierTest #{{pt.pk}} 164 | | 165 |166 | {{pt.created_on}} 167 | | 168 |169 | {{pt.percent_correct}} 170 | | 171 |
148 | Link 149 | | 150 |151 | Created 152 | | 153 |154 | Accuracy 155 | | 156 |
---|---|---|
160 | PredictionTest #{{pt.pk}} 161 | | 162 |163 | {{pt.created_on}} 164 | | 165 |166 | {{pt.percent_correct}} 167 | | 168 |
98 | Symbol 99 | | 100 |101 | Date 102 | | 103 |104 | Avg Unit Buy Price 105 | | 106 |107 | Avg Unit Sell 108 | | 109 |110 | Buy Volume 111 | | 112 |113 | Sell Volume 114 | | 115 |116 | Net Volume 117 | | 118 |119 | Ending Balance 120 | | 121 |122 | Realized Net Profit / Loss 123 | | 124 |125 | Unrealized Net Profit / Loss 126 | | 127 |
---|---|---|---|---|---|---|---|---|---|
131 | {{this.symbol}} 132 | | 133 |134 | {{this.date}} 135 | | 136 |137 | {{this.buy}} 138 | | 139 |140 | {{this.sell}} 141 | | 142 |143 | {{this.buyvol}} {{this.one_symbol}} 144 | | 145 |146 | {{this.sellvol}} {{this.one_symbol}} 147 | | 148 |149 | {{this.netvol}} {{this.one_symbol}} 150 | | 151 |152 | {{this.bal}} {{this.one_symbol}} 153 | | 154 |155 | {{this.diff}} {{this.one_symbol}} 156 | | 157 |158 | {% if this.unrealizeddiff and this.date == max_date %} 159 | {{this.unrealizeddiff}} {{this.one_symbol}} 160 | {% endif %} 161 | | 162 |