├── tests ├── __init__.py ├── risk │ ├── __init__.py │ ├── risk-answer-key-checksums │ ├── annotation_utils.py │ ├── test_minute_risk.py │ ├── upload_answer_key.py │ ├── AnswerKeyLink.ipynb │ ├── AnswerKeyAnnotations.ipynb │ └── test_risk_cumulative.py ├── finance │ └── __init__.py ├── test_blotter.py ├── test_examples.py ├── test_utils.py ├── test_sources.py ├── test_data_util.py ├── test_exception_handling.py └── test_algorithm_gen.py ├── alephnull ├── gens │ ├── __init__.py │ ├── composites.py │ └── utils.py ├── utils │ ├── __init__.py │ ├── math_utils.py │ ├── protocol_utils.py │ ├── test_utils.py │ ├── simfactory.py │ ├── data.py │ └── tradingcalendar_lse.py ├── experiment │ ├── __init__.py │ ├── db_tester.py │ ├── notes_on_futures_data.txt │ └── margin mechanics pseudocode.txt ├── live │ └── __init__.py ├── data │ ├── __init__.py │ ├── benchmarks.py │ ├── treasuries_can.py │ ├── loader_utils.py │ └── treasuries.py ├── sources │ ├── __init__.py │ ├── data_source.py │ ├── futures_data_frame_source.py │ └── data_frame_source.py ├── finance │ ├── __init__.py │ ├── risk │ │ ├── __init__.py │ │ └── report.py │ ├── constants.py │ ├── performance │ │ └── __init__.py │ └── commission.py ├── version.py ├── transforms │ ├── __init__.py │ ├── returns.py │ ├── vwap.py │ ├── stddev.py │ └── mavg.py ├── __init__.py ├── examples │ ├── buyapple.py │ ├── buystock.py │ ├── test_algo.py │ ├── dual_ema_talib.py │ ├── dual_moving_average.py │ ├── pairtrade.py │ ├── olmar.py │ ├── buystockasfuture.py │ └── FuturesTradingAlgorithm.py ├── roll_method.py ├── errors.py └── protocol.py ├── MANIFEST.in ├── setup.cfg ├── Aleph Call diagram.png ├── docs ├── modules.rst ├── quickstart.rst ├── zipline.gens.rst ├── zipline.data.rst ├── contributing.rst ├── zipline.rst ├── zipline.finance.rst ├── zipline.utils.rst ├── zipline.transforms.rst ├── index.rst ├── installation.rst ├── make.bat └── Makefile ├── etc ├── goodies.txt ├── ordered_pip.sh ├── requirements.txt ├── requirements_dev.txt └── git-hooks │ └── pre-commit ├── Vagrantfile ├── .travis.yml ├── .gitignore ├── README.md ├── vagrant_init.sh └── setup.py /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/risk/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /alephnull/gens/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /alephnull/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/finance/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /alephnull/experiment/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | -------------------------------------------------------------------------------- /alephnull/live/__init__.py: 
-------------------------------------------------------------------------------- 1 | __author__ = 'oglebrandon' 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [nosetests] 2 | verbosity=2 3 | detailed-errors=1 4 | with-ignore-docstrings=1 5 | -------------------------------------------------------------------------------- /Aleph Call diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CarterBain/AlephNull/HEAD/Aleph Call diagram.png -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | *********************** 2 | Packages and Modules 3 | *********************** 4 | 5 | .. toctree:: 6 | :maxdepth: 4 7 | 8 | zipline 9 | -------------------------------------------------------------------------------- /etc/goodies.txt: -------------------------------------------------------------------------------- 1 | # Extra modules, goodies for algorithms. 2 | 3 | matplotlib==1.3.0 4 | scipy==0.12.0 5 | scikit-learn==0.13.1 6 | statsmodels==0.5.0 7 | patsy==0.1.0 8 | -------------------------------------------------------------------------------- /alephnull/data/__init__.py: -------------------------------------------------------------------------------- 1 | from . import loader 2 | from .loader import load_from_yahoo, load_bars_from_yahoo 3 | 4 | __all__ = ['loader', 'load_from_yahoo', 'load_bars_from_yahoo'] 5 | -------------------------------------------------------------------------------- /etc/ordered_pip.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | a=0 4 | while read line 5 | do 6 | if [[ -n "$line" && "$line" != \#* ]] ; then 7 | pip install --use-mirrors $line 8 | fi 9 | ((a = a + 1)) 10 | done < $1 11 | echo "$0: Final package count is $a"; 12 | -------------------------------------------------------------------------------- /alephnull/sources/__init__.py: -------------------------------------------------------------------------------- 1 | from alephnull.sources.data_frame_source import DataFrameSource, DataPanelSource 2 | from alephnull.sources.test_source import SpecificEquityTrades 3 | 4 | __all__ = [ 5 | 'DataFrameSource', 6 | 'DataPanelSource', 7 | 'SpecificEquityTrades', 8 | 'FuturesDataFrameSource' 9 | ] 10 | -------------------------------------------------------------------------------- /etc/requirements.txt: -------------------------------------------------------------------------------- 1 | iso8601==0.1.4 2 | 3 | # Logging 4 | Logbook==0.6.0 5 | 6 | # Scientific Libraries 7 | 8 | pytz==2013.8 9 | numpy==1.7.1 10 | 11 | pandas==0.12.0 12 | python-dateutil==2.1 13 | six==1.3.0 14 | 15 | # Cython is required for TA-Lib 16 | Cython==0.19.1 17 | TA-Lib==0.4.7 18 | 19 | # For fetching remote data 20 | requests==1.2.3 21 | -------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | Vagrant.configure("2") do |config| 5 | config.vm.box = "precise64" 6 | config.vm.box_url = "http://files.vagrantup.com/precise64.box" 7 | config.vm.provider :virtualbox do |vb| 8 | vb.customize ["modifyvm", :id, "--memory", 2048, "--cpus", 2] 9 | 
end 10 | config.vm.provision "shell", path: "vagrant_init.sh" 11 | end 12 | -------------------------------------------------------------------------------- /alephnull/experiment/db_tester.py: -------------------------------------------------------------------------------- 1 | import alephnull.experiment.dummy_futures_data_generator as dat 2 | import alephnull.experiment.sqlite_interface as sq 3 | import random 4 | 5 | def test_db(): 6 | fdb = sq.FuturesDB("temp" + str(random.randint(0,1000000)) + ".db") 7 | fdb.initialize_tables() 8 | fdb.insert_dict(dat.create_dummy_universe_dict()) 9 | 10 | result = fdb.get_all_timestamps() 11 | return (fdb, result) -------------------------------------------------------------------------------- /docs/quickstart.rst: -------------------------------------------------------------------------------- 1 | ********** 2 | Quickstart 3 | ********** 4 | 5 | Dual-Moving Average Example 6 | =========================== 7 | 8 | The following code implements a simple dual moving average algorithm 9 | and tests it on data extracted from yahoo finance. 10 | 11 | .. include:: ../zipline/examples/dual_moving_average.py 12 | :literal: 13 | 14 | You can find other examples in `the zipline/examples directory `_. 15 | -------------------------------------------------------------------------------- /tests/risk/risk-answer-key-checksums: -------------------------------------------------------------------------------- 1 | 3ac0773c4be4e9e5bacd9c6fa0e03e15 2 | 3a5fae958c8bac684f1773fa8dff7810 3 | 19d580890e211a122e9e746f07c80cbc 4 | 70cfe3677a0ff401c801b8628e125d8f 5 | 99b3855ef1b8963163c3cb8f7e05cb70 6 | 97dfb557c3501179504926e4079e6446 7 | cc507b6fca18aabadac69657181edd4e 8 | 5b48e6a70181d73ecb7f07df5a3092e2 9 | 3343940379161143630503413627a53a 10 | 820235c4157a3c55474836438019ef2e 11 | 75c1b1441efbc2431215835a5079ccc6 12 | 37e3ea4a1788f1aa6f3ee0986bc625ae 13 | 651e611e723e2a58b1ded91d0cd39b66 14 | -------------------------------------------------------------------------------- /etc/requirements_dev.txt: -------------------------------------------------------------------------------- 1 | # Testing 2 | nose==1.3.0 3 | nose-parameterized==0.3.1 4 | nose-ignore-docstring==0.1 5 | xlrd==0.9.2 6 | 7 | # Linting 8 | 9 | flake8==2.1.0 10 | mccabe==0.2.1 11 | pep8==1.4.6 12 | pyflakes==0.7.3 13 | pip-tools==0.3.4 14 | 15 | # Documentation Conversion 16 | 17 | pyandoc==0.0.1 18 | 19 | # Example scripts that are run during unit tests use the following: 20 | 21 | scipy==0.12.0 22 | matplotlib==1.3.0 23 | # tornado and pyparsing are required by matplotlib 24 | tornado==3.1.1 25 | pyparsing==2.0.1 26 | statsmodels==0.4.3 27 | patsy==0.1.0 28 | 29 | Markdown==2.3.1 30 | -------------------------------------------------------------------------------- /alephnull/finance/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | from . import trading 17 | 18 | __all__ = [ 19 | 'trading' 20 | ] 21 | -------------------------------------------------------------------------------- /docs/zipline.gens.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline.gens` subpackage 2 | ============================== 3 | 4 | .. automodule:: zipline.gens.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`composites` Module 10 | ------------------------- 11 | 12 | .. automodule:: zipline.gens.composites 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`tradesimulation` Module 18 | ------------------------------ 19 | 20 | .. automodule:: zipline.gens.tradesimulation 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`utils` Module 26 | --------------------- 27 | 28 | .. automodule:: zipline.gens.utils 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | before_install: 5 | - wget -O ta-lib-0.4.0-src.tar.gz http://sourceforge.net/projects/ta-lib/files/ta-lib/0.4.0/ta-lib-0.4.0-src.tar.gz/download 6 | - tar xvzf ta-lib-0.4.0-src.tar.gz 7 | - pushd ta-lib; ./configure; make; sudo make install; popd 8 | - export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH 9 | - sudo apt-get install gfortran 10 | install: 11 | - cat etc/requirements_dev.txt | grep -v "^#" | grep -v "^$" | grep -v ipython | grep -v nose== | grep -v scipy | grep -v matplotlib | grep -v statsmodels | grep -v patsy | xargs pip install --use-mirrors 12 | - etc/ordered_pip.sh etc/requirements.txt 13 | before_script: 14 | - "flake8 alephnull tests" 15 | script: 16 | - nosetests --exclude=^test_examples 17 | -------------------------------------------------------------------------------- /alephnull/utils/math_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import math 17 | 18 | 19 | def tolerant_equals(a, b, atol=10e-7, rtol=10e-7): 20 | return math.fabs(a - b) <= (atol + rtol * math.fabs(b)) 21 | -------------------------------------------------------------------------------- /alephnull/version.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | BANNER = """ 18 | Zipline {version} 19 | Released under BSD3 20 | """.strip() 21 | 22 | VERSION = (0, 0, 1, 'dev') 23 | 24 | 25 | def pretty_version(): 26 | return BANNER.format(version='.'.join(VERSION)) 27 | -------------------------------------------------------------------------------- /alephnull/finance/risk/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from . report import RiskReport 17 | from . period import RiskMetricsPeriod 18 | from . cumulative import RiskMetricsCumulative 19 | 20 | 21 | __all__ = [ 22 | 'RiskReport', 23 | 'RiskMetricsPeriod', 24 | 'RiskMetricsCumulative', 25 | ] 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .bundle 2 | db/*.sqlite3 3 | *.ipynb 4 | *.orig 5 | .idea/ 6 | . 7 | log/*.log 8 | *.log 9 | tmp/**/* 10 | tmp/* 11 | *.swp 12 | *~ 13 | #mac autosaving file 14 | .DS_Store 15 | *.py[co] 16 | 17 | # Installer logs 18 | pip-log.txt 19 | 20 | # Unit test / coverage reports 21 | .coverage 22 | .tox 23 | test.log 24 | .noseids 25 | *.xlsx 26 | 27 | # Compiled python files 28 | *.py[co] 29 | 30 | # Packages 31 | *.egg 32 | *.egg-info 33 | dist 34 | build 35 | eggs 36 | cover 37 | parts 38 | bin 39 | var 40 | sdist 41 | develop-eggs 42 | .installed.cfg 43 | coverage.xml 44 | nosetests.xml 45 | 46 | # C Extensions 47 | *.o 48 | *.so 49 | *.out 50 | # git add -f if needed 51 | *.c 52 | 53 | # Vim 54 | *.swp 55 | *.swo 56 | 57 | # Built documentation 58 | docs/_build/* 59 | 60 | # database of vbench 61 | benchmarks.db 62 | 63 | # downloaded data 64 | alephnull/data/*.msgpack 65 | 66 | # Vagrant temp folder 67 | .vagrant 68 | 69 | */.idea -------------------------------------------------------------------------------- /docs/zipline.data.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline.data` subpackage 2 | =============================== 3 | 4 | .. automodule:: zipline.data.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`benchmarks` Module 10 | ------------------------- 11 | 12 | .. automodule:: zipline.data.benchmarks 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`loader` Module 18 | -------------------- 19 | 20 | .. 
automodule:: zipline.data.loader 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`loader_utils` Module 26 | -------------------------- 27 | 28 | .. automodule:: zipline.data.loader_utils 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | :mod:`treasuries` Module 34 | ------------------------ 35 | 36 | .. automodule:: zipline.data.treasuries 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | AlephNull 2 | ======= 3 | AlephNull is a python module for the development 4 | and execution of algorithmic trading strategies. 5 | The library is being developed under commission 6 | by [Carter Bain](http://www.carterbain.com). 7 | 8 | The module is built on top of the [Zipline library](https://github.com/quantopian/zipline), 9 | the backbone of the web-based backtesting 10 | platform [Quantopian](https://www.quantopian.com) 11 | 12 | The aim of the module is to extend the features 13 | of Zipline, for use within an institutional framework. 14 | We hope to use the module to standardize research across 15 | our trade desk and support live execution across all 16 | asset classes for our clients. 17 | 18 | Dependencies (zipline) 19 | ------------ 20 | 21 | * Python (>= 2.7.2) 22 | * numpy (>= 1.6.0) 23 | * pandas (>= 0.9.0) 24 | * pytz 25 | * Logbook 26 | * requests 27 | * iso8601 28 | * [python-dateutil](https://pypi.python.org/pypi/python-dateutil) (>= 2.1) 29 | 30 | 31 | -------------------------------------------------------------------------------- /alephnull/finance/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | TRADING_DAYS_IN_YEAR = 250 17 | TRADING_HOURS_IN_DAY = 6 18 | MINUTES_IN_HOUR = 60 19 | 20 | ANNUALIZER = {'daily': TRADING_DAYS_IN_YEAR, 21 | 'hourly': TRADING_DAYS_IN_YEAR * TRADING_HOURS_IN_DAY, 22 | 'minute': TRADING_DAYS_IN_YEAR * TRADING_HOURS_IN_DAY * 23 | MINUTES_IN_HOUR} 24 | -------------------------------------------------------------------------------- /tests/risk/annotation_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import markdown 17 | 18 | 19 | # Inspired by: 20 | # http://catherinedevlin.blogspot.com/2013/06/\ 21 | # easy-html-output-in-ipython-notebook.html 22 | class Markdown(str): 23 | """ 24 | Markdown wrapper to allow dynamic Markdown cells. 25 | """ 26 | def _repr_html_(self): 27 | return markdown.markdown(self) 28 | -------------------------------------------------------------------------------- /alephnull/finance/performance/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from . tracker import PerformanceTracker, FuturesPerformanceTracker 17 | from . period import PerformancePeriod 18 | from . position import Position 19 | 20 | __all__ = [ 21 | 'PerformanceTracker', 22 | 'FuturesPerformanceTracker', 23 | 'PerformancePeriod', 24 | 'Position', 25 | 'FuturesPerformancePeriod', 26 | ] 27 | -------------------------------------------------------------------------------- /alephnull/transforms/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from . mavg import MovingAverage 17 | from . stddev import MovingStandardDev 18 | from . vwap import MovingVWAP 19 | from . returns import Returns 20 | from . batch_transform import BatchTransform, batch_transform 21 | 22 | __all__ = [ 23 | 'MovingAverage', 24 | 'MovingStandardDev', 25 | 'MovingVWAP', 26 | 'Returns', 27 | 'BatchTransform', 28 | 'batch_transform' 29 | ] 30 | -------------------------------------------------------------------------------- /etc/git-hooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # An hook script to verify linting and passing unit tests. 4 | # 5 | # Called by "git commit" with no arguments. The hook should 6 | # exit with non-zero status after issuing an appropriate message if 7 | # it wants to stop the commit. 8 | # 9 | # To enable this hook, copy or symlink to your repo's 10 | # ".git/hooks/pre-commit". 11 | # 12 | # Please read the following as it will execute on your machine on each commit. 
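# (For example, one way to enable it from the repository root: cp etc/git-hooks/pre-commit .git/hooks/pre-commit)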
13 | 14 | set -e 15 | 16 | # stash everything that wasn't just staged 17 | # so that we are only testing the staged code 18 | stash_result=$(git stash --keep-index) 19 | 20 | # Run flake8 linting 21 | flake8 zipline tests 22 | # Run unit tests 23 | nosetests -x 24 | 25 | # restore unstaged code 26 | # N.B. this won't run if linting or unit tests fail 27 | # But if either fail, it's probably best to have only the offending 28 | # staged commits 'active', anyway. 29 | stash_result=$(git stash --keep-index) 30 | if [ "$stash_result" != "No local changes to save" ] 31 | then 32 | git stash pop -q 33 | fi 34 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | *************************** 2 | Contributing to the project 3 | *************************** 4 | 5 | Style Guide 6 | =========== 7 | 8 | To ensure that changes and patches are focused on behavior changes, 9 | the zipline codebase adheres to PEP-8, 10 | ``_. 11 | 12 | The maintainers check the code using the flake8 script, 13 | ``_, which is included in the 14 | requirements_dev.txt. 15 | 16 | Before submitting patches or pull requests, please ensure that your 17 | changes pass 18 | 19 | :: 20 | 21 | flake8 zipline tests 22 | 23 | Discussion and Help 24 | =================== 25 | 26 | Discussion of the project is held at the Google Group, 27 | ``_, 28 | ``_. 29 | 30 | Source 31 | ====== 32 | 33 | The source for Zipline is hosted at 34 | ``_. 35 | 36 | Contact 37 | ======= 38 | 39 | For other questions, please contact ``_. 40 | 41 | -------------------------------------------------------------------------------- /docs/zipline.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline` Package 2 | ======================= 3 | 4 | .. automodule:: zipline.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`algorithm` Module 10 | ------------------------- 11 | 12 | .. automodule:: zipline.algorithm 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`sources` Module 18 | ---------------------- 19 | 20 | .. automodule:: zipline.sources 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`test_algorithms` Module 26 | ----------------------------- 27 | 28 | .. automodule:: zipline.test_algorithms 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | :mod:`version` Module 34 | --------------------- 35 | 36 | .. automodule:: zipline.version 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | Subpackages 42 | ----------- 43 | 44 | .. toctree:: 45 | 46 | zipline.data 47 | zipline.finance 48 | zipline.gens 49 | zipline.transforms 50 | zipline.utils 51 | 52 | -------------------------------------------------------------------------------- /docs/zipline.finance.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline.finance` subpackage 2 | ================================== 3 | 4 | .. automodule:: zipline.finance.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`commission` Module 10 | ------------------------- 11 | 12 | .. automodule:: zipline.finance.commission 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`performance` Module 18 | ------------------------- 19 | 20 | .. 
automodule:: zipline.finance.performance 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`risk` Module 26 | ------------------ 27 | 28 | .. automodule:: zipline.finance.risk 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | :mod:`slippage` Module 34 | ------------------------- 35 | 36 | .. automodule:: zipline.finance.slippage 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | :mod:`trading` Module 42 | --------------------- 43 | 44 | .. automodule:: zipline.finance.trading 45 | :members: 46 | :undoc-members: 47 | :show-inheritance: 48 | 49 | -------------------------------------------------------------------------------- /alephnull/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """ 17 | Zipline 18 | """ 19 | 20 | # This is *not* a place to dump arbitrary classes/modules for convenience, 21 | # it is a place to expose the public interfaces. 22 | 23 | __version__ = "0.5.11.dev" 24 | 25 | from . import data 26 | from . import finance 27 | from . import gens 28 | from . import utils 29 | 30 | from . algorithm import TradingAlgorithm 31 | 32 | __all__ = [ 33 | 'data', 34 | 'finance', 35 | 'gens', 36 | 'utils', 37 | 'TradingAlgorithm' 38 | ] 39 | -------------------------------------------------------------------------------- /alephnull/utils/protocol_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from ctypes import Structure, c_ubyte 17 | 18 | 19 | def Enum(*options): 20 | """ 21 | Fast enums are very important when we want really tight 22 | loops. These are probably going to evolve into pure C structs 23 | anyways so might as well get going on that. 24 | """ 25 | class cstruct(Structure): 26 | _fields_ = [(o, c_ubyte) for o in options] 27 | __iter__ = lambda s: iter(range(len(options))) 28 | return cstruct(*range(len(options))) 29 | -------------------------------------------------------------------------------- /docs/zipline.utils.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline.utils` subpackage 2 | =============================== 3 | 4 | .. 
automodule:: zipline.utils.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`factory` Module 10 | --------------------- 11 | 12 | .. automodule:: zipline.utils.factory 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`protocol_units` Module 18 | ---------------------------- 19 | 20 | .. automodule:: zipline.utils.protocol_utils 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`simfactory` Module 26 | -------------------------- 27 | 28 | .. automodule:: zipline.utils.simfactory 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | :mod:`test_utils` Module 34 | ------------------------ 35 | 36 | .. automodule:: zipline.utils.test_utils 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | :mod:`tradingcalendar` Module 42 | ------------------------------ 43 | 44 | .. automodule:: zipline.utils.tradingcalendar 45 | :members: 46 | :undoc-members: 47 | :show-inheritance: 48 | 49 | -------------------------------------------------------------------------------- /docs/zipline.transforms.rst: -------------------------------------------------------------------------------- 1 | :mod:`zipline.transforms` subpackage 2 | ===================================== 3 | 4 | .. automodule:: zipline.transforms.__init__ 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | :mod:`mavg` Module 10 | ------------------------- 11 | 12 | .. automodule:: zipline.transforms.mavg 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | :mod:`returns` Module 18 | ------------------------- 19 | 20 | .. automodule:: zipline.transforms.returns 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | :mod:`stddev` Module 26 | ------------------------- 27 | 28 | .. automodule:: zipline.transforms.stddev 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | :mod:`utils` Module 34 | ------------------------- 35 | 36 | .. automodule:: zipline.transforms.utils 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | :mod:`vwap` Module 42 | ------------------------- 43 | 44 | .. automodule:: zipline.transforms.vwap 45 | :members: 46 | :undoc-members: 47 | :show-inheritance: 48 | 49 | :mod:`talib` Module 50 | ------------------------- 51 | 52 | .. 
automodule:: zipline.transforms.ta 53 | :members: 54 | :undoc-members: 55 | :show-inheritance: 56 | -------------------------------------------------------------------------------- /tests/test_blotter.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | from nose_parameterized import parameterized 4 | from unittest import TestCase 5 | 6 | from zipline.finance.blotter import round_for_minimum_price_variation 7 | 8 | 9 | class BlotterTestCase(TestCase): 10 | 11 | @parameterized.expand([(0.00, 0.00), 12 | (0.01, 0.01), 13 | (0.0005, 0.00), 14 | (1.006, 1.00), 15 | (1.0095, 1.01), 16 | (1.00949, 1.00), 17 | (1.0005, 1.00)]) 18 | def test_round_for_minimum_price_variation_buy(self, price, expected): 19 | result = round_for_minimum_price_variation(price, is_buy=True) 20 | self.assertEqual(result, expected) 21 | self.assertEqual(math.copysign(1.0, result), 22 | math.copysign(1.0, expected)) 23 | 24 | @parameterized.expand([(0.00, 0.00), 25 | (0.01, 0.01), 26 | (0.0005, 0.00), 27 | (1.006, 1.01), 28 | (1.0005, 1.00), 29 | (1.00051, 1.01), 30 | (1.0095, 1.01)]) 31 | def test_round_for_minimum_price_variation_sell(self, price, expected): 32 | result = round_for_minimum_price_variation(price, is_buy=False) 33 | self.assertEqual(result, expected) 34 | self.assertEqual(math.copysign(1.0, result), 35 | math.copysign(1.0, expected)) 36 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | # This code is based on a unittest written by John Salvatier: 17 | # https://github.com/pymc-devs/pymc/blob/pymc3/tests/test_examples.py 18 | 19 | # Disable plotting 20 | # 21 | import matplotlib 22 | matplotlib.use('Agg') 23 | 24 | from os import path 25 | import os 26 | import fnmatch 27 | import imp 28 | 29 | 30 | def test_examples(): 31 | os.chdir(example_dir()) 32 | for fname in all_matching_files('.', '*.py'): 33 | yield check_example, fname 34 | 35 | 36 | def all_matching_files(d, pattern): 37 | def addfiles(fls, dir, nfiles): 38 | nfiles = fnmatch.filter(nfiles, pattern) 39 | nfiles = [path.join(dir, f) for f in nfiles] 40 | fls.extend(nfiles) 41 | 42 | files = [] 43 | path.walk(d, addfiles, files) 44 | return files 45 | 46 | 47 | def example_dir(): 48 | import zipline 49 | d = path.dirname(zipline.__file__) 50 | return path.join(path.abspath(d), 'examples/') 51 | 52 | 53 | def check_example(p): 54 | imp.load_source('__main__', path.basename(p)) 55 | -------------------------------------------------------------------------------- /alephnull/experiment/notes_on_futures_data.txt: -------------------------------------------------------------------------------- 1 | What it would theoretically look like: 2 | 3 | instead of: 4 | data :: DataFrame = load_from_yahoo(stocks=[SYMBOL], indexes={}, start=start, 5 | end=end) 6 | 7 | we would do this: 8 | data :: DataFrame = load_from_ib(contracts=[YGZ14, ESM15], indexes={}, start=start, end=end) 9 | 10 | 11 | Would have a dataframe somehow structured like so (may be lazy): 12 | 13 | ============================================================================== 14 | =================================== Price = Open Interest = Volume =========== 15 | = = N16 = 100.2 = 400 = 20 =========== 16 | Jul 2007 18:46:47 = YG = M17 = 140.2 = 300 = 30 =========== 17 | = = S15 = 160.2 = 100 = 25 =========== 18 | ============================================================================== 19 | = = N16 = 100.2 = 400 = 20 =========== 20 | Jul 2007 18:46:47 = NE = M17 = 140.2 = 300 = 30 =========== 21 | = = S15 = 160.2 = 100 = 25 =========== 22 | ============================================================================== 23 | 24 | hairbrained_strategy = HairbrainedAlgorithm() 25 | results = hairbrained_strategy.run(data) 26 | 27 | To test how this might work, I need some dummy data (I'll hardcode it or something - the interface in real life will be different anyway) and a simple futures trading algorithm. 28 | 29 | NEEDS TO TAKE INTO ACCOUNT: 30 | 31 | Costs of transferring contracts when one expires (rollover? Is that the word?) 32 | Margin requirements (cancelling if you can't make maintenance margin, ) -------------------------------------------------------------------------------- /alephnull/examples/buyapple.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2012 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | from datetime import datetime 19 | import pytz 20 | 21 | from alephnull.algorithm import TradingAlgorithm 22 | from alephnull.utils.factory import load_from_yahoo 23 | 24 | 25 | class BuyApple(TradingAlgorithm): # inherit from TradingAlgorithm 26 | """This is the simplest possible algorithm that does nothing but 27 | buy 1 apple share on each event. 28 | """ 29 | def handle_data(self, data): # overload handle_data() method 30 | self.order('AAPL', 1) # order SID (=0) and amount (=1 shares) 31 | 32 | 33 | if __name__ == '__main__': 34 | start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc) 35 | end = datetime(2010, 1, 1, 0, 0, 0, 0, pytz.utc) 36 | data = load_from_yahoo(stocks=['AAPL'], indexes={}, start=start, 37 | end=end) 38 | simple_algo = BuyApple() 39 | results = simple_algo.run(data) 40 | 41 | ax1 = plt.subplot(211) 42 | results.portfolio_value.plot(ax=ax1) 43 | ax2 = plt.subplot(212, sharex=ax1) 44 | data.AAPL.plot(ax=ax2) 45 | plt.gcf().set_size_inches(18, 8) 46 | -------------------------------------------------------------------------------- /alephnull/sources/data_source.py: -------------------------------------------------------------------------------- 1 | from abc import ( 2 | ABCMeta, 3 | abstractproperty 4 | ) 5 | 6 | from alephnull.protocol import DATASOURCE_TYPE 7 | from alephnull.protocol import Event 8 | 9 | 10 | class DataSource(object): 11 | 12 | __metaclass__ = ABCMeta 13 | 14 | @property 15 | def event_type(self): 16 | return DATASOURCE_TYPE.TRADE 17 | 18 | @property 19 | def mapping(self): 20 | """ 21 | Mappings of the form: 22 | target_key: (mapping_function, source_key) 23 | """ 24 | return {} 25 | 26 | @abstractproperty 27 | def raw_data(self): 28 | """ 29 | An iterator that yields the raw datasource, 30 | in chronological order of data, one event at a time. 31 | """ 32 | NotImplemented 33 | 34 | @abstractproperty 35 | def instance_hash(self): 36 | """ 37 | A hash that represents the unique args to the source. 38 | """ 39 | pass 40 | 41 | def get_hash(self): 42 | return self.__class__.__name__ + "-" + self.instance_hash 43 | 44 | def apply_mapping(self, raw_row): 45 | """ 46 | Override this to hand craft conversion of row. 
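        For example (hypothetical keys): if self.mapping returns {'price': (float, 'px')}, the default implementation below converts a raw row {'px': '10.5'} into {'price': 10.5} and then adds the 'source_id' and 'type' entries.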
47 |         """ 48 |         row = {target: mapping_func(raw_row[source_key]) 49 |                for target, (mapping_func, source_key) 50 |                in self.mapping.items()} 51 |         row.update({'source_id': self.get_hash()}) 52 |         row.update({'type': self.event_type}) 53 |         return row 54 | 55 |     @property 56 |     def mapped_data(self): 57 |         for row in self.raw_data: 58 |             yield Event(self.apply_mapping(row)) 59 | 60 |     def __iter__(self): 61 |         return self 62 | 63 |     def next(self): 64 |         return self.mapped_data.next() 65 | -------------------------------------------------------------------------------- /alephnull/experiment/margin mechanics pseudocode.txt: -------------------------------------------------------------------------------- 1 | for all position@(yesterday's price, today's price, quantity) in positions 2 |     difference = today's price - yesterday's price 3 |     change in margin account = difference * quantity 4 | 5 |     if margin account value - change in margin account >= maintenance margin 6 |         margin account value -= change in margin account 7 |     else ## margin account value - change in margin account < maintenance margin ## 8 |         shortfall = maintenance margin - (margin account value - change in margin account) 9 |         if current capital > shortfall 10 |             margin account value = maintenance margin 11 |             current capital -= shortfall 12 |         else 13 |             (maybe liquidate other positions if this has higher priority? Future.) 14 |             current capital += margin account value 15 |             delete position from positions 16 | 17 | 18 | 19 | global margin_account_value 20 | global maintenance_margin 21 | global current_capital 22 | global positions 23 | 24 | positions_to_remove = [] 25 | 26 | for position in positions: 27 |     last_price, current_price, quantity = position 28 |     difference = current_price - last_price 29 |     margin_account_change = difference * quantity 30 | 31 |     if margin_account_value - margin_account_change >= maintenance_margin: 32 |         margin_account_value -= margin_account_change 33 |     else: 34 |         shortfall = maintenance_margin - (margin_account_value - margin_account_change) 35 |         if current_capital > shortfall: 36 |             margin_account_value = maintenance_margin 37 |             current_capital -= shortfall 38 |         else: 39 |             current_capital += margin_account_value 40 |             positions_to_remove.append(position) 41 | 42 | for position in positions_to_remove: 43 |     positions.remove(position) -------------------------------------------------------------------------------- /alephnull/examples/buystock.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Carter Bain Wealth Management 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License.
16 | 17 | import matplotlib.pyplot as plt 18 | from datetime import datetime 19 | import pytz 20 | 21 | from alephnull.algorithm import TradingAlgorithm 22 | from alephnull.utils.factory import load_from_yahoo 23 | 24 | SYMBOL = 'GS' 25 | 26 | class BuyStock(TradingAlgorithm): # inherit from TradingAlgorithm 27 | """This is the simplest possible algorithm that does nothing but 28 | buy 1 share of SYMBOL on each event. 29 | """ 30 | def handle_data(self, data): # overload handle_data() method 31 | self.order(SYMBOL, 1) # order SID (=0) and amount (=1 shares) 32 | 33 | 34 | if __name__ == '__main__': 35 | start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc) 36 | end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc) 37 | data = load_from_yahoo(stocks=[SYMBOL], indexes={}, start=start, 38 | end=end) 39 | simple_algo = BuyStock() 40 | results = simple_algo.run(data) 41 | 42 | ax1 = plt.subplot(211) 43 | results.portfolio_value.plot(ax=ax1) 44 | ax2 = plt.subplot(212, sharex=ax1) 45 | stock_data = getattr(data, SYMBOL) 46 | stock_data.plot(ax=ax2) 47 | plt.gcf().set_size_inches(18, 8) -------------------------------------------------------------------------------- /alephnull/gens/composites.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import heapq 17 | 18 | 19 | def _decorate_source(source): 20 | for message in source: 21 | yield ((message.dt, message.source_id), message) 22 | 23 | 24 | def date_sorted_sources(*sources): 25 | """ 26 | Takes an iterable of sources, generating namestrings and 27 | piping their output into date_sort. 28 | """ 29 | sorted_stream = heapq.merge(*(_decorate_source(s) for s in sources)) 30 | 31 | # Strip out key decoration 32 | for _, message in sorted_stream: 33 | yield message 34 | 35 | 36 | def sequential_transforms(stream_in, *transforms): 37 | """ 38 | Apply each transform in transforms sequentially to each event in stream_in. 39 | Each transform application will add a new entry indexed to the transform's 40 | hash string. 41 | """ 42 | # Recursively apply all transforms to the stream. 43 | stream_out = reduce(lambda stream, tnfm: tnfm.transform(stream), 44 | transforms, 45 | stream_in) 46 | 47 | return stream_out 48 | 49 | 50 | def alias_dt(stream_in): 51 | """ 52 | Alias the dt field to datetime on each message. 53 | """ 54 | for message in stream_in: 55 | message['datetime'] = message['dt'] 56 | yield message 57 | -------------------------------------------------------------------------------- /tests/risk/test_minute_risk.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import unittest 17 | import datetime 18 | import pytz 19 | 20 | from zipline.finance.trading import SimulationParameters 21 | from zipline.finance import risk 22 | 23 | 24 | class TestMinuteRisk(unittest.TestCase): 25 | 26 | def setUp(self): 27 | 28 | start_date = datetime.datetime( 29 | year=2006, 30 | month=1, 31 | day=3, 32 | hour=0, 33 | minute=0, 34 | tzinfo=pytz.utc) 35 | end_date = datetime.datetime( 36 | year=2006, month=1, day=3, tzinfo=pytz.utc) 37 | 38 | self.sim_params = SimulationParameters( 39 | period_start=start_date, 40 | period_end=end_date 41 | ) 42 | self.sim_params.emission_rate = 'minute' 43 | 44 | def test_minute_risk(self): 45 | 46 | risk_metrics = risk.RiskMetricsCumulative(self.sim_params) 47 | 48 | first_dt = self.sim_params.first_open 49 | second_dt = self.sim_params.first_open + datetime.timedelta(minutes=1) 50 | 51 | risk_metrics.update(first_dt, 1.0, 2.0) 52 | 53 | self.assertEquals(1, len(risk_metrics.metrics.alpha.valid())) 54 | 55 | risk_metrics.update(second_dt, 3.0, 4.0) 56 | 57 | self.assertEquals(2, len(risk_metrics.metrics.alpha.valid())) 58 | -------------------------------------------------------------------------------- /vagrant_init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script will be run by Vagrant to 4 | # set up everything necessary to use Zipline. 5 | 6 | # Because this is intended be a disposable dev VM setup, 7 | # no effort is made to use virtualenv/virtualenvwrapper 8 | 9 | # It is assumed that you have "vagrant up" 10 | # from the root of the zipline github checkout. 11 | # This will put the zipline code in the 12 | # /vagrant folder in the system. 13 | 14 | VAGRANT_LOG="/home/vagrant/vagrant.log" 15 | 16 | # Need to "hold" grub-pc so that it doesn't break 17 | # the rest of the package installs (in case of a "apt-get upgrade") 18 | # (grub-pc will complain that your boot device changed, probably 19 | # due to something that vagrant did, and break your console) 20 | 21 | echo "Obstructing updates to grub-pc..." 22 | apt-mark hold grub-pc 2>&1 >> "$VAGRANT_LOG" 23 | 24 | # Run a full apt-get update first. 25 | echo "Updating apt-get caches..." 26 | apt-get -y update 2>&1 >> "$VAGRANT_LOG" 27 | 28 | # Install required packages 29 | echo "Installing required packages..." 30 | apt-get -y install python-pip python-dev g++ make libfreetype6-dev libpng-dev libopenblas-dev liblapack-dev gfortran 2>&1 >> "$VAGRANT_LOG" 31 | 32 | # Add ta-lib 33 | echo "Installing ta-lib integration..." 34 | wget http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz 2>&1 >> "$VAGRANT_LOG" 35 | tar -xvzf ta-lib-0.4.0-src.tar.gz 2>&1 >> "$VAGRANT_LOG" 36 | cd ta-lib/ 37 | ./configure --prefix=/usr 2>&1 >> "$VAGRANT_LOG" 38 | make 2>&1 >> "$VAGRANT_LOG" 39 | sudo make install 2>&1 >> "$VAGRANT_LOG" 40 | cd ../ 41 | 42 | # Add Zipline python dependencies 43 | echo "Installing python package dependencies..." 
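# (ordered_pip.sh installs each non-blank, non-comment line of the requirements file one at a time, in file order)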
44 | /vagrant/etc/ordered_pip.sh /vagrant/etc/requirements.txt 2>&1 >> "$VAGRANT_LOG" 45 | # Add scipy next (if it's not done now, breaks installing of statsmodels for some reason ??) 46 | echo "Installing scipy..." 47 | pip install scipy==0.12.0 2>&1 >> "$VAGRANT_LOG" 48 | echo "Installing zipline dev python dependencies..." 49 | pip install -r /vagrant/etc/requirements_dev.txt 2>&1 >> "$VAGRANT_LOG" 50 | echo "Finished!" 51 | -------------------------------------------------------------------------------- /alephnull/roll_method.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from pandas import Series, DataFrame 3 | 4 | from alephnull.protocol import BarData, SIDData 5 | 6 | 7 | def roll(logic): 8 | def wrap(func): 9 | def modified_func(self, data): 10 | positions = self.portfolio.positions 11 | frames = {} 12 | for sym in data.keys(): 13 | frames[sym] = DataFrame({k: Series(v.__dict__) for 14 | k, v in data[sym].iteritems()}) 15 | 16 | all_ = pd.concat(frames, axis=1).T 17 | try: 18 | all_ = all_.groupby(axis=0, level=0).apply(logic).reset_index( 19 | level=(0, 2), drop=True) 20 | except: 21 | all_ = all_.groupby(axis=0, level=0).apply(logic) 22 | 23 | 24 | #Todo: handle multiple contract returns 25 | all_ = all_.groupby(axis=0, level=0).agg(lambda x: x.max()) 26 | 27 | #Todo: Data should be reconstructed into BarData object 28 | data = all_.T.to_dict() 29 | 30 | front_months = [(sym, all_.ix[sym]['contract']) for sym in all_.index] 31 | back_months = [sym for sym in self.perf_tracker.get_portfolio().positions 32 | if sym not in front_months] 33 | 34 | offsets = {} 35 | for sym in back_months: 36 | offsets[sym] = 0 37 | for order_id in self.get_orders(sym): 38 | order = self.blotter.orders[order_id] 39 | if order.status != 3: 40 | offsets[sym] += (order.amount - order.filled) 41 | stack = self.perf_tracker.get_portfolio().positions[sym].amount + offsets[sym] 42 | if stack != 0: 43 | self.order(sym, -stack) 44 | [self.order(exp, stack) for exp in front_months if exp[0] == sym[0]] 45 | 46 | bar_data = BarData() 47 | bar_data.__dict__['_data'].update({k: SIDData(v) for k, v in data.iteritems()}) 48 | 49 | return func(self, bar_data) 50 | 51 | return modified_func 52 | 53 | return wrap 54 | 55 | 56 | -------------------------------------------------------------------------------- /tests/risk/upload_answer_key.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """ 17 | Utility script for maintainer use to upload current version of the answer key 18 | spreadsheet to S3. 19 | """ 20 | import hashlib 21 | 22 | import boto 23 | 24 | from . 
import answer_key 25 | 26 | BUCKET_NAME = 'zipline-test-data' 27 | 28 | 29 | def main(): 30 | with open(answer_key.ANSWER_KEY_PATH, 'r') as f: 31 | md5 = hashlib.md5() 32 | while True: 33 | buf = f.read(1024) 34 | if not buf: 35 | break 36 | md5.update(buf) 37 | local_hash = md5.hexdigest() 38 | 39 | s3_conn = boto.connect_s3() 40 | 41 | bucket = s3_conn.get_bucket(BUCKET_NAME) 42 | key = boto.s3.key.Key(bucket) 43 | 44 | key.key = "risk/{local_hash}/risk-answer-key.xlsx".format( 45 | local_hash=local_hash) 46 | key.set_contents_from_filename(answer_key.ANSWER_KEY_PATH) 47 | key.set_acl('public-read') 48 | 49 | download_link = "http://s3.amazonaws.com/{bucket_name}/{key}".format( 50 | bucket_name=BUCKET_NAME, 51 | key=key.key) 52 | 53 | print("Uploaded to key: {key}".format(key=key.key)) 54 | print("Download link: {download_link}".format(download_link=download_link)) 55 | 56 | # Now update checksum file with the recently added answer key. 57 | # checksum file update will be then need to be commited via git. 58 | with open(answer_key.ANSWER_KEY_CHECKSUMS_PATH, 'a') as checksum_file: 59 | checksum_file.write(local_hash) 60 | checksum_file.write("\n") 61 | 62 | if __name__ == "__main__": 63 | main() 64 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | from unittest import TestCase 17 | from zipline.utils.factory import (load_from_yahoo, 18 | load_bars_from_yahoo) 19 | import pandas as pd 20 | import pytz 21 | import numpy as np 22 | 23 | 24 | class TestFactory(TestCase): 25 | def test_load_from_yahoo(self): 26 | stocks = ['AAPL', 'GE'] 27 | start = pd.datetime(1993, 1, 1, 0, 0, 0, 0, pytz.utc) 28 | end = pd.datetime(2002, 1, 1, 0, 0, 0, 0, pytz.utc) 29 | data = load_from_yahoo(stocks=stocks, start=start, end=end) 30 | 31 | assert data.index[0] == pd.Timestamp('1993-01-04 00:00:00+0000') 32 | assert data.index[-1] == pd.Timestamp('2001-12-31 00:00:00+0000') 33 | for stock in stocks: 34 | assert stock in data.columns 35 | 36 | np.testing.assert_raises( 37 | AssertionError, load_from_yahoo, stocks=stocks, 38 | start=end, end=start 39 | ) 40 | 41 | def test_load_bars_from_yahoo(self): 42 | stocks = ['AAPL', 'GE'] 43 | start = pd.datetime(1993, 1, 1, 0, 0, 0, 0, pytz.utc) 44 | end = pd.datetime(2002, 1, 1, 0, 0, 0, 0, pytz.utc) 45 | data = load_bars_from_yahoo(stocks=stocks, start=start, end=end) 46 | 47 | assert data.major_axis[0] == pd.Timestamp('1993-01-04 00:00:00+0000') 48 | assert data.major_axis[-1] == pd.Timestamp('2001-12-31 00:00:00+0000') 49 | for stock in stocks: 50 | assert stock in data.items 51 | 52 | for ohlc in ['open', 'high', 'low', 'close', 'volume', 'price']: 53 | assert ohlc in data.minor_axis 54 | 55 | np.testing.assert_raises( 56 | AssertionError, load_bars_from_yahoo, stocks=stocks, 57 | start=end, end=start 58 | ) 59 | -------------------------------------------------------------------------------- /tests/risk/AnswerKeyLink.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "AnswerKeyAnnotations" 4 | }, 5 | "nbformat": 3, 6 | "nbformat_minor": 0, 7 | "worksheets": [ 8 | { 9 | "cells": [ 10 | { 11 | "cell_type": "code", 12 | "collapsed": true, 13 | "input": [ 14 | "#\n", 15 | "# Copyright 2013 Quantopian, Inc.\n", 16 | "#\n", 17 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 18 | "# you may not use this file except in compliance with the License.\n", 19 | "# You may obtain a copy of the License at\n", 20 | "#\n", 21 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 22 | "#\n", 23 | "# Unless required by applicable law or agreed to in writing, software\n", 24 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 25 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", 26 | "# See the License for the specific language governing permissions and\n", 27 | "# limitations under the License.\n", 28 | "\n", 29 | "from annotation_utils import Markdown\n", 30 | "import answer_key" 31 | ], 32 | "language": "python", 33 | "outputs": [], 34 | "prompt_number": 1 35 | }, 36 | { 37 | "cell_type": "code", 38 | "collapsed": false, 39 | "input": [ 40 | "Markdown(\"\"\"\n", 41 | "Download link for latest answer key: [{latest_answer_key_url}]({latest_answer_key_url})\n", 42 | "\"\"\".format(latest_answer_key_url=answer_key.LATEST_ANSWER_KEY_URL))" 43 | ], 44 | "language": "python", 45 | "outputs": [ 46 | { 47 | "html": [ 48 | "

Download link for latest answer key: https://s3.amazonaws.com/zipline-test-data/risk/5b48e6a70181d73ecb7f07df5a3092e2/risk-answer-key.xlsx

" 49 | ], 50 | "output_type": "pyout", 51 | "prompt_number": 2, 52 | "text": [ 53 | "'\\nDownload link for latest answer key: [https://s3.amazonaws.com/zipline-test-data/risk/cc507b6fca18aabadac69657181edd4e/risk-answer-key.xlsx](https://s3.amazonaws.com/zipline-test-data/risk/5b48e6a70181d73ecb7f07df5a3092e2/risk-answer-key.xlsx)\\n'" 54 | ] 55 | } 56 | ], 57 | "prompt_number": 2 58 | } 59 | ] 60 | } 61 | ] 62 | } -------------------------------------------------------------------------------- /tests/risk/AnswerKeyAnnotations.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "AnswerKeyAnnotations" 4 | }, 5 | "nbformat": 3, 6 | "nbformat_minor": 0, 7 | "worksheets": [ 8 | { 9 | "cells": [ 10 | { 11 | "cell_type": "code", 12 | "collapsed": false, 13 | "input": [ 14 | "#\n", 15 | "# Copyright 2013 Quantopian, Inc.\n", 16 | "#\n", 17 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 18 | "# you may not use this file except in compliance with the License.\n", 19 | "# You may obtain a copy of the License at\n", 20 | "#\n", 21 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 22 | "#\n", 23 | "# Unless required by applicable law or agreed to in writing, software\n", 24 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 25 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", 26 | "# See the License for the specific language governing permissions and\n", 27 | "# limitations under the License." 28 | ], 29 | "language": "python", 30 | "outputs": [] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "collapsed": false, 35 | "input": [ 36 | "%load_ext autoreload\n", 37 | "%autoreload 2" 38 | ], 39 | "language": "python", 40 | "outputs": [] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "collapsed": false, 45 | "input": [ 46 | "import datetime\n", 47 | "import pandas as pd\n", 48 | "from IPython.display import HTML\n", 49 | "\n", 50 | "import answer_key\n", 51 | "ANSWER_KEY = answer_key.ANSWER_KEY" 52 | ], 53 | "language": "python", 54 | "outputs": [] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "collapsed": false, 59 | "input": [ 60 | "print 'Period Returns Index'\n", 61 | "print ANSWER_KEY.RETURNS" 62 | ], 63 | "language": "python", 64 | "outputs": [] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "collapsed": false, 69 | "input": [ 70 | "HTML(answer_key.RETURNS_DATA.to_html())" 71 | ], 72 | "language": "python", 73 | "outputs": [] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "collapsed": false, 78 | "input": [ 79 | "ANSWER_KEY.ALGORITHM_CUMULATIVE_SHARPE" 80 | ], 81 | "language": "python", 82 | "outputs": [] 83 | } 84 | ] 85 | } 86 | ] 87 | } -------------------------------------------------------------------------------- /alephnull/examples/test_algo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Carter Bain Wealth Management 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import datetime as dt 18 | import string 19 | 20 | import numpy as np 21 | import pandas as pd 22 | from pandas import DataFrame 23 | 24 | from alephnull.algorithm import TradingAlgorithm 25 | from alephnull.sources.futures_data_frame_source import FuturesDataFrameSource 26 | from alephnull.roll_method import roll 27 | 28 | 29 | source = DataFrame(np.random.uniform(100, 200, [60, 30])) 30 | cols = ['price', 'volume', 'open_interest'] 31 | scale = (len(source.columns) / len(cols)) 32 | source.columns = [scale * cols] 33 | sym = lambda x: np.random.choice([abc for abc in x], 34 | np.random.choice([2, 3])) 35 | month = lambda x: np.random.choice([abc for abc in x], 36 | np.random.choice([1])) 37 | 38 | contracts = np.ravel([[(''.join(month(string.letters[:26])) + 39 | str(np.random.choice([14, 15, 16])))] * len(cols) 40 | for x in xrange(len(source.columns) / len(cols) / 2)]) 41 | 42 | level_1 = len(source.columns) / len(contracts) * list(contracts) 43 | 44 | numsyms = len(source.columns) / (len(set(level_1)) * len(cols)) 45 | underlyings = [''.join(sym(string.letters[:26])) for x in xrange(numsyms)] 46 | level_0 = np.ravel([[sym] * len(set(level_1)) * len(cols) for sym in underlyings]) 47 | 48 | source.columns = pd.MultiIndex.from_tuples(zip(level_0, level_1, source.columns)) 49 | source.index = pd.date_range(start=dt.datetime.utcnow() - dt.timedelta(days=len(source.index) - 1), 50 | end=dt.datetime.utcnow(), freq='D') 51 | 52 | futdata = FuturesDataFrameSource(source.tz_localize('UTC')) 53 | 54 | 55 | class FrontTrader(TradingAlgorithm): 56 | @roll(lambda x: x[x['open_interest'] == x['open_interest'].max()]) 57 | def handle_data(self, data): 58 | for sym in data.keys(): 59 | self.order((sym, data[sym]['contract']), 2) 60 | return data 61 | 62 | 63 | bot = FrontTrader() 64 | stats = bot.run(futdata) 65 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Zipline documentation master file, created by 2 | sphinx-quickstart on Wed Feb 8 15:29:56 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. module:: zipline 7 | 8 | **************************************************** 9 | Zipline: Financial Backtester for Trading Algorithms 10 | **************************************************** 11 | 12 | Python is quickly becoming the glue language which holds together data science 13 | and related fields like quantitative finance. Zipline is a new, BSD-licensed 14 | quantitative trading system which allows easy backtesting of investment 15 | algorithms on historical data. The system is fundamentally event-driven and a 16 | close approximation of how live-trading systems operate. Moreover, Zipline 17 | comes "batteries included" as many common statistics like 18 | moving average and linear regression can be readily accessed from within a 19 | user-written algorithm. Input of historical data and output of performance 20 | statistics is based on Pandas DataFrames to integrate nicely into the existing 21 | Python eco-system. Furthermore, statistic and machine learning libraries like 22 | matplotlib, scipy, statsmodels, and sklearn support development, analysis and 23 | visualization of state-of-the-art trading systems. 
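A minimal algorithm in the spirit of the bundled ``buyapple.py`` example illustrates the workflow below; the class name and date range are only illustrative, and the imports assume the package layout used in the test suite::

    from datetime import datetime
    import pytz

    from zipline.algorithm import TradingAlgorithm
    from zipline.utils.factory import load_from_yahoo

    class BuyApple(TradingAlgorithm):
        def handle_data(self, data):   # called once per bar (daily here)
            self.order('AAPL', 10)     # accumulate 10 shares of AAPL each day

    start = datetime(1990, 1, 1, 0, 0, 0, 0, pytz.utc)
    end = datetime(1991, 1, 1, 0, 0, 0, 0, pytz.utc)
    data = load_from_yahoo(stocks=['AAPL'], indexes={}, start=start, end=end)
    results = BuyApple().run(data)     # pandas DataFrame of daily performance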
24 | 25 | Zipline is currently used in production as the backtesting engine 26 | powering `quantopian.com `_ -- a free, community-centered 27 | platform that allows development and real-time backtesting of trading 28 | algorithms in the web browser. 29 | 30 | Features 31 | ======== 32 | 33 | * Ease of use: Zipline tries to get out of your way so that you can focus on 34 | algorithm development. See below for a code example. 35 | 36 | * Zipline comes "batteries included" as many common statistics like moving 37 | average and linear regression can be readily accessed from within a 38 | user-written algorithm. 39 | 40 | * Input of historical data and output of performance statistics is based on 41 | Pandas DataFrames to integrate nicely into the existing Python eco-system. 42 | 43 | * Statistic and machine learning libraries like matplotlib, scipy, statsmodels, 44 | and sklearn support development, analysis and visualization of 45 | state-of-the-art trading systems. 46 | 47 | Contents 48 | ======== 49 | 50 | .. toctree:: 51 | :maxdepth: 4 52 | 53 | manifesto.rst 54 | installation.rst 55 | quickstart.rst 56 | contributing.rst 57 | modules.rst 58 | 59 | Indices and tables 60 | ================== 61 | 62 | * :ref:`genindex` 63 | * :ref:`modindex` 64 | * :ref:`search` 65 | -------------------------------------------------------------------------------- /alephnull/gens/utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | import pytz 18 | import numbers 19 | 20 | from hashlib import md5 21 | from datetime import datetime 22 | from alephnull.protocol import DATASOURCE_TYPE 23 | 24 | 25 | def hash_args(*args, **kwargs): 26 | """Define a unique string for any set of representable args.""" 27 | arg_string = '_'.join([str(arg) for arg in args]) 28 | kwarg_string = '_'.join([str(key) + '=' + str(value) 29 | for key, value in kwargs.iteritems()]) 30 | combined = ':'.join([arg_string, kwarg_string]) 31 | 32 | hasher = md5() 33 | hasher.update(combined) 34 | return hasher.hexdigest() 35 | 36 | 37 | def assert_datasource_protocol(event): 38 | """Assert that an event meets the protocol for datasource outputs.""" 39 | 40 | assert isinstance(event.source_id, basestring) 41 | assert event.type in DATASOURCE_TYPE 42 | 43 | # Done packets have no dt. 
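# (DONE events therefore skip the timestamp checks below; every other datasource event must carry a timezone-aware UTC datetime.)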
44 | if not event.type == DATASOURCE_TYPE.DONE: 45 | assert isinstance(event.dt, datetime) 46 | assert event.dt.tzinfo == pytz.utc 47 | 48 | 49 | def assert_trade_protocol(event): 50 | """Assert that an event meets the protocol for datasource TRADE outputs.""" 51 | assert_datasource_protocol(event) 52 | 53 | assert event.type == DATASOURCE_TYPE.TRADE 54 | assert isinstance(event.sid, int) 55 | assert isinstance(event.price, numbers.Real) 56 | assert isinstance(event.volume, numbers.Integral) 57 | assert isinstance(event.dt, datetime) 58 | 59 | 60 | def assert_datasource_unframe_protocol(event): 61 | """Assert that an event is valid output of zp.DATASOURCE_UNFRAME.""" 62 | assert isinstance(event.source_id, basestring) 63 | assert event.type in DATASOURCE_TYPE 64 | 65 | 66 | def assert_sort_protocol(event): 67 | """Assert that an event is valid input to zp.FEED_FRAME.""" 68 | assert isinstance(event.source_id, basestring) 69 | assert event.type in DATASOURCE_TYPE 70 | 71 | 72 | def assert_sort_unframe_protocol(event): 73 | """Same as above.""" 74 | assert isinstance(event.source_id, basestring) 75 | assert event.type in DATASOURCE_TYPE 76 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | import sys 17 | 18 | from setuptools import setup, find_packages 19 | 20 | LONG_DESCRIPTION = None 21 | README_MARKDOWN = None 22 | 23 | with open('README.md') as markdown_source: 24 | README_MARKDOWN = markdown_source.read() 25 | 26 | if 'upload' in sys.argv: 27 | # Convert the README.md file to ReST, since PyPI uses ReST for formatting. 28 | # This allows us to keep one canonical README file, README.md. 29 | # The conversion only needs to be done on upload. 30 | # Otherwise, the pandoc import and the errors that are thrown when 31 | # pandoc is not installed are both overhead and a source of confusion for 32 | # general usage/installation. 33 | import pandoc 34 | pandoc.core.PANDOC_PATH = 'pandoc' 35 | doc = pandoc.Document() 36 | doc.markdown = README_MARKDOWN 37 | LONG_DESCRIPTION = doc.rst 38 | else: 39 | # If pandoc isn't installed, e.g. when downloading from pip, 40 | # just use the regular README.
41 | LONG_DESCRIPTION = README_MARKDOWN 42 | 43 | setup( 44 | name='alephnull', 45 | version='0.5.11.dev', 46 | description='A backtester for financial algorithms.', 47 | author='Quantopian Inc.', 48 | author_email='opensource@quantopian.com', 49 | packages=find_packages(), 50 | long_description=LONG_DESCRIPTION, 51 | license='Apache 2.0', 52 | classifiers=[ 53 | 'Development Status :: 4 - Beta', 54 | 'License :: OSI Approved :: Apache Software License', 55 | 'Natural Language :: English', 56 | 'Programming Language :: Python', 57 | 'Programming Language :: Python :: 2.7', 58 | 'Operating System :: OS Independent', 59 | 'Intended Audience :: Science/Research', 60 | 'Topic :: Office/Business :: Financial', 61 | 'Topic :: Scientific/Engineering :: Information Analysis', 62 | 'Topic :: System :: Distributed Computing', 63 | ], 64 | install_requires=[ 65 | 'iso8601', 66 | 'Logbook', 67 | 'pytz', 68 | 'requests', 69 | 'numpy', 70 | 'pandas' 71 | ], 72 | url="https://github.com/quantopian/zipline" 73 | ) 74 | -------------------------------------------------------------------------------- /alephnull/utils/test_utils.py: -------------------------------------------------------------------------------- 1 | from logbook import FileHandler 2 | from alephnull.finance.blotter import ORDER_STATUS 3 | 4 | 5 | def setup_logger(test, path='test.log'): 6 | test.log_handler = FileHandler(path) 7 | test.log_handler.push_application() 8 | 9 | 10 | def teardown_logger(test): 11 | test.log_handler.pop_application() 12 | test.log_handler.close() 13 | 14 | 15 | def drain_zipline(test, zipline): 16 | output = [] 17 | transaction_count = 0 18 | msg_counter = 0 19 | # start the simulation 20 | for update in zipline: 21 | msg_counter += 1 22 | output.append(update) 23 | if 'daily_perf' in update: 24 | transaction_count += \ 25 | len(update['daily_perf']['transactions']) 26 | 27 | return output, transaction_count 28 | 29 | 30 | def assert_single_position(test, zipline): 31 | 32 | output, transaction_count = drain_zipline(test, zipline) 33 | 34 | if 'expected_transactions' in test.zipline_test_config: 35 | test.assertEqual( 36 | test.zipline_test_config['expected_transactions'], 37 | transaction_count 38 | ) 39 | else: 40 | test.assertEqual( 41 | test.zipline_test_config['order_count'], 42 | transaction_count 43 | ) 44 | 45 | # the final message is the risk report, the second to 46 | # last is the final day's results. Positions is a list of 47 | # dicts. 48 | closing_positions = output[-2]['daily_perf']['positions'] 49 | 50 | # confirm that all orders were filled. 51 | # iterate over the output updates, overwriting 52 | # orders when they are updated. Then check the status on all. 53 | orders_by_id = {} 54 | for update in output: 55 | if 'daily_perf' in update: 56 | if 'orders' in update['daily_perf']: 57 | for order in update['daily_perf']['orders']: 58 | orders_by_id[order['id']] = order 59 | 60 | for order in orders_by_id.itervalues(): 61 | test.assertEqual( 62 | order['status'], 63 | ORDER_STATUS.FILLED, 64 | "") 65 | 66 | test.assertEqual( 67 | len(closing_positions), 68 | 1, 69 | "Portfolio should have one position." 
70 | ) 71 | 72 | sid = test.zipline_test_config['sid'] 73 | test.assertEqual( 74 | closing_positions[0]['sid'], 75 | sid, 76 | "Portfolio should have one position in " + str(sid) 77 | ) 78 | 79 | return output, transaction_count 80 | 81 | 82 | class ExceptionSource(object): 83 | 84 | def __init__(self): 85 | pass 86 | 87 | def get_hash(self): 88 | return "ExceptionSource" 89 | 90 | def __iter__(self): 91 | return self 92 | 93 | def next(self): 94 | 5 / 0 95 | 96 | 97 | class ExceptionTransform(object): 98 | 99 | def __init__(self): 100 | self.window_length = 1 101 | pass 102 | 103 | def get_hash(self): 104 | return "ExceptionTransform" 105 | 106 | def update(self, event): 107 | assert False, "An assertion message" 108 | -------------------------------------------------------------------------------- /alephnull/finance/commission.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | class PerShare(object): 18 | """ 19 | Calculates a commission for a transaction based on a per 20 | share cost. 21 | """ 22 | 23 | def __init__(self, cost=0.03): 24 | """ 25 | Cost parameter is the cost of a trade per-share. $0.03 26 | means three cents per share, which is a very conservative 27 | (quite high) for per share costs. 28 | """ 29 | self.cost = float(cost) 30 | 31 | def __repr__(self): 32 | return "{class_name}(cost={cost})".format( 33 | class_name=self.__class__.__name__, 34 | cost=self.cost) 35 | 36 | def calculate(self, transaction): 37 | """ 38 | returns a tuple of: 39 | (per share commission, total transaction commission) 40 | """ 41 | return self.cost, abs(transaction.amount * self.cost) 42 | 43 | 44 | class PerTrade(object): 45 | """ 46 | Calculates a commission for a transaction based on a per 47 | trade cost. 48 | """ 49 | 50 | def __init__(self, cost=5.0): 51 | """ 52 | Cost parameter is the cost of a trade, regardless of 53 | share count. $5.00 per trade is fairly typical of 54 | discount brokers. 55 | """ 56 | # Cost needs to be floating point so that calculation using division 57 | # logic does not floor to an integer. 58 | self.cost = float(cost) 59 | 60 | def calculate(self, transaction): 61 | """ 62 | returns a tuple of: 63 | (per share commission, total transaction commission) 64 | """ 65 | if transaction.amount == 0: 66 | return 0.0, 0.0 67 | 68 | return abs(self.cost / transaction.amount), self.cost 69 | 70 | 71 | class PerDollar(object): 72 | """ 73 | Calculates a commission for a transaction based on a per 74 | dollar cost. 75 | """ 76 | 77 | def __init__(self, cost=0.0015): 78 | """ 79 | Cost parameter is the cost of a trade per-dollar. 
0.0015 80 | on $1 million means $1,500 commission (=1,000,000 x 0.0015) 81 | """ 82 | self.cost = float(cost) 83 | 84 | def __repr__(self): 85 | return "{class_name}(cost={cost})".format( 86 | class_name=self.__class__.__name__, 87 | cost=self.cost) 88 | 89 | def calculate(self, transaction): 90 | """ 91 | returns a tuple of: 92 | (per share commission, total transaction commission) 93 | """ 94 | cost_per_share = transaction.price * self.cost 95 | return cost_per_share, abs(transaction.amount) * cost_per_share 96 | -------------------------------------------------------------------------------- /tests/test_sources.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | import pandas as pd 16 | import pytz 17 | from itertools import cycle 18 | 19 | from unittest import TestCase 20 | 21 | import zipline.utils.factory as factory 22 | from zipline.sources import DataFrameSource, DataPanelSource 23 | 24 | 25 | class TestDataFrameSource(TestCase): 26 | def test_df_source(self): 27 | source, df = factory.create_test_df_source() 28 | assert isinstance(source.start, pd.lib.Timestamp) 29 | assert isinstance(source.end, pd.lib.Timestamp) 30 | 31 | for expected_dt, expected_price in df.iterrows(): 32 | sid0 = source.next() 33 | 34 | assert expected_dt == sid0.dt 35 | assert expected_price[0] == sid0.price 36 | 37 | def test_df_sid_filtering(self): 38 | _, df = factory.create_test_df_source() 39 | source = DataFrameSource(df, sids=[0]) 40 | assert 1 not in [event.sid for event in source], \ 41 | "DataFrameSource should only stream selected sid 0, not sid 1." 42 | 43 | def test_panel_source(self): 44 | source, panel = factory.create_test_panel_source() 45 | assert isinstance(source.start, pd.lib.Timestamp) 46 | assert isinstance(source.end, pd.lib.Timestamp) 47 | for event in source: 48 | self.assertTrue('sid' in event) 49 | self.assertTrue('arbitrary' in event) 50 | self.assertTrue('volume' in event) 51 | self.assertTrue('price' in event) 52 | self.assertEquals(event['arbitrary'], 1.) 
53 | self.assertEquals(event['volume'], 1000) 54 | self.assertEquals(event['sid'], 0) 55 | self.assertTrue(isinstance(event['volume'], int)) 56 | self.assertTrue(isinstance(event['arbitrary'], float)) 57 | 58 | def test_yahoo_bars_to_panel_source(self): 59 | stocks = ['AAPL', 'GE'] 60 | start = pd.datetime(1993, 1, 1, 0, 0, 0, 0, pytz.utc) 61 | end = pd.datetime(2002, 1, 1, 0, 0, 0, 0, pytz.utc) 62 | data = factory.load_bars_from_yahoo(stocks=stocks, 63 | indexes={}, 64 | start=start, 65 | end=end) 66 | 67 | check_fields = ['sid', 'open', 'high', 'low', 'close', 68 | 'volume', 'price'] 69 | source = DataPanelSource(data) 70 | stocks_iter = cycle(stocks) 71 | for event in source: 72 | for check_field in check_fields: 73 | self.assertIn(check_field, event) 74 | self.assertTrue(isinstance(event['volume'], (int, long))) 75 | self.assertEqual(stocks_iter.next(), event['sid']) 76 | -------------------------------------------------------------------------------- /alephnull/sources/futures_data_frame_source.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from alephnull.gens.utils import hash_args 4 | 5 | from alephnull.sources.data_source import DataSource 6 | 7 | 8 | class FuturesDataFrameSource(DataSource): 9 | """ 10 | Yields all events in event_list that match the given sid_filter. 11 | If no event_list is specified, generates an internal stream of events 12 | to filter. Returns all events if filter is None. 13 | 14 | Configuration options: 15 | 16 | sids : list of values representing simulated internal sids 17 | start : start date 18 | delta : timedelta between internal events 19 | filter : filter to remove the sids 20 | """ 21 | 22 | def __init__(self, data, **kwargs): 23 | """ 24 | Data must be a DataFrame formatted like this: 25 | 26 | ################################################################################################# 27 | # # GS # TW # 28 | # # N10 # H10 # G14 # H14 # 29 | # # Price # Volume # Price # Volume # Price # Metric3 # Price # Metric3 # 30 | # 2013-12-20 00:09:15 # 101.00 # 1000 # 60.34 # 2500 # 400.00 # -0.0034 # Price # -5.0 # 31 | # 2013-12-20 00:09:17 # 201.00 # 2000 # 20.34 # 2500 # 200.00 # -2.0034 # Price # -2.0 # 32 | # etc... # 33 | ################################################################################################# 34 | 35 | """ 36 | assert isinstance(data.index, pd.tseries.index.DatetimeIndex) 37 | 38 | self.data = data 39 | # Unpack config dictionary with default values. 40 | self.sids = kwargs.get('sids', list(set(['.'.join(tup[:2]) for tup in data.columns]))) 41 | self.start = kwargs.get('start', data.index[0]) 42 | self.end = kwargs.get('end', data.index[-1]) 43 | 44 | # Hash_value for downstream sorting. 
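# hash_args (defined in alephnull.gens.utils) folds the frame and the keyword arguments into an md5 hex digest, so two sources built from identical inputs produce the same identifier.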
45 | self.arg_string = hash_args(data, **kwargs) 46 | 47 | self._raw_data = None 48 | 49 | @property 50 | def mapping(self): 51 | return { 52 | 'dt': (lambda x: x, 'dt'), 53 | 'sid': (lambda x: x[:x.find(".")], 'sid'), 54 | 'contract': (lambda x: x[x.find(".") + 1:], 'sid'), 55 | 'price': (float, 'price'), 56 | 'volume': (int, 'volume'), 57 | 'open_interest': (int, 'open_interest'), 58 | } 59 | 60 | @property 61 | def instance_hash(self): 62 | return self.arg_string 63 | 64 | def raw_data_gen(self): 65 | for dt, series in self.data.iterrows(): 66 | events = {} 67 | for (underlying, exp, metric), value in series.iterkv(): 68 | sid = '.'.join([underlying, exp]) 69 | if sid in self.sids: 70 | if sid not in events: 71 | events[sid] = {'dt': dt, 'sid': sid} 72 | events[sid][metric] = value 73 | for event in events.itervalues(): 74 | yield event 75 | 76 | @property 77 | def raw_data(self): 78 | if not self._raw_data: 79 | self._raw_data = self.raw_data_gen() 80 | return self._raw_data -------------------------------------------------------------------------------- /alephnull/examples/dual_ema_talib.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | 19 | from alephnull.algorithm import TradingAlgorithm 20 | from alephnull.utils.factory import load_from_yahoo 21 | 22 | # Import exponential moving average from talib wrapper 23 | from alephnull.transforms.ta import EMA 24 | 25 | from datetime import datetime 26 | import pytz 27 | 28 | 29 | class DualEMATaLib(TradingAlgorithm): 30 | """Dual Moving Average Crossover algorithm. 31 | 32 | This algorithm buys apple once its short moving average crosses 33 | its long moving average (indicating upwards momentum) and sells 34 | its shares once the averages cross again (indicating downwards 35 | momentum). 36 | 37 | """ 38 | def initialize(self, short_window=20, long_window=40): 39 | # Add 2 mavg transforms, one with a long window, one 40 | # with a short window. 
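# (Unlike the MovingAverage transforms used in dual_moving_average.py, these are TA-Lib exponential moving averages, and they are applied manually in handle_data below.)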
41 | self.short_ema_trans = EMA(timeperiod=short_window) 42 | self.long_ema_trans = EMA(timeperiod=long_window) 43 | 44 | # To keep track of whether we invested in the stock or not 45 | self.invested = False 46 | 47 | def handle_data(self, data): 48 | self.short_ema = self.short_ema_trans.handle_data(data) 49 | self.long_ema = self.long_ema_trans.handle_data(data) 50 | if self.short_ema is None or self.long_ema is None: 51 | return 52 | 53 | self.buy = False 54 | self.sell = False 55 | 56 | if self.short_ema > self.long_ema and not self.invested: 57 | self.order('AAPL', 100) 58 | self.invested = True 59 | self.buy = True 60 | elif self.short_ema < self.long_ema and self.invested: 61 | self.order('AAPL', -100) 62 | self.invested = False 63 | self.sell = True 64 | 65 | self.record(AAPL=data['AAPL'].price, 66 | short_ema=self.short_ema['AAPL'], 67 | long_ema=self.long_ema['AAPL'], 68 | buy=self.buy, 69 | sell=self.sell) 70 | 71 | if __name__ == '__main__': 72 | start = datetime(1990, 1, 1, 0, 0, 0, 0, pytz.utc) 73 | end = datetime(1991, 1, 1, 0, 0, 0, 0, pytz.utc) 74 | data = load_from_yahoo(stocks=['AAPL'], indexes={}, start=start, 75 | end=end) 76 | 77 | dma = DualEMATaLib() 78 | results = dma.run(data).dropna() 79 | 80 | fig = plt.figure() 81 | ax1 = fig.add_subplot(211, ylabel='portfolio value') 82 | results.portfolio_value.plot(ax=ax1) 83 | 84 | ax2 = fig.add_subplot(212) 85 | results[['AAPL', 'short_ema', 'long_ema']].plot(ax=ax2) 86 | 87 | ax2.plot(results.ix[results.buy].index, results.short_ema[results.buy], 88 | '^', markersize=10, color='m') 89 | ax2.plot(results.ix[results.sell].index, results.short_ema[results.sell], 90 | 'v', markersize=10, color='k') 91 | plt.legend(loc=0) 92 | plt.gcf().set_size_inches(18, 8) 93 | -------------------------------------------------------------------------------- /tests/test_data_util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | import unittest 17 | 18 | from collections import deque 19 | 20 | import numpy as np 21 | 22 | import pandas as pd 23 | import pandas.util.testing as tm 24 | 25 | from zipline.utils.data import RollingPanel 26 | 27 | 28 | class TestRollingPanel(unittest.TestCase): 29 | 30 | def test_basics(self): 31 | items = ['foo', 'bar', 'baz'] 32 | minor = ['A', 'B', 'C', 'D'] 33 | 34 | window = 10 35 | 36 | rp = RollingPanel(window, items, minor, cap_multiple=2) 37 | 38 | dates = pd.date_range('2000-01-01', periods=30, tz='utc') 39 | 40 | major_deque = deque() 41 | 42 | frames = {} 43 | 44 | for i in range(30): 45 | frame = pd.DataFrame(np.random.randn(3, 4), index=items, 46 | columns=minor) 47 | date = dates[i] 48 | 49 | rp.add_frame(date, frame) 50 | 51 | frames[date] = frame 52 | major_deque.append(date) 53 | 54 | if i >= window: 55 | major_deque.popleft() 56 | 57 | result = rp.get_current() 58 | expected = pd.Panel(frames, items=list(major_deque), 59 | major_axis=items, minor_axis=minor) 60 | tm.assert_panel_equal(result, expected.swapaxes(0, 1)) 61 | 62 | 63 | def f(option='clever', n=500, copy=False): 64 | items = range(5) 65 | minor = range(20) 66 | window = 100 67 | periods = n 68 | 69 | dates = pd.date_range('2000-01-01', periods=periods, tz='utc') 70 | frames = {} 71 | 72 | if option == 'clever': 73 | rp = RollingPanel(window, items, minor, cap_multiple=2) 74 | major_deque = deque() 75 | dummy = pd.DataFrame(np.random.randn(len(items), len(minor)), 76 | index=items, columns=minor) 77 | 78 | for i in range(periods): 79 | frame = dummy * (1 + 0.001 * i) 80 | date = dates[i] 81 | 82 | rp.add_frame(date, frame) 83 | 84 | frames[date] = frame 85 | major_deque.append(date) 86 | 87 | if i >= window: 88 | del frames[major_deque.popleft()] 89 | 90 | result = rp.get_current() 91 | if copy: 92 | result = result.copy() 93 | else: 94 | major_deque = deque() 95 | dummy = pd.DataFrame(np.random.randn(len(items), len(minor)), 96 | index=items, columns=minor) 97 | 98 | for i in range(periods): 99 | frame = dummy * (1 + 0.001 * i) 100 | date = dates[i] 101 | frames[date] = frame 102 | major_deque.append(date) 103 | 104 | if i >= window: 105 | del frames[major_deque.popleft()] 106 | 107 | result = pd.Panel(frames, items=list(major_deque), 108 | major_axis=items, minor_axis=minor) 109 | -------------------------------------------------------------------------------- /alephnull/utils/simfactory.py: -------------------------------------------------------------------------------- 1 | import alephnull.utils.factory as factory 2 | 3 | from alephnull.test_algorithms import TestAlgorithm 4 | 5 | 6 | def create_test_zipline(**config): 7 | """ 8 | :param config: A configuration object that is a dict with: 9 | 10 | - sid - an integer, which will be used as the security ID. 11 | - order_count - the number of orders the test algo will place, 12 | defaults to 100 13 | - order_amount - the number of shares per order, defaults to 100 14 | - trade_count - the number of trades to simulate, defaults to 101 15 | to ensure all orders are processed. 16 | - algorithm - optional parameter providing an algorithm. defaults 17 | to :py:class:`zipline.test.algorithms.TestAlgorithm` 18 | - trade_source - optional parameter to specify trades, if present. 19 | If not present :py:class:`zipline.sources.SpecificEquityTrades` 20 | is the source, with daily frequency in trades. 21 | - slippage: optional parameter that configures the 22 | :py:class:`zipline.gens.tradingsimulation.TransactionSimulator`. 
23 | Expects an object with a simulate method, such as 24 | :py:class:`zipline.gens.tradingsimulation.FixedSlippage`. 25 | :py:mod:`zipline.finance.trading` 26 | - transforms: optional parameter that provides a list 27 | of StatefulTransform objects. 28 | """ 29 | assert isinstance(config, dict) 30 | sid_list = config.get('sid_list') 31 | if not sid_list: 32 | sid = config.get('sid') 33 | sid_list = [sid] 34 | 35 | concurrent_trades = config.get('concurrent_trades', False) 36 | 37 | if 'order_count' in config: 38 | order_count = config['order_count'] 39 | else: 40 | order_count = 100 41 | 42 | if 'order_amount' in config: 43 | order_amount = config['order_amount'] 44 | else: 45 | order_amount = 100 46 | 47 | if 'trade_count' in config: 48 | trade_count = config['trade_count'] 49 | else: 50 | # to ensure all orders are filled, we provide one more 51 | # trade than order 52 | trade_count = 101 53 | 54 | #------------------- 55 | # Create the Algo 56 | #------------------- 57 | if 'algorithm' in config: 58 | test_algo = config['algorithm'] 59 | else: 60 | test_algo = TestAlgorithm( 61 | sid, 62 | order_amount, 63 | order_count, 64 | sim_params=config.get('sim_params', 65 | factory.create_simulation_parameters()) 66 | ) 67 | 68 | #------------------- 69 | # Trade Source 70 | #------------------- 71 | if 'trade_source' in config: 72 | trade_source = config['trade_source'] 73 | else: 74 | trade_source = factory.create_daily_trade_source( 75 | sid_list, 76 | trade_count, 77 | test_algo.sim_params, 78 | concurrent=concurrent_trades 79 | ) 80 | if trade_source: 81 | test_algo.set_sources([trade_source]) 82 | 83 | #------------------- 84 | # Benchmark source 85 | #------------------- 86 | 87 | test_algo.benchmark_return_source = config.get('benchmark_source', None) 88 | 89 | #------------------- 90 | # Transforms 91 | #------------------- 92 | 93 | transforms = config.get('transforms', None) 94 | if transforms is not None: 95 | test_algo.set_transforms(transforms) 96 | 97 | #------------------- 98 | # Slippage 99 | # ------------------ 100 | slippage = config.get('slippage', None) 101 | if slippage is not None: 102 | test_algo.set_slippage(slippage) 103 | 104 | # ------------------ 105 | # generator/simulator 106 | sim = test_algo.get_generator() 107 | 108 | return sim 109 | -------------------------------------------------------------------------------- /alephnull/transforms/returns.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from alephnull.errors import WrongDataForTransform 17 | from alephnull.transforms.utils import TransformMeta 18 | from collections import defaultdict, deque 19 | 20 | 21 | class Returns(object): 22 | """ 23 | Class that maintains a dictionary from sids to the sid's 24 | closing price N trading days ago.
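update(event) returns the sid's current return: the fractional change of the event's price relative to that stored close (see ReturnsFromPriorClose below).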
25 | """ 26 | __metaclass__ = TransformMeta 27 | 28 | def __init__(self, window_length): 29 | self.window_length = window_length 30 | self.mapping = defaultdict(self._create) 31 | 32 | def update(self, event): 33 | """ 34 | Update and return the calculated returns for this event's sid. 35 | """ 36 | tracker = self.mapping[event.sid] 37 | tracker.update(event) 38 | 39 | return tracker.returns 40 | 41 | def _create(self): 42 | return ReturnsFromPriorClose( 43 | self.window_length 44 | ) 45 | 46 | 47 | class ReturnsFromPriorClose(object): 48 | """ 49 | Records the last N closing events for a given security as well as the 50 | last event for the security. When we get an event for a new day, we 51 | treat the last event seen as the close for the previous day. 52 | """ 53 | 54 | def __init__(self, window_length): 55 | self.closes = deque() 56 | self.last_event = None 57 | self.returns = 0.0 58 | self.window_length = window_length 59 | 60 | def update(self, event): 61 | self.assert_required_fields(event) 62 | if self.last_event: 63 | 64 | # Day has changed since the last event we saw. Treat 65 | # the last event as the closing price for its day and 66 | # clear out the oldest close if it has expired. 67 | if self.last_event.dt.date() != event.dt.date(): 68 | 69 | self.closes.append(self.last_event) 70 | 71 | # We keep an event for the end of each trading day, so 72 | # if the number of stored events is greater than the 73 | # number of days we want to track, the oldest close 74 | # is expired and should be discarded. 75 | while len(self.closes) > self.window_length: 76 | # Pop the oldest event. 77 | self.closes.popleft() 78 | 79 | # We only generate a return value once we've seen enough days 80 | # to give a sensible value. Would be nice if we could query 81 | # db for closes prior to our initial event, but that would 82 | # require giving this transform database creds, which we want 83 | # to avoid. 84 | 85 | if len(self.closes) == self.window_length: 86 | last_close = self.closes[0].price 87 | change = event.price - last_close 88 | self.returns = change / last_close 89 | 90 | # the current event is now the last_event 91 | self.last_event = event 92 | 93 | def assert_required_fields(self, event): 94 | """ 95 | We only allow events with a price field to be run through 96 | the returns transform. 97 | """ 98 | if 'price' not in event: 99 | raise WrongDataForTransform( 100 | transform="ReturnsEventWindow", 101 | fields='price') 102 | -------------------------------------------------------------------------------- /alephnull/errors.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | 17 | class ZiplineError(Exception): 18 | msg = None 19 | 20 | def __init__(self, *args, **kwargs): 21 | self.args = args 22 | self.kwargs = kwargs 23 | self.message = str(self) 24 | 25 | def __str__(self): 26 | msg = self.msg.format(**self.kwargs) 27 | return msg 28 | 29 | __unicode__ = __str__ 30 | __repr__ = __str__ 31 | 32 | 33 | class WrongDataForTransform(ZiplineError): 34 | """ 35 | Raised whenever a rolling transform is called on an event that 36 | does not have the necessary properties. 37 | """ 38 | msg = "{transform} requires {fields}. Event cannot be processed." 39 | 40 | 41 | class UnsupportedSlippageModel(ZiplineError): 42 | """ 43 | Raised if a user script calls the override_slippage magic 44 | with a slippage object that isn't a VolumeShareSlippage or 45 | FixedSlippage 46 | """ 47 | msg = """ 48 | You attempted to override slippage with an unsupported class. \ 49 | Please use VolumeShareSlippage or FixedSlippage. 50 | """.strip() 51 | 52 | 53 | class OverrideSlippagePostInit(ZiplineError): 54 | # Raised if a user script calls the override_slippage magic 55 | # after the initialize method has returned. 56 | msg = """ 57 | You attempted to override slippage after the simulation has \ 58 | started. You may only call override_slippage in your initialize \ 59 | method. 60 | """.strip() 61 | 62 | 63 | class UnsupportedCommissionModel(ZiplineError): 64 | """ 65 | Raised if a user script calls the override_commission magic 66 | with a commission object that isn't a PerShare, PerTrade or 67 | PerDollar commission 68 | """ 69 | msg = """ 70 | You attempted to override commission with an unsupported class. \ 71 | Please use PerShare or PerTrade. 72 | """.strip() 73 | 74 | 75 | class OverrideCommissionPostInit(ZiplineError): 76 | """ 77 | Raised if a user script calls the override_commission magic 78 | after the initialize method has returned. 79 | """ 80 | msg = """ 81 | You attempted to override commission after the simulation has \ 82 | started. You may only call override_commission in your initialize \ 83 | method. 84 | """.strip() 85 | 86 | 87 | class TransactionWithNoVolume(ZiplineError): 88 | """ 89 | Raised if a transact call returns a transaction with zero volume. 90 | """ 91 | msg = """ 92 | Transaction {txn} has a volume of zero. 93 | """.strip() 94 | 95 | 96 | class TransactionWithWrongDirection(ZiplineError): 97 | """ 98 | Raised if a transact call returns a transaction with a direction that 99 | does not match the order. 100 | """ 101 | msg = """ 102 | Transaction {txn} not in same direction as corresponding order {order}. 103 | """.strip() 104 | 105 | 106 | class TransactionWithNoAmount(ZiplineError): 107 | """ 108 | Raised if a transact call returns a transaction with zero amount. 109 | """ 110 | msg = """ 111 | Transaction {txn} has an amount of zero. 112 | """.strip() 113 | 114 | 115 | class TransactionVolumeExceedsOrder(ZiplineError): 116 | """ 117 | Raised if a transact call returns a transaction with a volume greater than 118 | the corresponding order. 119 | """ 120 | msg = """ 121 | Transaction volume of {txn} exceeds the order volume of {order}. 122 | """.strip() 123 | -------------------------------------------------------------------------------- /alephnull/transforms/vwap.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from collections import defaultdict 17 | 18 | from alephnull.errors import WrongDataForTransform 19 | from alephnull.transforms.utils import EventWindow, TransformMeta 20 | 21 | 22 | class MovingVWAP(object): 23 | """ 24 | Class that maintains a dictionary from sids to VWAPEventWindows. 25 | """ 26 | __metaclass__ = TransformMeta 27 | 28 | def __init__(self, market_aware=True, delta=None, window_length=None): 29 | 30 | self.market_aware = market_aware 31 | self.delta = delta 32 | self.window_length = window_length 33 | 34 | # Market-aware mode only works with full-day windows. 35 | if self.market_aware: 36 | assert self.window_length and not self.delta,\ 37 | "Market-aware mode only works with full-day windows." 38 | 39 | # Non-market-aware mode requires a timedelta. 40 | else: 41 | assert self.delta and not self.window_length, \ 42 | "Non-market-aware mode requires a timedelta." 43 | 44 | # No way to pass arguments to the defaultdict factory, so we 45 | # need to define a method to generate the correct EventWindows. 46 | self.sid_windows = defaultdict(self.create_window) 47 | 48 | def create_window(self): 49 | """Factory method for self.sid_windows.""" 50 | return VWAPEventWindow( 51 | self.market_aware, 52 | window_length=self.window_length, 53 | delta=self.delta 54 | ) 55 | 56 | def update(self, event): 57 | """ 58 | Update the event window for this event's sid. Returns the 59 | current vwap for the sid. 60 | """ 61 | # This will create a new EventWindow if this is the first 62 | # message for this sid. 63 | window = self.sid_windows[event.sid] 64 | window.update(event) 65 | return window.get_vwap() 66 | 67 | 68 | class VWAPEventWindow(EventWindow): 69 | """ 70 | Iteratively maintains a vwap for a single sid over a given 71 | timedelta. 72 | """ 73 | def __init__(self, market_aware=True, window_length=None, delta=None): 74 | EventWindow.__init__(self, market_aware, window_length, delta) 75 | self.flux = 0.0 76 | self.totalvolume = 0.0 77 | 78 | # Subclass customization for adding new events. 79 | def handle_add(self, event): 80 | # Sanity check on the event. 81 | self.assert_required_fields(event) 82 | self.flux += event.volume * event.price 83 | self.totalvolume += event.volume 84 | 85 | # Subclass customization for removing expired events. 86 | def handle_remove(self, event): 87 | self.flux -= event.volume * event.price 88 | self.totalvolume -= event.volume 89 | 90 | def get_vwap(self): 91 | """ 92 | Return the calculated vwap for this sid. 93 | """ 94 | # By convention, vwap is None if we have no events. 95 | if len(self.ticks) == 0: 96 | return None 97 | else: 98 | return (self.flux / self.totalvolume) 99 | 100 | # We need numerical price and volume to calculate a vwap. 
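# get_vwap() above evaluates flux / totalvolume, i.e. sum(price * volume) / sum(volume) over the events currently held in the window.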
101 | def assert_required_fields(self, event): 102 | if 'price' not in event or 'volume' not in event: 103 | raise WrongDataForTransform( 104 | transform="VWAPEventWindow", 105 | fields=self.fields) 106 | -------------------------------------------------------------------------------- /alephnull/examples/dual_moving_average.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | 19 | from alephnull.algorithm import TradingAlgorithm 20 | from alephnull.finance import trading 21 | from alephnull.transforms import MovingAverage 22 | from alephnull.utils.factory import load_from_yahoo 23 | 24 | from datetime import datetime 25 | import pytz 26 | 27 | 28 | class DualMovingAverage(TradingAlgorithm): 29 | """Dual Moving Average Crossover algorithm. 30 | 31 | This algorithm buys apple once its short moving average crosses 32 | its long moving average (indicating upwards momentum) and sells 33 | its shares once the averages cross again (indicating downwards 34 | momentum). 35 | 36 | """ 37 | def initialize(self, short_window=20, long_window=40): 38 | # Add 2 mavg transforms, one with a long window, one 39 | # with a short window. 
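# Each transform is registered under a tag ('short_mavg' / 'long_mavg'); its output is attached to each sid's event, so handle_data below reads it as data['AAPL'].short_mavg['price'].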
40 | self.add_transform(MovingAverage, 'short_mavg', ['price'], 41 | window_length=short_window) 42 | 43 | self.add_transform(MovingAverage, 'long_mavg', ['price'], 44 | window_length=long_window) 45 | 46 | # To keep track of whether we invested in the stock or not 47 | self.invested = False 48 | 49 | def handle_data(self, data): 50 | self.short_mavg = data['AAPL'].short_mavg['price'] 51 | self.long_mavg = data['AAPL'].long_mavg['price'] 52 | self.buy = False 53 | self.sell = False 54 | 55 | if self.short_mavg > self.long_mavg and not self.invested: 56 | self.order('AAPL', 5000) 57 | self.invested = True 58 | self.buy = True 59 | elif self.short_mavg < self.long_mavg and self.invested: 60 | self.order('AAPL', -5000) 61 | self.invested = False 62 | self.sell = True 63 | 64 | self.record(short_mavg=self.short_mavg, 65 | long_mavg=self.long_mavg, 66 | buy=self.buy, 67 | sell=self.sell) 68 | 69 | if __name__ == '__main__': 70 | start = datetime(1990, 1, 1, 0, 0, 0, 0, pytz.utc) 71 | end = datetime(1991, 1, 1, 0, 0, 0, 0, pytz.utc) 72 | data = load_from_yahoo(stocks=['AAPL'], indexes={}, start=start, 73 | end=end) 74 | 75 | dma = DualMovingAverage() 76 | results = dma.run(data) 77 | 78 | br = trading.environment.benchmark_returns 79 | bm_returns = br[(br.index >= start) & (br.index <= end)] 80 | results['benchmark_returns'] = (1 + bm_returns).cumprod().values 81 | results['algorithm_returns'] = (1 + results.returns).cumprod() 82 | fig = plt.figure() 83 | ax1 = fig.add_subplot(211, ylabel='cumulative returns') 84 | 85 | results[['algorithm_returns', 'benchmark_returns']].plot(ax=ax1, 86 | sharex=True) 87 | 88 | ax2 = fig.add_subplot(212) 89 | data['AAPL'].plot(ax=ax2, color='r') 90 | results[['short_mavg', 'long_mavg']].plot(ax=ax2) 91 | 92 | ax2.plot(results.ix[results.buy].index, results.short_mavg[results.buy], 93 | '^', markersize=10, color='m') 94 | ax2.plot(results.ix[results.sell].index, results.short_mavg[results.sell], 95 | 'v', markersize=10, color='k') 96 | plt.legend(loc=0) 97 | 98 | sharpe = [risk['sharpe'] for risk in dma.risk_report['one_month']] 99 | print "Monthly Sharpe ratios:", sharpe 100 | plt.gcf().set_size_inches(18, 8) 101 | -------------------------------------------------------------------------------- /alephnull/utils/data.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
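# RollingPanel below preallocates a buffer of cap_multiple * window frames and only copies data back to the front when the buffer fills (_roll_data), trading extra memory for fewer per-tick copies.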
15 | 16 | import numpy as np 17 | import pandas as pd 18 | from copy import deepcopy 19 | 20 | 21 | def _ensure_index(x): 22 | if not isinstance(x, pd.Index): 23 | x = pd.Index(x) 24 | 25 | return x 26 | 27 | 28 | class RollingPanel(object): 29 | """ 30 | Preallocation strategies for rolling window over expanding data set 31 | 32 | Restrictions: major_axis can only be a DatetimeIndex for now 33 | """ 34 | 35 | def __init__(self, window, items, sids, cap_multiple=2, 36 | dtype=np.float64): 37 | self.pos = 0 38 | self.window = window 39 | 40 | self.items = _ensure_index(items) 41 | self.minor_axis = _ensure_index(sids) 42 | 43 | self.cap_multiple = cap_multiple 44 | self.cap = cap_multiple * window 45 | 46 | self.dtype = dtype 47 | self.index_buf = np.empty(self.cap, dtype='M8[ns]') 48 | 49 | self.buffer = self._create_buffer() 50 | 51 | def _create_buffer(self): 52 | return pd.Panel(items=self.items, minor_axis=self.minor_axis, 53 | major_axis=range(self.cap), 54 | dtype=self.dtype) 55 | 56 | def _update_buffer(self, frame): 57 | # Drop outdated, nan-filled minors (sids) and items (fields) 58 | non_nan_cols = set(self.buffer.dropna(axis=1).minor_axis) 59 | new_cols = set(frame.columns) 60 | self.minor_axis = _ensure_index(new_cols.union(non_nan_cols)) 61 | 62 | non_nan_items = set(self.buffer.dropna(axis=1).items) 63 | new_items = set(frame.index) 64 | self.items = _ensure_index(new_items.union(non_nan_items)) 65 | 66 | new_buffer = self._create_buffer() 67 | # Copy old values we want to keep 68 | # .update() is pretty slow. Ideally we would be using 69 | # new_buffer.loc[non_nan_items, :, non_nan_cols] = 70 | # but this triggers a bug in Pandas 0.11. Update 71 | # this when 0.12 is released. 72 | # https://github.com/pydata/pandas/issues/3777 73 | new_buffer.update( 74 | self.buffer.loc[non_nan_items, :, non_nan_cols]) 75 | 76 | self.buffer = new_buffer 77 | 78 | def add_frame(self, tick, frame): 79 | """ 80 | """ 81 | if self.pos == self.cap: 82 | self._roll_data() 83 | 84 | if set(frame.columns).difference(set(self.minor_axis)) or \ 85 | set(frame.index).difference(set(self.items)): 86 | self._update_buffer(frame) 87 | 88 | self.buffer.loc[:, self.pos, :] = frame.ix[self.items].T 89 | 90 | self.index_buf[self.pos] = tick 91 | 92 | self.pos += 1 93 | 94 | def get_current(self): 95 | """ 96 | Get a Panel that is the current data in view. It is not safe to persist 97 | these objects because internal data might change 98 | """ 99 | where = slice(max(self.pos - self.window, 0), self.pos) 100 | major_axis = pd.DatetimeIndex(deepcopy(self.index_buf[where]), 101 | tz='utc') 102 | 103 | return pd.Panel(self.buffer.values[:, where, :], self.items, 104 | major_axis, self.minor_axis) 105 | 106 | def _roll_data(self): 107 | """ 108 | Roll window worth of data up to position zero. 109 | Save the effort of having to expensively roll at each iteration 110 | """ 111 | self.buffer.values[:, :self.window, :] = \ 112 | self.buffer.values[:, -self.window:] 113 | self.index_buf[:self.window] = self.index_buf[-self.window:] 114 | self.pos = self.window 115 | -------------------------------------------------------------------------------- /alephnull/transforms/stddev.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from collections import defaultdict 17 | from math import sqrt 18 | 19 | from alephnull.errors import WrongDataForTransform 20 | from alephnull.transforms.utils import EventWindow, TransformMeta 21 | import alephnull.utils.math_utils as zp_math 22 | 23 | 24 | class MovingStandardDev(object): 25 | """ 26 | Class that maintains a dictionary from sids to 27 | MovingStandardDevWindows. For each sid, we maintain a the 28 | standard deviation of all events falling within the specified 29 | window. 30 | """ 31 | __metaclass__ = TransformMeta 32 | 33 | def __init__(self, market_aware=True, window_length=None, delta=None): 34 | 35 | self.market_aware = market_aware 36 | 37 | self.delta = delta 38 | self.window_length = window_length 39 | 40 | # Market-aware mode only works with full-day windows. 41 | if self.market_aware: 42 | assert self.window_length and not self.delta,\ 43 | "Market-aware mode only works with full-day windows." 44 | 45 | # Non-market-aware mode requires a timedelta. 46 | else: 47 | assert self.delta and not self.window_length, \ 48 | "Non-market-aware mode requires a timedelta." 49 | 50 | # No way to pass arguments to the defaultdict factory, so we 51 | # need to define a method to generate the correct EventWindows. 52 | self.sid_windows = defaultdict(self.create_window) 53 | 54 | def create_window(self): 55 | """ 56 | Factory method for self.sid_windows. 57 | """ 58 | return MovingStandardDevWindow( 59 | self.market_aware, 60 | self.window_length, 61 | self.delta 62 | ) 63 | 64 | def update(self, event): 65 | """ 66 | Update the event window for this event's sid. Return a dict 67 | from tracked fields to moving averages. 68 | """ 69 | # This will create a new EventWindow if this is the first 70 | # message for this sid. 71 | window = self.sid_windows[event.sid] 72 | window.update(event) 73 | return window.get_stddev() 74 | 75 | def assert_required_fields(self, event): 76 | """ 77 | We only allow events with a price field to be run through 78 | the returns transform. 79 | """ 80 | if 'price' not in event: 81 | raise WrongDataForTransform( 82 | transform="StdDevEventWindow", 83 | fields='price') 84 | 85 | 86 | class MovingStandardDevWindow(EventWindow): 87 | """ 88 | Iteratively calculates standard deviation for a particular sid 89 | over a given time window. The expected functionality of this 90 | class is to be instantiated inside a MovingStandardDev. 91 | """ 92 | 93 | def __init__(self, market_aware=True, window_length=None, delta=None): 94 | # Call the superclass constructor to set up base EventWindow 95 | # infrastructure. 96 | EventWindow.__init__(self, market_aware, window_length, delta) 97 | 98 | self.sum = 0.0 99 | self.sum_sqr = 0.0 100 | 101 | def handle_add(self, event): 102 | self.sum += event.price 103 | self.sum_sqr += event.price ** 2 104 | 105 | def handle_remove(self, event): 106 | self.sum -= event.price 107 | self.sum_sqr -= event.price ** 2 108 | 109 | def get_stddev(self): 110 | # Sample standard deviation is undefined for a single event or 111 | # no events. 
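        # With running sums the sample variance needs no stored history:
        #     s^2 = (sum(x_i^2) - n * mean^2) / (n - 1)
        #         = (sum_sqr - sum * average) / (n - 1)
        # because sum * average == n * mean^2; that identity is what the
        # arithmetic below implements.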
112 | if len(self) <= 1: 113 | return None 114 | 115 | else: 116 | average = self.sum / len(self) 117 | s_squared = (self.sum_sqr - self.sum * average) \ 118 | / (len(self) - 1) 119 | 120 | if zp_math.tolerant_equals(0, s_squared): 121 | return 0.0 122 | stddev = sqrt(s_squared) 123 | return stddev 124 | -------------------------------------------------------------------------------- /alephnull/data/benchmarks.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | import collections 16 | 17 | from datetime import datetime 18 | 19 | import csv 20 | 21 | from functools import partial 22 | 23 | import requests 24 | import pandas as pd 25 | 26 | from . loader_utils import ( 27 | date_conversion, 28 | source_to_records, 29 | Mapping 30 | ) 31 | 32 | DailyReturn = collections.namedtuple('DailyReturn', ['date', 'returns']) 33 | 34 | 35 | class BenchmarkDataNotFoundError(Exception): 36 | pass 37 | 38 | _BENCHMARK_MAPPING = { 39 | # Need to add 'symbol' 40 | 'volume': (int, 'Volume'), 41 | 'open': (float, 'Open'), 42 | 'close': (float, 'Close'), 43 | 'high': (float, 'High'), 44 | 'low': (float, 'Low'), 45 | 'adj_close': (float, 'Adj Close'), 46 | 'date': (partial(date_conversion, date_pattern='%Y-%m-%d'), 'Date') 47 | } 48 | 49 | 50 | def benchmark_mappings(): 51 | return {key: Mapping(*value) 52 | for key, value 53 | in _BENCHMARK_MAPPING.iteritems()} 54 | 55 | 56 | def get_raw_benchmark_data(start_date, end_date, symbol): 57 | 58 | # create benchmark files 59 | # ^GSPC 19500103 60 | params = collections.OrderedDict(( 61 | ('s', symbol), 62 | # start_date month, zero indexed 63 | ('a', start_date.month - 1), 64 | # start_date day 65 | ('b', start_date.day), 66 | # start_date year 67 | ('c', start_date.year), 68 | # end_date month, zero indexed 69 | ('d', end_date.month - 1), 70 | # end_date day str(int(todate[6:8])) #day 71 | ('e', end_date.day), 72 | # end_date year str(int(todate[0:4])) 73 | ('f', end_date.year), 74 | # daily frequency 75 | ('g', 'd'), 76 | )) 77 | 78 | res = requests.get('http://ichart.finance.yahoo.com/table.csv', 79 | params=params, stream=True) 80 | 81 | if not res.ok: 82 | raise BenchmarkDataNotFoundError(""" 83 | No benchmark data found for date range. 84 | start_date={start_date}, end_date={end_date}, url={url}""".strip(). 85 | format(start_date=start_date, 86 | end_date=end_date, 87 | url=res.url)) 88 | 89 | return csv.DictReader(res.iter_lines()) 90 | 91 | 92 | def get_benchmark_data(symbol, start_date=None, end_date=None): 93 | """ 94 | Benchmarks from Yahoo. 
95 | """ 96 | if start_date is None: 97 | start_date = datetime(year=1950, month=1, day=3) 98 | if end_date is None: 99 | end_date = datetime.utcnow() 100 | 101 | raw_benchmark_data = get_raw_benchmark_data(start_date, end_date, symbol) 102 | 103 | mappings = benchmark_mappings() 104 | 105 | return source_to_records(mappings, raw_benchmark_data) 106 | 107 | 108 | def get_benchmark_returns(symbol, start_date=None, end_date=None): 109 | """ 110 | Returns a list of return percentages in chronological order. 111 | """ 112 | if start_date is None: 113 | start_date = datetime(year=1950, month=1, day=3) 114 | if end_date is None: 115 | end_date = datetime.utcnow() 116 | 117 | # Get the benchmark data and convert it to a list in chronological order. 118 | data_points = list(get_benchmark_data(symbol, start_date, end_date)) 119 | data_points.reverse() 120 | 121 | # Calculate the return percentages. 122 | benchmark_returns = [] 123 | for i, data_point in enumerate(data_points): 124 | if i == 0: 125 | curr_open = data_points[i]['open'] 126 | returns = (data_points[i]['close'] - curr_open) / curr_open 127 | else: 128 | prev_close = data_points[i - 1]['close'] 129 | returns = (data_point['close'] - prev_close) / prev_close 130 | date = pd.tseries.tools.normalize_date(data_point['date']) 131 | daily_return = DailyReturn(date=date, returns=returns) 132 | benchmark_returns.append(daily_return) 133 | 134 | return benchmark_returns 135 | -------------------------------------------------------------------------------- /tests/risk/test_risk_cumulative.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
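# get_benchmark_returns in alephnull/data/benchmarks.py above chains simple
# daily returns: r_t = (close_t - close_{t-1}) / close_{t-1}, with the first
# day measured open -> close.  An equivalent pandas formulation, shown only
# as an illustration of the arithmetic (hypothetical data, not how the
# loader is written):
#
#     import pandas as pd
#     closes = pd.Series([100.0, 101.0, 99.0])
#     rets = closes.pct_change()                        # close-to-close returns
#     rets.iloc[0] = (closes.iloc[0] - 100.0) / 100.0   # first day: open of 100.0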
15 | 16 | import unittest 17 | 18 | import datetime 19 | import numpy as np 20 | import pytz 21 | import zipline.finance.risk as risk 22 | from zipline.utils import factory 23 | 24 | from zipline.finance.trading import SimulationParameters 25 | 26 | import answer_key 27 | ANSWER_KEY = answer_key.ANSWER_KEY 28 | 29 | 30 | class TestRisk(unittest.TestCase): 31 | 32 | def setUp(self): 33 | start_date = datetime.datetime( 34 | year=2006, 35 | month=1, 36 | day=1, 37 | hour=0, 38 | minute=0, 39 | tzinfo=pytz.utc) 40 | end_date = datetime.datetime( 41 | year=2006, month=12, day=29, tzinfo=pytz.utc) 42 | 43 | self.sim_params = SimulationParameters( 44 | period_start=start_date, 45 | period_end=end_date 46 | ) 47 | 48 | self.algo_returns_06 = factory.create_returns_from_list( 49 | answer_key.ALGORITHM_RETURNS.values, 50 | self.sim_params 51 | ) 52 | 53 | self.cumulative_metrics_06 = risk.RiskMetricsCumulative( 54 | self.sim_params) 55 | 56 | for dt, returns in answer_key.RETURNS_DATA.iterrows(): 57 | self.cumulative_metrics_06.update(dt, 58 | returns['Algorithm Returns'], 59 | returns['Benchmark Returns']) 60 | 61 | def test_algorithm_volatility_06(self): 62 | np.testing.assert_almost_equal( 63 | ANSWER_KEY.ALGORITHM_CUMULATIVE_VOLATILITY, 64 | self.cumulative_metrics_06.metrics.algorithm_volatility.values) 65 | 66 | def test_sharpe_06(self): 67 | for dt, value in answer_key.RISK_CUMULATIVE.sharpe.iterkv(): 68 | np.testing.assert_almost_equal( 69 | value, 70 | self.cumulative_metrics_06.metrics.sharpe[dt], 71 | decimal=2, 72 | err_msg="Mismatch at %s" % (dt,)) 73 | 74 | def test_downside_risk_06(self): 75 | for dt, value in answer_key.RISK_CUMULATIVE.downside_risk.iterkv(): 76 | np.testing.assert_almost_equal( 77 | self.cumulative_metrics_06.metrics.downside_risk[dt], 78 | value, 79 | decimal=2, 80 | err_msg="Mismatch at %s" % (dt,)) 81 | 82 | def test_sortino_06(self): 83 | for dt, value in answer_key.RISK_CUMULATIVE.sortino.iterkv(): 84 | np.testing.assert_almost_equal( 85 | self.cumulative_metrics_06.metrics.sortino[dt], 86 | value, 87 | decimal=2, 88 | err_msg="Mismatch at %s" % (dt,)) 89 | 90 | def test_information_06(self): 91 | for dt, value in answer_key.RISK_CUMULATIVE.information.iterkv(): 92 | np.testing.assert_almost_equal( 93 | self.cumulative_metrics_06.metrics.information[dt], 94 | value, 95 | decimal=2, 96 | err_msg="Mismatch at %s" % (dt,)) 97 | 98 | def test_alpha_06(self): 99 | for dt, value in answer_key.RISK_CUMULATIVE.alpha.iterkv(): 100 | np.testing.assert_almost_equal( 101 | self.cumulative_metrics_06.metrics.alpha[dt], 102 | value, 103 | decimal=2, 104 | err_msg="Mismatch at %s" % (dt,)) 105 | 106 | def test_beta_06(self): 107 | for dt, value in answer_key.RISK_CUMULATIVE.beta.iterkv(): 108 | np.testing.assert_almost_equal( 109 | self.cumulative_metrics_06.metrics.beta[dt], 110 | value, 111 | decimal=2, 112 | err_msg="Mismatch at %s" % (dt,)) 113 | 114 | def test_max_drawdown_calculated(self): 115 | # We don't track max_drawdown by day, so it doesn't make sense to 116 | # generate a full answer key for it. For now, ensure it's just 117 | # "not zero" 118 | self.assertNotEqual(self.cumulative_metrics_06.max_drawdown, 0.0) 119 | -------------------------------------------------------------------------------- /alephnull/data/treasuries_can.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import datetime 17 | import requests 18 | 19 | from . loader_utils import ( 20 | source_to_records 21 | ) 22 | 23 | from alephnull.data.treasuries import ( 24 | treasury_mappings, get_treasury_date, get_treasury_rate 25 | ) 26 | 27 | 28 | _CURVE_MAPPINGS = { 29 | 'date': (get_treasury_date, "Date"), 30 | '1month': (get_treasury_rate, "V39063"), 31 | '3month': (get_treasury_rate, "V39065"), 32 | '6month': (get_treasury_rate, "V39066"), 33 | '1year': (get_treasury_rate, "V39067"), 34 | '2year': (get_treasury_rate, "V39051"), 35 | '3year': (get_treasury_rate, "V39052"), 36 | '5year': (get_treasury_rate, "V39053"), 37 | '7year': (get_treasury_rate, "V39054"), 38 | '10year': (get_treasury_rate, "V39055"), 39 | #Bank of Canada refers to this as 'Long' Rate, approximately 30 years. 40 | '30year': (get_treasury_rate, "V39056"), 41 | } 42 | 43 | BILLS = ['V39063', 'V39065', 'V39066', 'V39067'] 44 | BONDS = ['V39051', 'V39052', 'V39053', 'V39054', 'V39055', 'V39056'] 45 | 46 | 47 | def get_treasury_source(start_date=None, end_date=None): 48 | 49 | today = datetime.date.today() 50 | #Bank of Canada only has 10 years of data and has this in the URL. 51 | restriction = datetime.date(today.year-10, today.month, today.day) 52 | 53 | if not end_date: 54 | end_date = today 55 | 56 | if not start_date: 57 | start_date = restriction 58 | 59 | bill_url = ( 60 | "http://www.bankofcanada.ca/stats/results/csv?" 61 | "lP=lookup_tbill_yields.php&sR={restrict}&se=" 62 | "L_V39063-L_V39065-L_V39066-L_V39067&dF={start}&dT={end}" 63 | .format(restrict=restriction.strftime("%Y-%m-%d"), 64 | start=start_date.strftime("%Y-%m-%d"), 65 | end=end_date.strftime("%Y-%m-%d"), 66 | ) 67 | ) 68 | 69 | bond_url = ( 70 | "http://www.bankofcanada.ca/stats/results/csv?" 
71 | "lP=lookup_bond_yields.php&sR={restrict}&se=" 72 | "L_V39051-L_V39052-L_V39053-L_V39054-L_V39055-L_V39056" 73 | "&dF={start}&dT={end}" 74 | .format(restrict=restriction.strftime("%Y-%m-%d"), 75 | start=start_date.strftime("%Y-%m-%d"), 76 | end=end_date.strftime("%Y-%m-%d") 77 | ) 78 | ) 79 | 80 | res_bill = requests.get(bill_url, stream=True) 81 | res_bond = requests.get(bond_url, stream=True) 82 | bill_iter = res_bill.iter_lines() 83 | bond_iter = res_bond.iter_lines() 84 | 85 | bill_row = "" 86 | while ",".join(BILLS) not in bill_row: 87 | bill_row = bill_iter.next() 88 | if 'Daily series:' in bill_row: 89 | bill_end_date = datetime.datetime.strptime( 90 | bill_row.split(' - ')[1].strip(), 91 | "%Y-%m-%d").date() 92 | bill_header = bill_row.split(",") 93 | 94 | bond_row = "" 95 | while ",".join(BONDS) not in bond_row: 96 | bond_row = bond_iter.next() 97 | if 'Daily series:' in bond_row: 98 | bond_end_date = datetime.datetime.strptime( 99 | bond_row.split(' - ')[1].strip(), 100 | "%Y-%m-%d").date() 101 | bond_header = bond_row.split(",") 102 | 103 | #Line up the two dates 104 | if bill_end_date > bond_end_date: 105 | bill_iter.next() 106 | elif bond_end_date > bill_end_date: 107 | bond_iter.next() 108 | 109 | for bill_row in bill_iter: 110 | bond_row = bond_iter.next() 111 | bill_dict = dict(zip(bill_header, bill_row.split(","))) 112 | bond_dict = dict(zip(bond_header, bond_row.split(","))) 113 | if ' Bank holiday' in bond_row.split(",") + bill_row.split(","): 114 | continue 115 | if ' Not available' in bond_row.split(",") + bill_row.split(","): 116 | continue 117 | 118 | bill_dict.update(bond_dict) 119 | yield bill_dict 120 | 121 | 122 | def get_treasury_data(): 123 | mappings = treasury_mappings(_CURVE_MAPPINGS) 124 | source = get_treasury_source() 125 | return source_to_records(mappings, source) 126 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ************ 2 | Installation 3 | ************ 4 | 5 | Since zipline is pure-python code it should be very easy to install 6 | and set up with pip: 7 | 8 | :: 9 | 10 | pip install zipline 11 | 12 | If there are problems installing the dependencies or zipline we 13 | recommend installing these packages via some other means. For Windows, 14 | the `Enthought Python Distribution 15 | `_ 16 | includes most of the necessary dependencies. On OSX, the `Scipy Superpack 17 | `_ works very well. 18 | 19 | Dependencies 20 | ------------ 21 | 22 | * Python (>= 2.7.2) 23 | * numpy (>= 1.6.0) 24 | * pandas (>= 0.9.0) 25 | * pytz 26 | * iso8601 27 | * Logbook 28 | 29 | 30 | Develop 31 | ------- 32 | 33 | To run tests:: 34 | 35 | $ nosetests 36 | 37 | Tooling hints 38 | ================ 39 | :mod:`zipline` relies heavily on scientific python components (numpy, scikit, pandas, matplotlib, ipython, etc). Tooling up can be a pain, and it often involves managing a configuration including your OS, c/c++/fortran compilers, python version, and versions of numerous modules. I've found the following tools absolutely indispensable: 40 | 41 | - some kind of package manager for your platform. package managers generally give you a way to search, install, uninstall, and check currently installed packages. They also do a great job of managing dependencies. 
42 | - linux: yum/apt-get 43 | - mac: homebrew/macport/fink (I highly recommend homebrew: https://github.com/mxcl/homebrew) 44 | - windows: probably best if you use a complete distribution, like: enthought, ActiveState, or Python(x,y) 45 | - Python also provides good package management tools to help you manage the components you install for Python. 46 | - pip 47 | - easy_install/setuptools. I have always used setuptools, and I've been quite happy with it. Just remember that setuptools is coupled to your python version. 48 | - virtualenv and virtualenvwrapper are your very best friends. They complement your python package manager by allowing you to create and quickly switch between named configurations. 49 | - *Install all the versions of Python you like to use, but install setuptools, virtualenv, and virtualenvwrapper with the very latest python.* Use the latest python to install the latest setuptools, and the latest setuptools to install virtualenv and virtualenvwrapper. virtualenvwrapper allows you to specify the python version you wish to use (mkvirtualenv -p <path-to-python>), so you can create envs of any python denomination. 50 | 51 | Mac OS hints 52 | ------------- 53 | 54 | Scientific python on the Mac can be a bit confusing because of the many independent variables. You need to have several components installed, and be aware of the versions of each: 55 | 56 | - XCode. XCode includes the gcc and g++ compilers and architecture specific assemblers. Your version of XCode will determine which compilers and assemblers are available. The most common issue I encountered with scientific python libraries is compilation errors of underlying C code. Most scientific libraries are optimized with C routines, so this is a major hurdle. In my environment (XCode 4.0.2 with iOS components installed) I ran into problems with -arch flags asking for power pc (-arch ppc passed to the compiler). Read this stackoverflow to see how to handle similar problems: http://stackoverflow.com/questions/5256397/python-easy-install-fails-with-assembler-for-architecture-ppc-not-installed-on 57 | - gfortran - you need this to build numpy. With brew you can install with just: ```brew install gfortran``` 58 | - umfpack - you need this to build scipy. ```brew install umfpack``` 59 | - swig - you need this to build scipy. ```brew install swig``` 60 | - hdf5 - you need this to build tables. ```brew install hdf5``` 61 | - zeromq - you need this to run qbt. ```brew install zmq``` 62 | 63 | 64 | Data Sources 65 | ============= 66 | 67 | The Backtest can handle multiple concurrent data sources. QBT will start a 68 | subprocess to run each datasource, and merge all events from all sources into a 69 | single serial feed, ordered by date. 70 | 71 | Data sources have events with very different frequencies. For example, liquid 72 | stocks will trade many times per minute, while illiquid stocks may trade just 73 | once a day. In order to serialize events from all sources into a single feed, 74 | qbt loads events from all sources into memory, then sorts. The communication 75 | happens like this: 76 | 77 | 1. QBT requests the next event from each data source, ignoring date (i.e. 78 | just next in sequence for all) 79 | 2. Using the earliest date from all the events from all sources, QBT then 80 | asks for "next after <date>" from all sources. 81 | 3. All datasources send all events in their history from before <date>, 82 | moving their internal pointer forward to the next unsent event. 83 | 4. QBT merges all events in memory 84 | 5. goto 1!
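The loop above is essentially a k-way merge keyed on event date. A minimal
sketch of that idea in Python (illustrative only, built on the standard
library's ``heapq``; this is not QBT's actual implementation)::

    import heapq

    def merge_by_date(*sources):
        """Merge several date-ordered event streams into one serial feed."""
        # Decorate each event with (date, source index) so ties on date never
        # fall through to comparing the event dicts themselves.
        decorated = [((event['dt'], i, event) for event in source)
                     for i, source in enumerate(sources)]
        for _dt, _i, event in heapq.merge(*decorated):
            yield event

Each source only has to yield its own events in date order for the merged
feed to come out globally ordered.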
85 | -------------------------------------------------------------------------------- /tests/test_exception_handling.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from unittest import TestCase 17 | 18 | import zipline.utils.simfactory as simfactory 19 | import zipline.utils.factory as factory 20 | 21 | from zipline.test_algorithms import ( 22 | ExceptionAlgorithm, 23 | DivByZeroAlgorithm, 24 | SetPortfolioAlgorithm, 25 | ) 26 | from zipline.finance.slippage import FixedSlippage 27 | from zipline.transforms.utils import StatefulTransform 28 | 29 | 30 | from zipline.utils.test_utils import ( 31 | drain_zipline, 32 | setup_logger, 33 | teardown_logger, 34 | ExceptionSource, 35 | ExceptionTransform 36 | ) 37 | 38 | DEFAULT_TIMEOUT = 15 # seconds 39 | EXTENDED_TIMEOUT = 90 40 | 41 | 42 | class ExceptionTestCase(TestCase): 43 | 44 | def setUp(self): 45 | self.zipline_test_config = { 46 | 'sid': 133, 47 | 'slippage': FixedSlippage() 48 | } 49 | setup_logger(self) 50 | 51 | def tearDown(self): 52 | teardown_logger(self) 53 | 54 | def test_datasource_exception(self): 55 | self.zipline_test_config['trade_source'] = ExceptionSource() 56 | zipline = simfactory.create_test_zipline( 57 | **self.zipline_test_config 58 | ) 59 | 60 | with self.assertRaises(ZeroDivisionError) as ctx: 61 | output, _ = drain_zipline(self, zipline) 62 | 63 | self.assertEqual( 64 | ctx.exception.message, 65 | 'integer division or modulo by zero' 66 | ) 67 | 68 | def test_tranform_exception(self): 69 | exc_tnfm = StatefulTransform(ExceptionTransform) 70 | self.zipline_test_config['transforms'] = [exc_tnfm] 71 | 72 | zipline = simfactory.create_test_zipline( 73 | **self.zipline_test_config 74 | ) 75 | 76 | with self.assertRaises(AssertionError) as ctx: 77 | output, _ = drain_zipline(self, zipline) 78 | 79 | self.assertEqual(ctx.exception.message, 80 | 'An assertion message') 81 | 82 | def test_exception_in_handle_data(self): 83 | # Simulation 84 | # ---------- 85 | self.zipline_test_config['algorithm'] = \ 86 | ExceptionAlgorithm( 87 | 'handle_data', 88 | self.zipline_test_config['sid'], 89 | sim_params=factory.create_simulation_parameters() 90 | ) 91 | 92 | zipline = simfactory.create_test_zipline( 93 | **self.zipline_test_config 94 | ) 95 | 96 | with self.assertRaises(Exception) as ctx: 97 | output, _ = drain_zipline(self, zipline) 98 | 99 | self.assertEqual(ctx.exception.message, 100 | 'Algo exception in handle_data') 101 | 102 | def test_zerodivision_exception_in_handle_data(self): 103 | 104 | # Simulation 105 | # ---------- 106 | self.zipline_test_config['algorithm'] = \ 107 | DivByZeroAlgorithm( 108 | self.zipline_test_config['sid'], 109 | sim_params=factory.create_simulation_parameters() 110 | ) 111 | 112 | zipline = simfactory.create_test_zipline( 113 | **self.zipline_test_config 114 | ) 115 | 116 | with self.assertRaises(ZeroDivisionError) as 
ctx: 117 | output, _ = drain_zipline(self, zipline) 118 | 119 | self.assertEqual(ctx.exception.message, 120 | 'integer division or modulo by zero') 121 | 122 | def test_set_portfolio(self): 123 | """ 124 | Are we protected against overwriting an algo's portfolio? 125 | """ 126 | 127 | # Simulation 128 | # ---------- 129 | self.zipline_test_config['algorithm'] = \ 130 | SetPortfolioAlgorithm( 131 | self.zipline_test_config['sid'], 132 | sim_params=factory.create_simulation_parameters() 133 | ) 134 | 135 | zipline = simfactory.create_test_zipline( 136 | **self.zipline_test_config 137 | ) 138 | 139 | with self.assertRaises(AttributeError) as ctx: 140 | output, _ = drain_zipline(self, zipline) 141 | 142 | self.assertEqual(ctx.exception.message, 143 | "can't set attribute") 144 | -------------------------------------------------------------------------------- /alephnull/examples/pairtrade.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | import numpy as np 19 | import statsmodels.api as sm 20 | from datetime import datetime 21 | import pytz 22 | 23 | from alephnull.algorithm import TradingAlgorithm 24 | from alephnull.transforms import batch_transform 25 | from alephnull.utils.factory import load_from_yahoo 26 | 27 | 28 | @batch_transform 29 | def ols_transform(data, sid1, sid2): 30 | """Computes regression coefficient (slope and intercept) 31 | via Ordinary Least Squares between two SIDs. 32 | """ 33 | p0 = data.price[sid1] 34 | p1 = sm.add_constant(data.price[sid2], prepend=True) 35 | slope, intercept = sm.OLS(p0, p1).fit().params 36 | 37 | return slope, intercept 38 | 39 | 40 | class Pairtrade(TradingAlgorithm): 41 | """Pairtrading relies on cointegration of two stocks. 42 | 43 | The expectation is that once the two stocks drifted apart 44 | (i.e. there is spread), they will eventually revert again. Thus, 45 | if we short the upward drifting stock and long the downward 46 | drifting stock (in short, we buy the spread) once the spread 47 | widened we can sell the spread with profit once they converged 48 | again. A nice property of this algorithm is that we enter the 49 | market in a neutral position. 50 | 51 | This specific algorithm tries to exploit the cointegration of 52 | Pepsi and Coca Cola by estimating the correlation between the 53 | two. Divergence of the spread is evaluated by z-scoring. 54 | """ 55 | 56 | def initialize(self, window_length=100): 57 | self.spreads = [] 58 | self.invested = 0 59 | self.window_length = window_length 60 | self.ols_transform = ols_transform(refresh_period=self.window_length, 61 | window_length=self.window_length) 62 | 63 | def handle_data(self, data): 64 | ###################################################### 65 | # 1. 
Compute regression coefficients between PEP and KO 66 | params = self.ols_transform.handle_data(data, 'PEP', 'KO') 67 | if params is None: 68 | return 69 | intercept, slope = params 70 | 71 | ###################################################### 72 | # 2. Compute spread and zscore 73 | zscore = self.compute_zscore(data, slope, intercept) 74 | self.record(zscores=zscore) 75 | 76 | ###################################################### 77 | # 3. Place orders 78 | self.place_orders(data, zscore) 79 | 80 | def compute_zscore(self, data, slope, intercept): 81 | """1. Compute the spread given slope and intercept. 82 | 2. zscore the spread. 83 | """ 84 | spread = (data['PEP'].price - (slope * data['KO'].price + intercept)) 85 | self.spreads.append(spread) 86 | spread_wind = self.spreads[-self.window_length:] 87 | zscore = (spread - np.mean(spread_wind)) / np.std(spread_wind) 88 | return zscore 89 | 90 | def place_orders(self, data, zscore): 91 | """Buy spread if zscore is > 2, sell if zscore < .5. 92 | """ 93 | if zscore >= 2.0 and not self.invested: 94 | self.order('PEP', int(100 / data['PEP'].price)) 95 | self.order('KO', -int(100 / data['KO'].price)) 96 | self.invested = True 97 | elif zscore <= -2.0 and not self.invested: 98 | self.order('PEP', -int(100 / data['PEP'].price)) 99 | self.order('KO', int(100 / data['KO'].price)) 100 | self.invested = True 101 | elif abs(zscore) < .5 and self.invested: 102 | self.sell_spread() 103 | self.invested = False 104 | 105 | def sell_spread(self): 106 | """ 107 | decrease exposure, regardless of position long/short. 108 | buy for a short position, sell for a long. 109 | """ 110 | ko_amount = self.portfolio.positions['KO'].amount 111 | self.order('KO', -1 * ko_amount) 112 | pep_amount = self.portfolio.positions['PEP'].amount 113 | self.order('PEP', -1 * pep_amount) 114 | 115 | 116 | if __name__ == '__main__': 117 | start = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc) 118 | end = datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc) 119 | data = load_from_yahoo(stocks=['PEP', 'KO'], indexes={}, 120 | start=start, end=end) 121 | 122 | pairtrade = Pairtrade() 123 | results = pairtrade.run(data) 124 | data['spreads'] = np.nan 125 | 126 | ax1 = plt.subplot(211) 127 | data[['PEP', 'KO']].plot(ax=ax1) 128 | plt.ylabel('price') 129 | plt.setp(ax1.get_xticklabels(), visible=False) 130 | 131 | ax2 = plt.subplot(212, sharex=ax1) 132 | results.zscores.plot(ax=ax2, color='r') 133 | plt.ylabel('zscored spread') 134 | 135 | plt.show() 136 | raw_input() -------------------------------------------------------------------------------- /alephnull/data/loader_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2012 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | """ 18 | Various utilites used by different date loaders. 19 | 20 | Could stand to be broken up more into components. 21 | e.g. the mapping utilities. 
22 | 23 | """ 24 | 25 | import datetime 26 | 27 | import pytz 28 | 29 | from collections import namedtuple 30 | 31 | from functools import partial 32 | 33 | 34 | def get_utc_from_exchange_time(naive): 35 | local = pytz.timezone('US/Eastern') 36 | local_dt = naive.replace(tzinfo=local) 37 | utc_dt = local_dt.astimezone(pytz.utc) 38 | return utc_dt 39 | 40 | 41 | def get_exchange_time_from_utc(utc_dt): 42 | """ 43 | Takes in result from exchange time. 44 | """ 45 | dt = utc_dt.replace(tzinfo=pytz.utc) 46 | local = pytz.timezone('US/Eastern') 47 | dt = dt.astimezone(local) 48 | 49 | return dt 50 | 51 | 52 | def guarded_conversion(conversion, str_val): 53 | """ 54 | Returns the result of applying the @conversion to @str_val 55 | """ 56 | if str_val in (None, ""): 57 | return None 58 | return conversion(str_val) 59 | 60 | 61 | def safe_int(str_val): 62 | """ 63 | casts the @str_val to a float to handle the occassional 64 | decimal point in int fields from data providers. 65 | """ 66 | f = float(str_val) 67 | i = int(f) 68 | return i 69 | 70 | 71 | def date_conversion(date_str, date_pattern='%m/%d/%Y', to_utc=True): 72 | """ 73 | Convert date strings from TickData (or other source) into epoch values. 74 | 75 | Specify to_utc=False if the input date is already UTC (or is naive). 76 | """ 77 | dt = datetime.datetime.strptime(date_str, date_pattern) 78 | if to_utc: 79 | dt = get_utc_from_exchange_time(dt) 80 | else: 81 | dt = dt.replace(tzinfo=pytz.utc) 82 | return dt 83 | 84 | 85 | # Mapping is a structure for how want to convert the source data into 86 | # the form we insert into the database. 87 | # - conversion, a function used to convert source input to our target value 88 | # - source, the key(s) in the original source to pass to the conversion 89 | # method 90 | # If a single string, then it's a direct lookup into the 91 | # source row by that key 92 | # If an iterator, pass the source to as a list of keys, 93 | # in order, to the conversion function. 94 | # If empty, then the conversion method provides a 'default' value. 95 | Mapping = namedtuple('Mapping', ['conversion', 'source']) 96 | 97 | 98 | def apply_mapping(mapping, row): 99 | """ 100 | Returns the value of a @mapping for a given @row. 101 | 102 | i.e. the @mapping.source values are extracted from @row and fed 103 | into the @mapping.conversion method. 104 | """ 105 | if isinstance(mapping.source, str): 106 | # Do a 'direct' conversion of one key from the source row. 107 | return guarded_conversion(mapping.conversion, row[mapping.source]) 108 | if mapping.source is None: 109 | # For hardcoded values. 110 | # conversion method will return a constant value 111 | return mapping.conversion() 112 | else: 113 | # Assume we are using multiple source values. 114 | # Feed the source values in order prescribed by mapping.source 115 | # to mapping.conversion. 116 | return mapping.conversion(*[row[source] for source in mapping.source]) 117 | 118 | 119 | def _row_cb(mapping, row): 120 | """ 121 | Returns the dict created from our @mapping of the source @row. 122 | 123 | Not intended to be used directly, but rather to be the base of another 124 | function that supplies the mapping value. 125 | """ 126 | return { 127 | target: apply_mapping(mapping, row) 128 | for target, mapping 129 | in mapping.iteritems() 130 | } 131 | 132 | 133 | def make_row_cb(mapping): 134 | """ 135 | Returns a func that can be applied to a dict that returns the 136 | application of the @mapping, which results in a dict. 
137 | """ 138 | return partial(_row_cb, mapping) 139 | 140 | 141 | def source_to_records(mappings, 142 | source, 143 | source_wrapper=None, 144 | records_wrapper=None): 145 | if source_wrapper: 146 | source = source_wrapper(source) 147 | 148 | callback = make_row_cb(mappings) 149 | 150 | records = (callback(row) for row in source) 151 | 152 | if records_wrapper: 153 | records = records_wrapper(records) 154 | 155 | return records 156 | -------------------------------------------------------------------------------- /alephnull/sources/data_frame_source.py: -------------------------------------------------------------------------------- 1 | 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | """ 18 | Tools to generate data sources. 19 | """ 20 | import pandas as pd 21 | 22 | from alephnull.gens.utils import hash_args 23 | 24 | from alephnull.sources.data_source import DataSource 25 | 26 | 27 | class DataFrameSource(DataSource): 28 | """ 29 | Yields all events in event_list that match the given sid_filter. 30 | If no event_list is specified, generates an internal stream of events 31 | to filter. Returns all events if filter is None. 32 | 33 | Configuration options: 34 | 35 | sids : list of values representing simulated internal sids 36 | start : start date 37 | delta : timedelta between internal events 38 | filter : filter to remove the sids 39 | """ 40 | 41 | def __init__(self, data, **kwargs): 42 | assert isinstance(data.index, pd.tseries.index.DatetimeIndex) 43 | 44 | self.data = data 45 | # Unpack config dictionary with default values. 46 | self.sids = kwargs.get('sids', data.columns) 47 | self.start = kwargs.get('start', data.index[0]) 48 | self.end = kwargs.get('end', data.index[-1]) 49 | 50 | # Hash_value for downstream sorting. 51 | self.arg_string = hash_args(data, **kwargs) 52 | 53 | self._raw_data = None 54 | 55 | @property 56 | def mapping(self): 57 | return { 58 | 'dt': (lambda x: x, 'dt'), 59 | 'sid': (lambda x: x, 'sid'), 60 | 'price': (float, 'price'), 61 | 'volume': (int, 'volume'), 62 | } 63 | 64 | @property 65 | def instance_hash(self): 66 | return self.arg_string 67 | 68 | def raw_data_gen(self): 69 | for dt, series in self.data.iterrows(): 70 | for sid, price in series.iterkv(): 71 | if sid in self.sids: 72 | event = { 73 | 'dt': dt, 74 | 'sid': sid, 75 | 'price': price, 76 | 'volume': 1000, 77 | } 78 | yield event 79 | 80 | @property 81 | def raw_data(self): 82 | if not self._raw_data: 83 | self._raw_data = self.raw_data_gen() 84 | return self._raw_data 85 | 86 | 87 | class DataPanelSource(DataSource): 88 | """ 89 | Yields all events in event_list that match the given sid_filter. 90 | If no event_list is specified, generates an internal stream of events 91 | to filter. Returns all events if filter is None. 
92 | 93 | Configuration options: 94 | 95 | sids : list of values representing simulated internal sids 96 | start : start date 97 | delta : timedelta between internal events 98 | filter : filter to remove the sids 99 | """ 100 | 101 | def __init__(self, data, **kwargs): 102 | assert isinstance(data.major_axis, pd.tseries.index.DatetimeIndex) 103 | 104 | self.data = data 105 | # Unpack config dictionary with default values. 106 | self.sids = kwargs.get('sids', data.items) 107 | self.start = kwargs.get('start', data.major_axis[0]) 108 | self.end = kwargs.get('end', data.major_axis[-1]) 109 | 110 | # Hash_value for downstream sorting. 111 | self.arg_string = hash_args(data, **kwargs) 112 | 113 | self._raw_data = None 114 | 115 | @property 116 | def mapping(self): 117 | mapping = { 118 | 'dt': (lambda x: x, 'dt'), 119 | 'sid': (lambda x: x, 'sid'), 120 | 'price': (float, 'price'), 121 | 'volume': (int, 'volume'), 122 | } 123 | 124 | # Add additional fields. 125 | for field_name in self.data.minor_axis: 126 | if field_name in ['price', 'volume', 'dt', 'sid']: 127 | continue 128 | mapping[field_name] = (lambda x: x, field_name) 129 | 130 | return mapping 131 | 132 | @property 133 | def instance_hash(self): 134 | return self.arg_string 135 | 136 | def raw_data_gen(self): 137 | for dt in self.data.major_axis: 138 | df = self.data.major_xs(dt) 139 | for sid, series in df.iterkv(): 140 | if sid in self.sids: 141 | event = { 142 | 'dt': dt, 143 | 'sid': sid, 144 | } 145 | for field_name, value in series.iteritems(): 146 | event[field_name] = value 147 | 148 | yield event 149 | 150 | @property 151 | def raw_data(self): 152 | if not self._raw_data: 153 | self._raw_data = self.raw_data_gen() 154 | return self._raw_data 155 | -------------------------------------------------------------------------------- /alephnull/data/treasuries.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | import re 16 | 17 | import numpy as np 18 | import pandas as pd 19 | import requests 20 | 21 | from collections import OrderedDict 22 | import xml.etree.ElementTree as ET 23 | 24 | from . 
loader_utils import ( 25 | guarded_conversion, 26 | safe_int, 27 | Mapping, 28 | date_conversion, 29 | source_to_records 30 | ) 31 | 32 | 33 | def get_treasury_date(dstring): 34 | return date_conversion(dstring.split("T")[0], date_pattern='%Y-%m-%d', 35 | to_utc=False) 36 | 37 | 38 | def get_treasury_rate(string_val): 39 | val = guarded_conversion(float, string_val) 40 | if val is not None: 41 | val = round(val / 100.0, 4) 42 | return val 43 | 44 | _CURVE_MAPPINGS = { 45 | 'tid': (safe_int, "Id"), 46 | 'date': (get_treasury_date, "NEW_DATE"), 47 | '1month': (get_treasury_rate, "BC_1MONTH"), 48 | '3month': (get_treasury_rate, "BC_3MONTH"), 49 | '6month': (get_treasury_rate, "BC_6MONTH"), 50 | '1year': (get_treasury_rate, "BC_1YEAR"), 51 | '2year': (get_treasury_rate, "BC_2YEAR"), 52 | '3year': (get_treasury_rate, "BC_3YEAR"), 53 | '5year': (get_treasury_rate, "BC_5YEAR"), 54 | '7year': (get_treasury_rate, "BC_7YEAR"), 55 | '10year': (get_treasury_rate, "BC_10YEAR"), 56 | '20year': (get_treasury_rate, "BC_20YEAR"), 57 | '30year': (get_treasury_rate, "BC_30YEAR"), 58 | } 59 | 60 | 61 | def treasury_mappings(mappings): 62 | return {key: Mapping(*value) 63 | for key, value 64 | in mappings.iteritems()} 65 | 66 | 67 | class iter_to_stream(object): 68 | """ 69 | Exposes an iterable as an i/o stream 70 | """ 71 | def __init__(self, iterable): 72 | self.buffered = "" 73 | self.iter = iter(iterable) 74 | 75 | def read(self, size): 76 | result = "" 77 | while size > 0: 78 | data = self.buffered or next(self.iter, None) 79 | self.buffered = "" 80 | if data is None: 81 | break 82 | size -= len(data) 83 | if size < 0: 84 | data, self.buffered = data[:size], data[size:] 85 | result += data 86 | return result 87 | 88 | 89 | def get_localname(element): 90 | qtag = ET.QName(element.tag).text 91 | return re.match("(\{.*\})(.*)", qtag).group(2) 92 | 93 | 94 | def get_treasury_source(): 95 | url = """\ 96 | http://data.treasury.gov/feed.svc/DailyTreasuryYieldCurveRateData\ 97 | """ 98 | res = requests.get(url, stream=True) 99 | stream = iter_to_stream(res.iter_lines()) 100 | 101 | elements = ET.iterparse(stream, ('end', 'start-ns', 'end-ns')) 102 | 103 | namespaces = OrderedDict() 104 | properties_xpath = [''] 105 | 106 | def updated_namespaces(): 107 | if '' in namespaces and 'm' in namespaces: 108 | properties_xpath[0] = "{%s}content/{%s}properties" % ( 109 | namespaces[''], namespaces['m'] 110 | ) 111 | else: 112 | properties_xpath[0] = '' 113 | 114 | for event, element in elements: 115 | if event == 'end': 116 | tag = get_localname(element) 117 | if tag == "entry": 118 | properties = element.find(properties_xpath[0]) 119 | datum = {get_localname(node): node.text 120 | for node in properties.getchildren() 121 | if ET.iselement(node)} 122 | # clear the element after we've dealt with it: 123 | element.clear() 124 | yield datum 125 | 126 | elif event == 'start-ns': 127 | namespaces[element[0]] = element[1] 128 | updated_namespaces() 129 | 130 | elif event == 'end-ns': 131 | namespaces.popitem() 132 | updated_namespaces() 133 | 134 | 135 | def get_treasury_data(): 136 | mappings = treasury_mappings(_CURVE_MAPPINGS) 137 | source = get_treasury_source() 138 | return source_to_records(mappings, source) 139 | 140 | 141 | def dataconverter(s): 142 | try: 143 | return float(s) / 100 144 | except: 145 | return np.nan 146 | 147 | 148 | def get_daily_10yr_treasury_data(): 149 | """Download daily 10 year treasury rates from the Federal Reserve and 150 | return a pandas.Series.""" 151 | url = 
"http://www.federalreserve.gov/datadownload/Output.aspx?rel=H15" \ 152 | "&series=bcb44e57fb57efbe90002369321bfb3f&lastObs=&from=&to=" \ 153 | "&filetype=csv&label=include&layout=seriescolumn" 154 | return pd.read_csv(url, header=5, index_col=0, names=['DATE', 'BC_10YEAR'], 155 | parse_dates=True, converters={1: dataconverter}, 156 | squeeze=True) 157 | -------------------------------------------------------------------------------- /tests/test_algorithm_gen.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Quantopian, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | from unittest import TestCase 18 | from nose.tools import ( 19 | timed, 20 | nottest 21 | ) 22 | 23 | from datetime import datetime 24 | 25 | import pytz 26 | from zipline.finance import trading 27 | from zipline.algorithm import TradingAlgorithm 28 | from zipline.finance import slippage 29 | from zipline.utils import factory 30 | from zipline.utils.test_utils import ( 31 | setup_logger, 32 | teardown_logger 33 | ) 34 | 35 | DEFAULT_TIMEOUT = 15 # seconds 36 | EXTENDED_TIMEOUT = 90 37 | 38 | 39 | class RecordDateSlippage(slippage.FixedSlippage): 40 | def __init__(self, spread): 41 | super(RecordDateSlippage, self).__init__(spread=spread) 42 | self.latest_date = None 43 | 44 | def simulate(self, event, open_orders): 45 | self.latest_date = event['datetime'] 46 | result = super(RecordDateSlippage, self).simulate(event, open_orders) 47 | return result 48 | 49 | 50 | class TestAlgo(TradingAlgorithm): 51 | 52 | def __init__(self, asserter, *args, **kwargs): 53 | super(TestAlgo, self).__init__(*args, **kwargs) 54 | self.asserter = asserter 55 | 56 | def initialize(self, window_length=100): 57 | self.latest_date = None 58 | 59 | self.set_slippage(RecordDateSlippage(spread=0.05)) 60 | self.stocks = [8229] 61 | self.ordered = False 62 | 63 | def handle_data(self, data): 64 | self.latest_date = self.get_datetime() 65 | 66 | if not self.ordered: 67 | for stock in self.stocks: 68 | self.order(stock, 100) 69 | 70 | self.ordered = True 71 | 72 | else: 73 | 74 | self.asserter.assertGreaterEqual( 75 | self.latest_date, 76 | self.slippage.latest_date 77 | ) 78 | 79 | 80 | class AlgorithmGeneratorTestCase(TestCase): 81 | def setUp(self): 82 | setup_logger(self) 83 | 84 | def tearDown(self): 85 | teardown_logger(self) 86 | 87 | @nottest 88 | def test_lse_algorithm(self): 89 | 90 | lse = trading.TradingEnvironment( 91 | bm_symbol='^FTSE', 92 | exchange_tz='Europe/London' 93 | ) 94 | 95 | with lse: 96 | 97 | sim_params = factory.create_simulation_parameters( 98 | start=datetime(2012, 5, 1, tzinfo=pytz.utc), 99 | end=datetime(2012, 6, 30, tzinfo=pytz.utc) 100 | ) 101 | algo = TestAlgo(self, sim_params=sim_params) 102 | trade_source = factory.create_daily_trade_source( 103 | [8229], 104 | 200, 105 | sim_params 106 | ) 107 | algo.set_sources([trade_source]) 108 | 109 | gen = algo.get_generator() 110 | results = 
list(gen) 111 | self.assertEqual(len(results), 42) 112 | # May 7, 2012 was an LSE holiday, confirm the 4th trading 113 | # day was May 8. 114 | self.assertEqual(results[4]['daily_perf']['period_open'], 115 | datetime(2012, 5, 8, 8, 31, tzinfo=pytz.utc)) 116 | 117 | @timed(DEFAULT_TIMEOUT) 118 | def test_generator_dates(self): 119 | """ 120 | Ensure the pipeline of generators are in sync, at least as far as 121 | their current dates. 122 | """ 123 | sim_params = factory.create_simulation_parameters( 124 | start=datetime(2011, 7, 30, tzinfo=pytz.utc), 125 | end=datetime(2012, 7, 30, tzinfo=pytz.utc) 126 | ) 127 | algo = TestAlgo(self, sim_params=sim_params) 128 | trade_source = factory.create_daily_trade_source( 129 | [8229], 130 | 200, 131 | sim_params 132 | ) 133 | algo.set_sources([trade_source]) 134 | 135 | gen = algo.get_generator() 136 | self.assertTrue(list(gen)) 137 | 138 | self.assertTrue(algo.slippage.latest_date) 139 | self.assertTrue(algo.latest_date) 140 | 141 | @timed(DEFAULT_TIMEOUT) 142 | def test_progress(self): 143 | """ 144 | Ensure the pipeline of generators are in sync, at least as far as 145 | their current dates. 146 | """ 147 | sim_params = factory.create_simulation_parameters( 148 | start=datetime(2008, 1, 1, tzinfo=pytz.utc), 149 | end=datetime(2008, 1, 5, tzinfo=pytz.utc) 150 | ) 151 | algo = TestAlgo(self, sim_params=sim_params) 152 | trade_source = factory.create_daily_trade_source( 153 | [8229], 154 | 3, 155 | sim_params 156 | ) 157 | algo.set_sources([trade_source]) 158 | 159 | gen = algo.get_generator() 160 | results = list(gen) 161 | self.assertEqual(results[-2]['progress'], 1.0) 162 | -------------------------------------------------------------------------------- /alephnull/examples/olmar.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logbook 3 | import numpy as np 4 | from datetime import datetime 5 | import pytz 6 | 7 | from alephnull.algorithm import TradingAlgorithm 8 | from alephnull.transforms import MovingAverage 9 | from alephnull.utils.factory import load_from_yahoo 10 | from alephnull.finance import commission 11 | 12 | zipline_logging = logbook.NestedSetup([ 13 | logbook.NullHandler(level=logbook.DEBUG, bubble=True), 14 | logbook.StreamHandler(sys.stdout, level=logbook.INFO), 15 | logbook.StreamHandler(sys.stderr, level=logbook.ERROR), 16 | ]) 17 | zipline_logging.push_application() 18 | 19 | STOCKS = ['AMD', 'CERN', 'COST', 'DELL', 'GPS', 'INTC', 'MMM'] 20 | 21 | 22 | class OLMAR(TradingAlgorithm): 23 | """ 24 | On-Line Portfolio Moving Average Reversion 25 | 26 | More info can be found in the corresponding paper: 27 | http://icml.cc/2012/papers/168.pdf 28 | """ 29 | def initialize(self, eps=1, window_length=5): 30 | self.stocks = STOCKS 31 | self.m = len(self.stocks) 32 | self.price = {} 33 | self.b_t = np.ones(self.m) / self.m 34 | self.last_desired_port = np.ones(self.m) / self.m 35 | self.eps = eps 36 | self.init = True 37 | self.days = 0 38 | self.window_length = window_length 39 | self.add_transform(MovingAverage, 'mavg', ['price'], 40 | window_length=window_length) 41 | 42 | self.set_commission(commission.PerShare(cost=0)) 43 | 44 | def handle_data(self, data): 45 | self.days += 1 46 | if self.days < self.window_length: 47 | return 48 | 49 | if self.init: 50 | self.rebalance_portfolio(data, self.b_t) 51 | self.init = False 52 | return 53 | 54 | m = self.m 55 | 56 | x_tilde = np.zeros(m) 57 | b = np.zeros(m) 58 | 59 | # find relative moving average price for each security 60 | 
for i, stock in enumerate(self.stocks): 61 | price = data[stock].price 62 | # Relative mean deviation 63 | x_tilde[i] = data[stock]['mavg']['price'] / price 64 | 65 | ########################### 66 | # Inside of OLMAR (algo 2) 67 | x_bar = x_tilde.mean() 68 | 69 | # market relative deviation 70 | mark_rel_dev = x_tilde - x_bar 71 | 72 | # Expected return with current portfolio 73 | exp_return = np.dot(self.b_t, x_tilde) 74 | weight = self.eps - exp_return 75 | variability = (np.linalg.norm(mark_rel_dev)) ** 2 76 | 77 | # test for divide-by-zero case 78 | if variability == 0.0: 79 | step_size = 0 80 | else: 81 | step_size = max(0, weight / variability) 82 | 83 | b = self.b_t + step_size * mark_rel_dev 84 | b_norm = simplex_projection(b) 85 | np.testing.assert_almost_equal(b_norm.sum(), 1) 86 | 87 | self.rebalance_portfolio(data, b_norm) 88 | 89 | # update portfolio 90 | self.b_t = b_norm 91 | 92 | def rebalance_portfolio(self, data, desired_port): 93 | # rebalance portfolio 94 | desired_amount = np.zeros_like(desired_port) 95 | current_amount = np.zeros_like(desired_port) 96 | prices = np.zeros_like(desired_port) 97 | 98 | if self.init: 99 | positions_value = self.portfolio.starting_cash 100 | else: 101 | positions_value = self.portfolio.positions_value + \ 102 | self.portfolio.cash 103 | 104 | for i, stock in enumerate(self.stocks): 105 | current_amount[i] = self.portfolio.positions[stock].amount 106 | prices[i] = data[stock].price 107 | 108 | desired_amount = np.round(desired_port * positions_value / prices) 109 | 110 | self.last_desired_port = desired_port 111 | diff_amount = desired_amount - current_amount 112 | 113 | for i, stock in enumerate(self.stocks): 114 | self.order(stock, diff_amount[i]) 115 | 116 | 117 | def simplex_projection(v, b=1): 118 | """Projection vectors to the simplex domain 119 | 120 | Implemented according to the paper: Efficient projections onto the 121 | l1-ball for learning in high dimensions, John Duchi, et al. ICML 2008. 122 | Implementation Time: 2011 June 17 by Bin@libin AT pmail.ntu.edu.sg 123 | Optimization Problem: min_{w}\| w - v \|_{2}^{2} 124 | s.t. sum_{i=1}^{m}=z, w_{i}\geq 0 125 | 126 | Input: A vector v \in R^{m}, and a scalar z > 0 (default=1) 127 | Output: Projection vector w 128 | 129 | :Example: 130 | >>> proj = simplex_projection([.4 ,.3, -.4, .5]) 131 | >>> print proj 132 | array([ 0.33333333, 0.23333333, 0. , 0.43333333]) 133 | >>> print proj.sum() 134 | 1.0 135 | 136 | Original matlab implementation: John Duchi (jduchi@cs.berkeley.edu) 137 | Python-port: Copyright 2013 by Thomas Wiecki (thomas.wiecki@gmail.com). 
138 | """ 139 | 140 | v = np.asarray(v) 141 | p = len(v) 142 | 143 | # Sort v into u in descending order 144 | v = (v > 0) * v 145 | u = np.sort(v)[::-1] 146 | sv = np.cumsum(u) 147 | 148 | rho = np.where(u > (sv - b) / np.arange(1, p + 1))[0][-1] 149 | theta = np.max([0, (sv[rho] - b) / (rho + 1)]) 150 | w = (v - theta) 151 | w[w < 0] = 0 152 | return w 153 | 154 | if __name__ == '__main__': 155 | import pylab as pl 156 | start = datetime(2004, 1, 1, 0, 0, 0, 0, pytz.utc) 157 | end = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc) 158 | data = load_from_yahoo(stocks=STOCKS, indexes={}, start=start, 159 | end=end) 160 | data = data.dropna() 161 | olmar = OLMAR() 162 | results = olmar.run(data) 163 | results.portfolio_value.plot() 164 | pl.show() 165 | -------------------------------------------------------------------------------- /alephnull/utils/tradingcalendar_lse.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | 17 | # References: 18 | # http://www.londonstockexchange.com 19 | # /about-the-exchange/company-overview/business-days/business-days.htm 20 | # http://en.wikipedia.org/wiki/Bank_holiday 21 | # http://www.adviceguide.org.uk/england/work_e/work_time_off_work_e/ 22 | # bank_and_public_holidays.htm 23 | 24 | import pytz 25 | 26 | import pandas as pd 27 | 28 | from datetime import datetime 29 | from dateutil import rrule 30 | from alephnull.utils.tradingcalendar import end 31 | 32 | start = datetime(2002, 1, 1, tzinfo=pytz.utc) 33 | 34 | non_trading_rules = [] 35 | # Weekends 36 | weekends = rrule.rrule( 37 | rrule.YEARLY, 38 | byweekday=(rrule.SA, rrule.SU), 39 | cache=True, 40 | dtstart=start, 41 | until=end 42 | ) 43 | non_trading_rules.append(weekends) 44 | # New Year's Day 45 | new_year = rrule.rrule( 46 | rrule.MONTHLY, 47 | byyearday=1, 48 | cache=True, 49 | dtstart=start, 50 | until=end 51 | ) 52 | # If new years day is on Saturday then Monday 3rd is a holiday 53 | # If new years day is on Sunday then Monday 2nd is a holiday 54 | weekend_new_year = rrule.rrule( 55 | rrule.MONTHLY, 56 | bymonth=1, 57 | bymonthday=[2, 3], 58 | byweekday=(rrule.MO), 59 | cache=True, 60 | dtstart=start, 61 | until=end 62 | ) 63 | non_trading_rules.append(new_year) 64 | non_trading_rules.append(weekend_new_year) 65 | # Good Friday 66 | good_friday = rrule.rrule( 67 | rrule.DAILY, 68 | byeaster=-2, 69 | cache=True, 70 | dtstart=start, 71 | until=end 72 | ) 73 | non_trading_rules.append(good_friday) 74 | # Easter Monday 75 | easter_monday = rrule.rrule( 76 | rrule.DAILY, 77 | byeaster=1, 78 | cache=True, 79 | dtstart=start, 80 | until=end 81 | ) 82 | non_trading_rules.append(easter_monday) 83 | # Early May Bank Holiday (1st Monday in May) 84 | may_bank = rrule.rrule( 85 | rrule.MONTHLY, 86 | bymonth=5, 87 | byweekday=(rrule.MO(1)), 88 | cache=True, 89 | dtstart=start, 90 | until=end 91 | ) 92 | non_trading_rules.append(may_bank) 93 
| # Spring Bank Holiday (Last Monday in May) 94 | spring_bank = rrule.rrule( 95 | rrule.MONTHLY, 96 | bymonth=5, 97 | byweekday=(rrule.MO(-1)), 98 | cache=True, 99 | dtstart=datetime(2003, 1, 1, tzinfo=pytz.utc), 100 | until=end 101 | ) 102 | non_trading_rules.append(spring_bank) 103 | # Summer Bank Holiday (Last Monday in August) 104 | summer_bank = rrule.rrule( 105 | rrule.MONTHLY, 106 | bymonth=8, 107 | byweekday=(rrule.MO(-1)), 108 | cache=True, 109 | dtstart=start, 110 | until=end 111 | ) 112 | non_trading_rules.append(summer_bank) 113 | # Christmas Day 114 | christmas = rrule.rrule( 115 | rrule.MONTHLY, 116 | bymonth=12, 117 | bymonthday=25, 118 | cache=True, 119 | dtstart=start, 120 | until=end 121 | ) 122 | # If christmas day is Saturday Monday 27th is a holiday 123 | # If christmas day is sunday the Tuesday 27th is a holiday 124 | weekend_christmas = rrule.rrule( 125 | rrule.MONTHLY, 126 | bymonth=12, 127 | bymonthday=27, 128 | byweekday=(rrule.MO, rrule.TU), 129 | cache=True, 130 | dtstart=start, 131 | until=end 132 | ) 133 | 134 | non_trading_rules.append(christmas) 135 | non_trading_rules.append(weekend_christmas) 136 | # Boxing Day 137 | boxing_day = rrule.rrule( 138 | rrule.MONTHLY, 139 | bymonth=12, 140 | bymonthday=26, 141 | cache=True, 142 | dtstart=start, 143 | until=end 144 | ) 145 | # If boxing day is saturday then Monday 28th is a holiday 146 | # If boxing day is sunday then Tuesday 28th is a holiday 147 | weekend_boxing_day = rrule.rrule( 148 | rrule.MONTHLY, 149 | bymonth=12, 150 | bymonthday=28, 151 | byweekday=(rrule.MO, rrule.TU), 152 | cache=True, 153 | dtstart=start, 154 | until=end 155 | ) 156 | 157 | non_trading_rules.append(boxing_day) 158 | non_trading_rules.append(weekend_boxing_day) 159 | 160 | non_trading_ruleset = rrule.rruleset() 161 | 162 | # In 2002 May bank holiday was moved to 4th June to follow the Queens 163 | # Golden Jubilee 164 | non_trading_ruleset.exdate(datetime(2002, 9, 27, tzinfo=pytz.utc)) 165 | non_trading_ruleset.rdate(datetime(2002, 6, 3, tzinfo=pytz.utc)) 166 | non_trading_ruleset.rdate(datetime(2002, 6, 4, tzinfo=pytz.utc)) 167 | # TODO: not sure why Feb 18 2008 is not available in the yahoo data 168 | non_trading_ruleset.rdate(datetime(2008, 2, 18, tzinfo=pytz.utc)) 169 | # In 2011 The Friday before Mayday was the Royal Wedding 170 | non_trading_ruleset.rdate(datetime(2011, 4, 29, tzinfo=pytz.utc)) 171 | # In 2012 May bank holiday was moved to 4th June to preceed the Queens 172 | # Diamond Jubilee 173 | non_trading_ruleset.exdate(datetime(2012, 5, 28, tzinfo=pytz.utc)) 174 | non_trading_ruleset.rdate(datetime(2012, 6, 4, tzinfo=pytz.utc)) 175 | non_trading_ruleset.rdate(datetime(2012, 6, 5, tzinfo=pytz.utc)) 176 | 177 | for rule in non_trading_rules: 178 | non_trading_ruleset.rrule(rule) 179 | 180 | non_trading_days = non_trading_ruleset.between(start, end, inc=True) 181 | non_trading_day_index = pd.DatetimeIndex(sorted(non_trading_days)) 182 | 183 | business_days = pd.DatetimeIndex(start=start, end=end, 184 | freq=pd.datetools.BDay()) 185 | 186 | trading_days = business_days - non_trading_day_index 187 | -------------------------------------------------------------------------------- /alephnull/transforms/mavg.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from collections import defaultdict 17 | 18 | from alephnull.transforms.utils import EventWindow, TransformMeta 19 | from alephnull.errors import WrongDataForTransform 20 | 21 | 22 | class MovingAverage(object): 23 | """ 24 | Class that maintains a dictionary from sids to 25 | MovingAverageEventWindows. For each sid, we maintain moving 26 | averages over any number of distinct fields (For example, we can 27 | maintain a sid's average volume as well as its average price.) 28 | """ 29 | __metaclass__ = TransformMeta 30 | 31 | def __init__(self, fields='price', 32 | market_aware=True, window_length=None, delta=None): 33 | 34 | if isinstance(fields, basestring): 35 | fields = [fields] 36 | self.fields = fields 37 | 38 | self.market_aware = market_aware 39 | 40 | self.delta = delta 41 | self.window_length = window_length 42 | 43 | # Market-aware mode only works with full-day windows. 44 | if self.market_aware: 45 | assert self.window_length and not self.delta,\ 46 | "Market-aware mode only works with full-day windows." 47 | 48 | # Non-market-aware mode requires a timedelta. 49 | else: 50 | assert self.delta and not self.window_length, \ 51 | "Non-market-aware mode requires a timedelta." 52 | 53 | # No way to pass arguments to the defaultdict factory, so we 54 | # need to define a method to generate the correct EventWindows. 55 | self.sid_windows = defaultdict(self.create_window) 56 | 57 | def create_window(self): 58 | """ 59 | Factory method for self.sid_windows. 60 | """ 61 | return MovingAverageEventWindow( 62 | self.fields, 63 | self.market_aware, 64 | self.window_length, 65 | self.delta 66 | ) 67 | 68 | def update(self, event): 69 | """ 70 | Update the event window for this event's sid. Return a dict 71 | from tracked fields to moving averages. 72 | """ 73 | # This will create a new EventWindow if this is the first 74 | # message for this sid. 75 | window = self.sid_windows[event.sid] 76 | window.update(event) 77 | return window.get_averages() 78 | 79 | 80 | class Averages(object): 81 | """ 82 | Container for averages. 83 | """ 84 | 85 | def __getitem__(self, name): 86 | """ 87 | Allow dictionary lookup. 88 | """ 89 | return self.__dict__[name] 90 | 91 | 92 | class MovingAverageEventWindow(EventWindow): 93 | """ 94 | Iteratively calculates moving averages for a particular sid over a 95 | given time window. We can maintain averages for arbitrarily many 96 | fields on a single sid. (For example, we might track average 97 | price as well as average volume for a single sid.) The expected 98 | functionality of this class is to be instantiated inside a 99 | MovingAverage transform. 100 | """ 101 | 102 | def __init__(self, fields, market_aware, days, delta): 103 | 104 | # Call the superclass constructor to set up base EventWindow 105 | # infrastructure. 106 | EventWindow.__init__(self, market_aware, days, delta) 107 | 108 | # We maintain a dictionary of totals for each of our tracked 109 | # fields. 110 | self.fields = fields 111 | self.totals = defaultdict(float) 112 | 113 | # Subclass customization for adding new events. 
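# (Editor's note, added comments.) handle_add() and handle_remove() keep a
# running sum per tracked field: when an event enters the window its values
# are added to self.totals, and when it falls out of the window they are
# subtracted. That is what lets average() further down return
# self.totals[field] / len(self.ticks) in O(1) instead of re-summing the
# whole window on every tick.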
114 | def handle_add(self, event): 115 | # Sanity check on the event. 116 | self.assert_required_fields(event) 117 | # Increment our running totals with data from the event. 118 | for field in self.fields: 119 | self.totals[field] += event[field] 120 | 121 | # Subclass customization for removing expired events. 122 | def handle_remove(self, event): 123 | # Decrement our running totals with data from the event. 124 | for field in self.fields: 125 | self.totals[field] -= event[field] 126 | 127 | def average(self, field): 128 | """ 129 | Calculate the average value of our ticks over a single field. 130 | """ 131 | # Sanity check. 132 | assert field in self.fields 133 | 134 | # Averages are None by convention if we have no ticks. 135 | if len(self.ticks) == 0: 136 | return 0.0 137 | 138 | # Calculate and return the average. len(self.ticks) is O(1). 139 | else: 140 | return self.totals[field] / len(self.ticks) 141 | 142 | def get_averages(self): 143 | """ 144 | Return a dict of all our tracked averages. 145 | """ 146 | out = Averages() 147 | for field in self.fields: 148 | out.__dict__[field] = self.average(field) 149 | return out 150 | 151 | def assert_required_fields(self, event): 152 | """ 153 | We only allow events with all of our tracked fields. 154 | """ 155 | for field in self.fields: 156 | if field not in event: 157 | raise WrongDataForTransform( 158 | transform="MovingAverageEventWindow", 159 | fields=self.fields) 160 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. linkcheck to check all external links for integrity 37 | echo. doctest to run all doctests embedded in the documentation if enabled 38 | goto end 39 | ) 40 | 41 | if "%1" == "clean" ( 42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 43 | del /q /s %BUILDDIR%\* 44 | goto end 45 | ) 46 | 47 | if "%1" == "html" ( 48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 49 | if errorlevel 1 exit /b 1 50 | echo. 51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
52 | goto end 53 | ) 54 | 55 | if "%1" == "dirhtml" ( 56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 57 | if errorlevel 1 exit /b 1 58 | echo. 59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 60 | goto end 61 | ) 62 | 63 | if "%1" == "singlehtml" ( 64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 68 | goto end 69 | ) 70 | 71 | if "%1" == "pickle" ( 72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished; now you can process the pickle files. 76 | goto end 77 | ) 78 | 79 | if "%1" == "json" ( 80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished; now you can process the JSON files. 84 | goto end 85 | ) 86 | 87 | if "%1" == "htmlhelp" ( 88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can run HTML Help Workshop with the ^ 92 | .hhp project file in %BUILDDIR%/htmlhelp. 93 | goto end 94 | ) 95 | 96 | if "%1" == "qthelp" ( 97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 101 | .qhcp project file in %BUILDDIR%/qthelp, like this: 102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\QSim.qhcp 103 | echo.To view the help file: 104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\QSim.ghc 105 | goto end 106 | ) 107 | 108 | if "%1" == "devhelp" ( 109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 110 | if errorlevel 1 exit /b 1 111 | echo. 112 | echo.Build finished. 113 | goto end 114 | ) 115 | 116 | if "%1" == "epub" ( 117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 118 | if errorlevel 1 exit /b 1 119 | echo. 120 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 121 | goto end 122 | ) 123 | 124 | if "%1" == "latex" ( 125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 129 | goto end 130 | ) 131 | 132 | if "%1" == "text" ( 133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The text files are in %BUILDDIR%/text. 137 | goto end 138 | ) 139 | 140 | if "%1" == "man" ( 141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 145 | goto end 146 | ) 147 | 148 | if "%1" == "texinfo" ( 149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 150 | if errorlevel 1 exit /b 1 151 | echo. 152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 153 | goto end 154 | ) 155 | 156 | if "%1" == "gettext" ( 157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 158 | if errorlevel 1 exit /b 1 159 | echo. 160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 161 | goto end 162 | ) 163 | 164 | if "%1" == "changes" ( 165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 166 | if errorlevel 1 exit /b 1 167 | echo. 168 | echo.The overview file is in %BUILDDIR%/changes. 
169 | goto end 170 | ) 171 | 172 | if "%1" == "linkcheck" ( 173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 174 | if errorlevel 1 exit /b 1 175 | echo. 176 | echo.Link check complete; look for any errors in the above output ^ 177 | or in %BUILDDIR%/linkcheck/output.txt. 178 | goto end 179 | ) 180 | 181 | if "%1" == "doctest" ( 182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 183 | if errorlevel 1 exit /b 1 184 | echo. 185 | echo.Testing of doctests in the sources finished, look at the ^ 186 | results in %BUILDDIR%/doctest/output.txt. 187 | goto end 188 | ) 189 | 190 | :end 191 | -------------------------------------------------------------------------------- /alephnull/examples/buystockasfuture.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Carter Bain Wealth Management 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | from datetime import datetime 19 | import pytz 20 | 21 | from alephnull.algorithm import TradingAlgorithm 22 | from alephnull.utils.factory import load_from_yahoo 23 | 24 | from pandas.core.series import TimeSeries 25 | 26 | SYMBOL = 'GS' 27 | TRACK = [] 28 | DAT = [None] 29 | DIFFS = [] 30 | SHORTFALL_STRATEGY = "sell" 31 | 32 | 33 | class BuyStock(TradingAlgorithm): 34 | """This is the simplest possible algorithm that does nothing but 35 | buy 1 share of SYMBOL on each event. 36 | """ 37 | 38 | def add_margin(self, data): 39 | # Uses some strategy to get the price at some bar and calculate appropriate 40 | # initial and maintenance margins for that bar. 41 | # Ideally we would use SPAN margining; however, based on some naive data analysis, 42 | # the max a stock changes in a several day period (up to 30 days) is about 42%. 43 | # Change this when you have a better strategy! 
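# (Editor's note, added for clarity.) The two constants below are crude
# placeholder haircuts standing in for exchange-set futures margins: at a
# price of $100 the bar gets initial_margin = $42 and maintenance_margin = $32,
# and both values are attached to the bar so handle_data() can read them
# back like any other field.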
44 | initial_margin = data[SYMBOL]['price'] * 0.42 45 | maintenance_margin = data[SYMBOL]['price'] * 0.32 46 | data[SYMBOL].__dict__.update({'initial_margin': initial_margin}) 47 | data[SYMBOL].__dict__.update({'maintenance_margin': maintenance_margin}) 48 | 49 | def initialize(self, *args, **kwargs): 50 | self._first_pass = True 51 | self.futures_results 52 | 53 | def handle_data(self, data): # overload handle_data() method 54 | DAT[0] = data 55 | self.add_margin(data) 56 | position = self.perf_tracker.cumulative_performance.positions[SYMBOL] 57 | maintenance_margin = data[SYMBOL]['maintenance_margin'] 58 | initial_margin = data[SYMBOL]['initial_margin'] 59 | price = data[SYMBOL].price 60 | 61 | if self._first_pass: 62 | initial_quantity = 50 63 | self.order(SYMBOL, initial_quantity) 64 | position.margin += initial_margin * initial_quantity 65 | print(position.margin) 66 | self._first_pass = False 67 | self.last_price = price 68 | return 69 | else: 70 | DIFFS.append((self.last_price - price) / price) 71 | 72 | 73 | quantity_owned = position.amount 74 | margin = position.margin 75 | # don't ask... 76 | timestamp = next(data[0].iteritems() if type(data) is list else data.iteritems())[1]['datetime'] 77 | 78 | TRACK.append((margin, quantity_owned, timestamp)) 79 | if maintenance_margin * quantity_owned > margin: 80 | if SHORTFALL_STRATEGY == "sell": 81 | TRACK.append("SELL") 82 | # sell enough so that your margin account is back above initial margin for every contract 83 | quantity_to_sell = int(initial_margin * quantity_owned ** 2 / margin - quantity_owned) + 1 84 | self.order(SYMBOL, -1*quantity_to_sell) 85 | if quantity_to_sell == 0: 86 | TRACK.append(str(timestamp) + " had a 0-sell!") 87 | elif SHORTFALL_STRATEGY == "buffer": 88 | # put some more money from elsewhere into the account 89 | pass 90 | elif margin > 1.5*(maintenance_margin * quantity_owned): 91 | # we've got too much in margin - we need to make our money work for us! 
92 | # buy as many contracts as we can until buying another would put us under 93 | # 1.25 * required margin 94 | TRACK.append("BUY") 95 | max_funds_available = margin - 1.25*(maintenance_margin * quantity_owned) 96 | quantity_to_buy = int(max_funds_available / initial_margin) 97 | 98 | 99 | # we don't have to update the margin because the same amount of cash is still in the margin account, 100 | # it is just distributed over a larger number of contracts 101 | if quantity_to_buy == 0: 102 | TRACK.append("0 to buy, what a shame") 103 | else: 104 | self.order(SYMBOL, quantity_to_buy) # order SID (=0) and amount (=1 shares) 105 | 106 | if quantity_to_buy == 0: 107 | TRACK.append(str(timestamp) + " had a 0-sell!") 108 | 109 | self.last_price = price 110 | 111 | 112 | if __name__ == '__main__': 113 | start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc) 114 | end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc) 115 | data = load_from_yahoo(stocks=[SYMBOL], indexes={}, start=start, 116 | end=end, adjusted=True) 117 | simple_algo = BuyStock() 118 | results = simple_algo.run(data) 119 | 120 | ax1 = plt.subplot(211) 121 | ax2 = plt.subplot(212) 122 | TRACK_STRIPPED = [x for x in TRACK if type(x) == tuple] 123 | futures_indexes = [timestamp for (_, _, timestamp) in TRACK_STRIPPED] 124 | futures_quantity_data = [quantity_owned for (_, quantity_owned, _) in TRACK_STRIPPED] 125 | futures_margin_data = [margin for (margin, _, _) in TRACK_STRIPPED] 126 | 127 | futures_margin_series = TimeSeries(index=futures_indexes, data=futures_margin_data) 128 | futures_margin_series.plot(ax=ax1) 129 | futures_quantity_series = TimeSeries(index=futures_indexes, data=futures_quantity_data) 130 | futures_quantity_series.plot(ax=ax2) 131 | 132 | plt.gcf().set_size_inches(18, 8) -------------------------------------------------------------------------------- /alephnull/protocol.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from . utils.protocol_utils import Enum 17 | 18 | # Datasource type should completely determine the other fields of a 19 | # message with its type. 
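# (Editor's note, added for clarity.) Enum is the lightweight helper from
# alephnull.utils.protocol_utils; it is expected to map each name below to a
# small integer constant, so event handling can rely on cheap comparisons
# such as event.type == DATASOURCE_TYPE.TRADE.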
20 | DATASOURCE_TYPE = Enum( 21 | 'AS_TRADED_EQUITY', 22 | 'MERGER', 23 | 'SPLIT', 24 | 'DIVIDEND', 25 | 'TRADE', 26 | 'TRANSACTION', 27 | 'ORDER', 28 | 'EMPTY', 29 | 'DONE', 30 | 'CUSTOM', 31 | 'BENCHMARK', 32 | 'COMMISSION' 33 | ) 34 | 35 | 36 | class Event(object): 37 | 38 | def __init__(self, initial_values=None): 39 | if initial_values: 40 | self.__dict__ = initial_values 41 | 42 | def __getitem__(self, name): 43 | return getattr(self, name) 44 | 45 | def __setitem__(self, name, value): 46 | setattr(self, name, value) 47 | 48 | def __delitem__(self, name): 49 | delattr(self, name) 50 | 51 | def keys(self): 52 | return self.__dict__.keys() 53 | 54 | def __eq__(self, other): 55 | return self.__dict__ == other.__dict__ 56 | 57 | def __contains__(self, name): 58 | return name in self.__dict__ 59 | 60 | def __repr__(self): 61 | return "Event({0})".format(self.__dict__) 62 | 63 | 64 | class Order(Event): 65 | pass 66 | 67 | 68 | class Portfolio(object): 69 | 70 | def __init__(self): 71 | self.capital_used = 0.0 72 | self.starting_cash = 0.0 73 | self.portfolio_value = 0.0 74 | self.pnl = 0.0 75 | self.returns = 0.0 76 | self.cash = 0.0 77 | self.positions = Positions() 78 | self.start_date = None 79 | self.positions_value = 0.0 80 | self.portfolio_value = 0.0 81 | 82 | def __getitem__(self, key): 83 | return self.__dict__[key] 84 | 85 | def __repr__(self): 86 | return "Portfolio({0})".format(self.__dict__) 87 | 88 | 89 | class Position(object): 90 | 91 | def __init__(self, sid, contract=None): 92 | self.sid = sid 93 | if contract is not None: 94 | self.contract = contract 95 | self.amount = 0 96 | self.cost_basis = 0.0 # per share 97 | self.last_sale_price = 0.0 98 | 99 | def __getitem__(self, key): 100 | return self.__dict__[key] 101 | 102 | def __repr__(self): 103 | return "Position({0})".format(self.__dict__) 104 | 105 | 106 | class Positions(dict): 107 | 108 | def __missing__(self, key): 109 | if type(key) is tuple: 110 | pos = Position(key[0], contract=key[1]) 111 | else: 112 | pos = Position(key) 113 | self[key] = pos 114 | return pos 115 | 116 | 117 | 118 | class SIDData(object): 119 | 120 | def __init__(self, initial_values=None): 121 | if initial_values: 122 | self.__dict__ = initial_values 123 | 124 | def __getitem__(self, name): 125 | return self.__dict__[name] 126 | 127 | def __setitem__(self, name, value): 128 | self.__dict__[name] = value 129 | 130 | def __len__(self): 131 | return len(self.__dict__) 132 | 133 | def __contains__(self, name): 134 | return name in self.__dict__ 135 | 136 | def __repr__(self): 137 | return "SIDData({0})".format(self.__dict__) 138 | 139 | 140 | class BarData(object): 141 | """ 142 | Holds the event data for all sids for a given dt. 143 | 144 | This is what is passed as `data` to the `handle_data` function. 145 | 146 | Note: Many methods are analogues of dictionary because of historical 147 | usage of what this replaced as a dictionary subclass. 148 | """ 149 | 150 | def __init__(self): 151 | self._data = {} 152 | self._contains_override = None 153 | 154 | def __contains__(self, name): 155 | if self._contains_override: 156 | if self._contains_override(name): 157 | return name in self._data 158 | else: 159 | return False 160 | else: 161 | return name in self._data 162 | 163 | def has_key(self, name): 164 | """ 165 | DEPRECATED: __contains__ is preferred, but this method is for 166 | compatibility with existing algorithms. 
167 | """ 168 | return name in self 169 | 170 | def __setitem__(self, name, value): 171 | self._data[name] = value 172 | 173 | def __getitem__(self, name): 174 | return self._data[name] 175 | 176 | def __delitem__(self, name): 177 | del self._data[name] 178 | 179 | def __iter__(self): 180 | for sid, data in self._data.iteritems(): 181 | # Allow contains override to filter out sids. 182 | if sid in self: 183 | if len(data): 184 | yield sid 185 | 186 | def iterkeys(self): 187 | # Allow contains override to filter out sids. 188 | return (sid for sid in self._data.iterkeys() if sid in self) 189 | 190 | def keys(self): 191 | # Allow contains override to filter out sids. 192 | return list(self.iterkeys()) 193 | 194 | def itervalues(self): 195 | return (value for sid, value in self.iteritems()) 196 | 197 | def values(self): 198 | return list(self.itervalues()) 199 | 200 | def iteritems(self): 201 | return ((sid, value) for sid, value 202 | in self._data.iteritems() 203 | if sid in self) 204 | 205 | def items(self): 206 | return list(self.iteritems()) 207 | 208 | def __len__(self): 209 | return len(self.keys()) 210 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/QSim.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/QSim.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/QSim" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/QSim" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 
143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 154 | -------------------------------------------------------------------------------- /alephnull/examples/FuturesTradingAlgorithm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright 2013 Carter Bain Wealth Management 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import matplotlib.pyplot as plt 18 | from datetime import datetime 19 | import pytz 20 | import random 21 | 22 | from alephnull.algorithm import TradingAlgorithm 23 | from alephnull.utils.factory import load_from_yahoo 24 | 25 | from collections import OrderedDict 26 | from pandas.core.series import TimeSeries 27 | 28 | 29 | class FuturesTradingAlgorithm(TradingAlgorithm): 30 | """A wrapper around TradingAlgorithm that adds margin calculations for futures contracts. 31 | 32 | For subclasses to work correctly, you must do three things: 33 | 34 | Create a method _handle_margin_call(self) that is executed when the margin account falls below maintenance margin. 35 | Instead of handle_data(), implement handle_futures_data(). 36 | Instead of initialize(), implement initialize_futures(). 37 | 38 | """ 39 | 40 | def add_margin_to_bars(self, data): 41 | # Uses a placeholder strategy: take the price at each bar and derive 42 | # initial and maintenance margins from it. 43 | # Ideally we would use SPAN margining; instead, based on some naive data analysis, 44 | # we assume the most a stock moves over a several-day period (up to 30 days) is about 42%. 45 | # Change this when you have a better strategy!
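# (Editor's note, added for clarity.) Unlike the single-symbol example in
# buystockasfuture.py, this wrapper stamps margins onto every bar in the loop
# below, and handle_data() further down marks the margin account to market on
# each price move before delegating to the subclass's handle_futures_data().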
46 | for symbol, measures in data.iteritems(): 47 | initial_margin = measures['price'] * 0.42 48 | maintenance_margin = measures['price'] * 0.32 49 | measures.__dict__.update({'initial_margin': initial_margin}) 50 | measures.__dict__.update({'maintenance_margin': maintenance_margin}) 51 | 52 | def initialize(self, *args, **kwargs): 53 | self._margin_account_log = OrderedDict() 54 | self.margin_account_value = 100000 55 | self.last_prices = {} 56 | self.initialize_futures(*args, **kwargs) 57 | # self.max_leverage = 1.5 58 | 59 | def handle_data(self, data): 60 | 61 | self.add_margin_to_bars(data) 62 | self.total_maintenance_margin = 0 63 | 64 | # update margin account 65 | for symbol, measures in data.iteritems(): 66 | position = self.perf_tracker.cumulative_performance.positions[symbol] 67 | last_price = self.last_prices.get(symbol) 68 | price = measures['price'] 69 | if last_price is not None: 70 | self.margin_account_value += (price - last_price) * position.amount 71 | self.last_prices[symbol] = price 72 | self.total_maintenance_margin += measures['maintenance_margin'] 73 | 74 | timestamp = next(data[0].iteritems() if type(data) is list else data.iteritems())[1]['datetime'] 75 | 76 | self._margin_account_log[timestamp] = self.margin_account_value 77 | 78 | if self.margin_account_value < self.total_maintenance_margin: 79 | self._handle_margin_call() 80 | self.handle_futures_data(data) 81 | 82 | def order(self, sid, amount, initial_margin, limit_price=None, stop_price=None): 83 | # TODO: get rid of the initial_margin parameter when we can figure that out from inside this method 84 | # Check if there's enough in the margin account to cover initial margin 85 | if self.margin_account_value > self.total_maintenance_margin + initial_margin * amount: 86 | TradingAlgorithm.order(self, sid, amount, limit_price, stop_price) 87 | else: 88 | # there shouldn't be an exception here, right? 
89 | # TODO: log once you figure out how zipline's logging works 90 | pass 91 | 92 | def handle_futures_data(self): 93 | """Up to subclasses to implement""" 94 | pass 95 | 96 | def initialize_futures(self, *args, **kwargs): 97 | """Up to subclasses to implement""" 98 | pass 99 | 100 | def _handle_margin_call(self): 101 | """Up to subclasses to implement, though this class does provide a few premade procedures 102 | like _liquidate_random_positions""" 103 | pass 104 | 105 | def _liquidate_random_positions(self): 106 | """Liquidate an entire position (the position in particular is chosen at random) until we are back above 107 | maintenance margin.""" 108 | while self.margin_account_value < self.total_maintenance_margin: 109 | positions_as_list = self.perf_tracker.cumulative_performance.positions.items()[:] 110 | chosen_symbol, chosen_position = positions_as_list[random.randint(0, len(positions_as_list) - 1)] 111 | TradingAlgorithm.order(self, chosen_symbol, chosen_position.amount) 112 | positions_as_list.remove((chosen_symbol, chosen_position)) 113 | 114 | self.total_maintenance_margin = sum( 115 | [position.last_sale_price * 0.32 * position.amount for symbol, position in positions_as_list]) 116 | 117 | @property 118 | def margin_account_log(self): 119 | return TimeSeries(self._margin_account_log) 120 | 121 | 122 | class BuyGoogleAsFuture(FuturesTradingAlgorithm): 123 | 124 | def initialize_futures(self, *args, **kwargs): 125 | pass 126 | 127 | def handle_futures_data(self, data): 128 | self.order("GOOG", 1, initial_margin=data['GOOG']['initial_margin']) 129 | 130 | def _handle_margin_call(self): 131 | self._liquidate_random_positions() 132 | 133 | if __name__ == '__main__': 134 | start = datetime(2008, 1, 1, 0, 0, 0, 0, pytz.utc) 135 | end = datetime(2013, 1, 1, 0, 0, 0, 0, pytz.utc) 136 | data = load_from_yahoo(stocks=["GOOG"], indexes={}, start=start, 137 | end=end, adjusted=True) 138 | simple_algo = BuyGoogleAsFuture() 139 | results = simple_algo.run(data) 140 | 141 | ax1 = plt.subplot(211) 142 | futures_indexes = list(simple_algo.margin_account_log.keys()) 143 | futures_margin_data = list(simple_algo.margin_account_log.values) 144 | 145 | futures_margin_series = TimeSeries(index=futures_indexes, data=futures_margin_data) 146 | futures_margin_series.plot(ax=ax1) 147 | 148 | ax2 = plt.subplot(212, sharex=ax1) 149 | data.GOOG.plot(ax=ax2) 150 | 151 | plt.gcf().set_size_inches(18, 8) -------------------------------------------------------------------------------- /alephnull/finance/risk/report.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2013 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | """ 17 | 18 | Risk Report 19 | =========== 20 | 21 | +-----------------+----------------------------------------------------+ 22 | | key | value | 23 | +=================+====================================================+ 24 | | trading_days | The number of trading days between self.start_date | 25 | | | and self.end_date | 26 | +-----------------+----------------------------------------------------+ 27 | | benchmark_volat\| The volatility of the benchmark between | 28 | | ility | self.start_date and self.end_date. | 29 | +-----------------+----------------------------------------------------+ 30 | | algo_volatility | The volatility of the algo between self.start_date | 31 | | | and self.end_date. | 32 | +-----------------+----------------------------------------------------+ 33 | | treasury_period\| The return of treasuries over the period. Treasury | 34 | | _return | maturity is chosen to match the duration of the | 35 | | | test period. | 36 | +-----------------+----------------------------------------------------+ 37 | | sharpe | The sharpe ratio based on the _algorithm_ (rather | 38 | | | than the static portfolio) returns. | 39 | +-----------------+----------------------------------------------------+ 40 | | information | The information ratio based on the _algorithm_ | 41 | | | (rather than the static portfolio) returns. | 42 | +-----------------+----------------------------------------------------+ 43 | | beta | The _algorithm_ beta to the benchmark. | 44 | +-----------------+----------------------------------------------------+ 45 | | alpha | The _algorithm_ alpha to the benchmark. | 46 | +-----------------+----------------------------------------------------+ 47 | | excess_return | The excess return of the algorithm over the | 48 | | | treasuries. | 49 | +-----------------+----------------------------------------------------+ 50 | | max_drawdown | The largest relative peak to relative trough move | 51 | | | for the portfolio returns between self.start_date | 52 | | | and self.end_date. | 53 | +-----------------+----------------------------------------------------+ 54 | 55 | 56 | """ 57 | 58 | import logbook 59 | import datetime 60 | from dateutil.relativedelta import relativedelta 61 | 62 | from . period import RiskMetricsPeriod 63 | 64 | log = logbook.Logger('Risk Report') 65 | 66 | 67 | class RiskReport(object): 68 | def __init__(self, algorithm_returns, sim_params, benchmark_returns=None): 69 | """ 70 | algorithm_returns needs to be a list of daily_return objects 71 | sorted in date ascending order 72 | """ 73 | 74 | self.algorithm_returns = algorithm_returns 75 | self.sim_params = sim_params 76 | self.benchmark_returns = benchmark_returns 77 | 78 | if len(self.algorithm_returns) == 0: 79 | start_date = self.sim_params.period_start 80 | end_date = self.sim_params.period_end 81 | else: 82 | start_date = self.algorithm_returns.index[0] 83 | end_date = self.algorithm_returns.index[-1] 84 | 85 | self.month_periods = self.periods_in_range(1, start_date, end_date) 86 | self.three_month_periods = self.periods_in_range(3, start_date, 87 | end_date) 88 | self.six_month_periods = self.periods_in_range(6, start_date, end_date) 89 | self.year_periods = self.periods_in_range(12, start_date, end_date) 90 | 91 | def to_dict(self): 92 | """ 93 | RiskMetrics are calculated for rolling windows in four lengths:: 94 | - 1_month 95 | - 3_month 96 | - 6_month 97 | - 12_month 98 | 99 | The return value of this funciton is a dictionary keyed by the above 100 | list of durations. 
The value of each entry is a list of RiskMetric 101 | dicts of the same duration as denoted by the top_level key. 102 | 103 | See :py:meth:`RiskMetrics.to_dict` for the detailed list of fields 104 | provided for each period. 105 | """ 106 | return { 107 | 'one_month': [x.to_dict() for x in self.month_periods], 108 | 'three_month': [x.to_dict() for x in self.three_month_periods], 109 | 'six_month': [x.to_dict() for x in self.six_month_periods], 110 | 'twelve_month': [x.to_dict() for x in self.year_periods], 111 | } 112 | 113 | def periods_in_range(self, months_per, start, end): 114 | one_day = datetime.timedelta(days=1) 115 | ends = [] 116 | cur_start = start.replace(day=1) 117 | 118 | # in edge cases (all sids filtered out, start/end are adjacent) 119 | # a test will not generate any returns data 120 | if len(self.algorithm_returns) == 0: 121 | return ends 122 | 123 | # ensure that we have an end at the end of a calendar month, in case 124 | # the return series ends mid-month... 125 | the_end = end.replace(day=1) + relativedelta(months=1) - one_day 126 | while True: 127 | cur_end = cur_start + relativedelta(months=months_per) - one_day 128 | if(cur_end > the_end): 129 | break 130 | cur_period_metrics = RiskMetricsPeriod( 131 | start_date=cur_start, 132 | end_date=cur_end, 133 | returns=self.algorithm_returns, 134 | benchmark_returns=self.benchmark_returns 135 | ) 136 | 137 | ends.append(cur_period_metrics) 138 | cur_start = cur_start + relativedelta(months=1) 139 | 140 | return ends 141 | --------------------------------------------------------------------------------
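The rolling windows described in the RiskReport docstring above are easiest to see with a concrete run. The short standalone sketch below (an editor's illustration, not part of the package) reproduces only the date arithmetic of periods_in_range() and prints the window boundaries it would hand to RiskMetricsPeriod; the helper name rolling_windows is hypothetical.

# Sketch of the rolling-window date arithmetic used by RiskReport.periods_in_range().
import datetime

from dateutil.relativedelta import relativedelta


def rolling_windows(months_per, start, end):
    one_day = datetime.timedelta(days=1)
    windows = []
    cur_start = start.replace(day=1)
    # Pad the end date out to the last day of its calendar month, just as
    # periods_in_range() does, so a series ending mid-month still closes a window.
    the_end = end.replace(day=1) + relativedelta(months=1) - one_day
    while True:
        cur_end = cur_start + relativedelta(months=months_per) - one_day
        if cur_end > the_end:
            break
        windows.append((cur_start, cur_end))
        cur_start = cur_start + relativedelta(months=1)
    return windows


if __name__ == '__main__':
    # Five overlapping three-month windows between mid-January and mid-July 2010:
    # Jan-Mar, Feb-Apr, Mar-May, Apr-Jun, May-Jul.
    for window in rolling_windows(3, datetime.date(2010, 1, 15),
                                  datetime.date(2010, 7, 10)):
        print(window)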