├── code ├── data │ ├── SX5E_V6I1.h5 │ ├── var_data.h5 │ ├── vstoxx_march_2014.h5 │ ├── index_option_series.h5 │ ├── index_option_series_.h5 │ ├── simulation_results.h5 │ ├── vstoxx_data_31032014.h5 │ ├── vstoxx_option_quotes.h5 │ ├── index_option_series__.h5 │ └── hist_EONIA_2015.csv ├── images │ ├── dx_srjd_cali_1.png │ ├── dx_srjd_cali_2.png │ ├── dx_srjd_cali_5.png │ ├── dx_srjd_cali_1_.png │ ├── dx_srjd_cali_1_fit.png │ ├── dx_srjd_cali_1_fit_.png │ ├── dx_srjd_cali_1_hist.png │ ├── dx_srjd_cali_2_fit.png │ ├── dx_srjd_cali_2_hist.png │ ├── dx_srjd_cali_5_fit.png │ ├── dx_srjd_cali_5_hist.png │ ├── dx_srjd_cali_1_hist_.png │ ├── dx_srd_cali_1_2014-04-18.png │ ├── dx_srd_cali_1_2014-04-18_.png │ ├── dx_srd_cali_1_hist_2014-04-18.png │ └── dx_srd_cali_1_hist_2014-04-18_.png ├── scripts │ ├── variance_swaps.py │ ├── srjd_fwd_calibration.py │ ├── index_vstoxx_calculation.py │ ├── index_date_functions.py │ ├── srjd_simulation.py │ ├── srd_simulation_results.py │ ├── srd_functions.py │ ├── srd_model_calibration.py │ ├── srjd_model_calibration.py │ ├── index_subindex_calculation.py │ └── index_collect_option_data.py └── 03_simple_spanning_with_options.ipynb ├── legacy ├── python2 │ ├── data │ │ ├── SX5E_V6I1.h5 │ │ ├── es_vs_data.h5 │ │ ├── var_data.h5 │ │ ├── hist_EONIA_2015.xlsx │ │ ├── vstoxx_march_2014.h5 │ │ ├── hist_EURIBOR_2014.xlsx │ │ ├── hist_EURIBOR_2015.xlsx │ │ ├── index_option_series.h5 │ │ ├── simulation_results.h5 │ │ ├── vstoxx_data_31032014.h5 │ │ ├── vstoxx_option_quotes.h5 │ │ ├── __index_option_series.h5 │ │ ├── index_option_series_2015.h5 │ │ ├── varphi │ │ └── hist_EONIA_2015.csv │ ├── scripts │ │ ├── variance_swaps.py │ │ ├── srjd_fwd_calibration.py │ │ ├── index_vstoxx_calculation.py │ │ ├── index_date_functions.py │ │ ├── srjd_simulation.py │ │ ├── srd_simulation_results.py │ │ ├── srd_functions.py │ │ ├── srd_model_calibration.py │ │ ├── srjd_model_calibration.py │ │ ├── index_subindex_calculation.py │ │ ├── index_collect_option_data.py │ │ └── srd_simulation_analysis.py │ ├── 08_Terms_Volatility.ipynb │ ├── 14_DX_SRJD_Calibration.ipynb │ └── 01_Introduction.ipynb └── python3 │ ├── data │ ├── SX5E_V6I1.h5 │ ├── var_data.h5 │ ├── vstoxx_march_2014.h5 │ ├── index_option_series.h5 │ ├── index_option_series_.h5 │ ├── simulation_results.h5 │ ├── vstoxx_data_31032014.h5 │ ├── vstoxx_option_quotes.h5 │ ├── index_option_series__.h5 │ └── hist_EONIA_2015.csv │ ├── images │ ├── dx_srjd_cali_1.png │ ├── dx_srjd_cali_2.png │ ├── dx_srjd_cali_5.png │ ├── dx_srjd_cali_1_.png │ ├── dx_srjd_cali_1_fit.png │ ├── dx_srjd_cali_1_fit_.png │ ├── dx_srjd_cali_1_hist.png │ ├── dx_srjd_cali_2_fit.png │ ├── dx_srjd_cali_2_hist.png │ ├── dx_srjd_cali_5_fit.png │ ├── dx_srjd_cali_5_hist.png │ ├── dx_srjd_cali_1_hist_.png │ ├── dx_srd_cali_1_2014-04-18.png │ ├── dx_srd_cali_1_2014-04-18_.png │ ├── dx_srd_cali_1_hist_2014-04-18.png │ └── dx_srd_cali_1_hist_2014-04-18_.png │ ├── scripts │ ├── variance_swaps.py │ ├── srjd_fwd_calibration.py │ ├── index_vstoxx_calculation.py │ ├── index_date_functions.py │ ├── srjd_simulation.py │ ├── srd_simulation_results.py │ ├── srd_functions.py │ ├── srd_model_calibration.py │ ├── srjd_model_calibration.py │ ├── index_subindex_calculation.py │ └── index_collect_option_data.py │ └── 03_simple_spanning_with_options.ipynb ├── LICENSE.txt ├── .gitignore └── README.md /code/data/SX5E_V6I1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/SX5E_V6I1.h5 
-------------------------------------------------------------------------------- /code/data/var_data.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/var_data.h5 -------------------------------------------------------------------------------- /code/data/vstoxx_march_2014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/vstoxx_march_2014.h5 -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_2.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_5.png -------------------------------------------------------------------------------- /code/data/index_option_series.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/index_option_series.h5 -------------------------------------------------------------------------------- /code/data/index_option_series_.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/index_option_series_.h5 -------------------------------------------------------------------------------- /code/data/simulation_results.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/simulation_results.h5 -------------------------------------------------------------------------------- /code/data/vstoxx_data_31032014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/vstoxx_data_31032014.h5 -------------------------------------------------------------------------------- /code/data/vstoxx_option_quotes.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/vstoxx_option_quotes.h5 -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1_.png -------------------------------------------------------------------------------- /legacy/python2/data/SX5E_V6I1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/SX5E_V6I1.h5 -------------------------------------------------------------------------------- /legacy/python2/data/es_vs_data.h5: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/es_vs_data.h5 -------------------------------------------------------------------------------- /legacy/python2/data/var_data.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/var_data.h5 -------------------------------------------------------------------------------- /legacy/python3/data/SX5E_V6I1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/SX5E_V6I1.h5 -------------------------------------------------------------------------------- /legacy/python3/data/var_data.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/var_data.h5 -------------------------------------------------------------------------------- /code/data/index_option_series__.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/data/index_option_series__.h5 -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1_fit.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1_fit_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1_fit_.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1_hist.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_2_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_2_fit.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_2_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_2_hist.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_5_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_5_fit.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_5_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_5_hist.png -------------------------------------------------------------------------------- /code/images/dx_srjd_cali_1_hist_.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srjd_cali_1_hist_.png -------------------------------------------------------------------------------- /code/images/dx_srd_cali_1_2014-04-18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srd_cali_1_2014-04-18.png -------------------------------------------------------------------------------- /legacy/python2/data/hist_EONIA_2015.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/hist_EONIA_2015.xlsx -------------------------------------------------------------------------------- /legacy/python2/data/vstoxx_march_2014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/vstoxx_march_2014.h5 -------------------------------------------------------------------------------- /legacy/python3/data/vstoxx_march_2014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/vstoxx_march_2014.h5 -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_2.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_5.png -------------------------------------------------------------------------------- /code/images/dx_srd_cali_1_2014-04-18_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srd_cali_1_2014-04-18_.png -------------------------------------------------------------------------------- /legacy/python2/data/hist_EURIBOR_2014.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/hist_EURIBOR_2014.xlsx -------------------------------------------------------------------------------- /legacy/python2/data/hist_EURIBOR_2015.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/hist_EURIBOR_2015.xlsx -------------------------------------------------------------------------------- /legacy/python2/data/index_option_series.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/index_option_series.h5 -------------------------------------------------------------------------------- /legacy/python2/data/simulation_results.h5: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/simulation_results.h5 -------------------------------------------------------------------------------- /legacy/python2/data/vstoxx_data_31032014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/vstoxx_data_31032014.h5 -------------------------------------------------------------------------------- /legacy/python2/data/vstoxx_option_quotes.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/vstoxx_option_quotes.h5 -------------------------------------------------------------------------------- /legacy/python3/data/index_option_series.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/index_option_series.h5 -------------------------------------------------------------------------------- /legacy/python3/data/index_option_series_.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/index_option_series_.h5 -------------------------------------------------------------------------------- /legacy/python3/data/simulation_results.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/simulation_results.h5 -------------------------------------------------------------------------------- /legacy/python3/data/vstoxx_data_31032014.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/vstoxx_data_31032014.h5 -------------------------------------------------------------------------------- /legacy/python3/data/vstoxx_option_quotes.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/vstoxx_option_quotes.h5 -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1_.png -------------------------------------------------------------------------------- /code/images/dx_srd_cali_1_hist_2014-04-18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srd_cali_1_hist_2014-04-18.png -------------------------------------------------------------------------------- /legacy/python2/data/__index_option_series.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/__index_option_series.h5 -------------------------------------------------------------------------------- /legacy/python3/data/index_option_series__.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/data/index_option_series__.h5 -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1_fit.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1_fit_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1_fit_.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1_hist.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_2_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_2_fit.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_2_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_2_hist.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_5_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_5_fit.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_5_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_5_hist.png -------------------------------------------------------------------------------- /code/images/dx_srd_cali_1_hist_2014-04-18_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/code/images/dx_srd_cali_1_hist_2014-04-18_.png -------------------------------------------------------------------------------- /legacy/python2/data/index_option_series_2015.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python2/data/index_option_series_2015.h5 -------------------------------------------------------------------------------- /legacy/python3/images/dx_srjd_cali_1_hist_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srjd_cali_1_hist_.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srd_cali_1_2014-04-18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srd_cali_1_2014-04-18.png 
-------------------------------------------------------------------------------- /legacy/python3/images/dx_srd_cali_1_2014-04-18_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srd_cali_1_2014-04-18_.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srd_cali_1_hist_2014-04-18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srd_cali_1_hist_2014-04-18.png -------------------------------------------------------------------------------- /legacy/python3/images/dx_srd_cali_1_hist_2014-04-18_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yhilpisch/lvvd/master/legacy/python3/images/dx_srd_cali_1_hist_2014-04-18_.png -------------------------------------------------------------------------------- /legacy/python2/data/varphi: -------------------------------------------------------------------------------- 1 | (dp0 2 | S'ttms' 3 | p1 4 | cnumpy.core.multiarray 5 | _reconstruct 6 | p2 7 | (cnumpy 8 | ndarray 9 | p3 10 | (I0 11 | tp4 12 | S'b' 13 | p5 14 | tp6 15 | Rp7 16 | (I1 17 | (I9 18 | tp8 19 | cnumpy 20 | dtype 21 | p9 22 | (S'f8' 23 | p10 24 | I0 25 | I1 26 | tp11 27 | Rp12 28 | (I3 29 | S'<' 30 | p13 31 | NNNI-1 32 | I-1 33 | I0 34 | tp14 35 | bI00 36 | S'\x00\x00\x00\x00\x00\x00\x00\x00J\x0c\x02+\x87\x16\xa9?T\xe3\xa5\x9b\xc4 \xc0?\xd1"\xdb\xf9~j\xcc?\x89A`\xe5\xd0"\xd3?\x00\x00\x00\x00\x00\x00\xd8?\xbe\x9f\x1a/\xdd$\xde?\xf0\xa7\xc6K7\x89\xe1?\xcf\xf7S\xe3\xa5\x9b\xe4?' 37 | p15 38 | tp16 39 | bsS'varphi' 40 | p17 41 | g2 42 | (g3 43 | (I0 44 | tp18 45 | g5 46 | tp19 47 | Rp20 48 | (I1 49 | (I9 50 | tp21 51 | g12 52 | I00 53 | S'\x00\x00\x00\x00\x00\x00\x00\x00\xa0\x97\x1f-\x81\xae\xe0?@#T\x02\x82\x96\xd4\xbf\x00\xbc\xa5s\\\xa4\x99?\x000\x86\xbc\xe0!\x81\xbf\x00\x88\x0e\x13\xe7z\x83\xbf\x00\x10\xce\xf2\x87\x07t?\x00\x00\xb0\x8d\xe0\xd3\xaa?\x00T\xe6\xfe\xcc\x88\x93\xbf' 54 | p22 55 | tp23 56 | bs. -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | All the contents as well as the codes, Jupyter Notebooks and other materials in this Github repository and on the Quant Platform (http://lvvd.pqp.io) related to Python for Algorithmic Trading (book, course, class, program) by Dr. Yves J. Hilpisch (The Python Quants GmbH) are copyrighted and only intended for personal use. 2 | 3 | Any kind of sharing, distribution, duplication, etc. without written permission by the The Python Quants GmbH is prohibited. 4 | 5 | The contents, Python code, Jupyter Notebooks and other materials come without warranties or representations, to the extent permitted by applicable law. 6 | 7 | Notice that the code provided might be work in progress and that substantial additions, changes, updates, etc. can take place in the future. It is advised to regularly check for updates. 8 | 9 | None of the material represents any kind of recommendation or investment advice. The material is only meant as a technical illustration. Leveraged and unleveraged trading of financial instruments, and options and futures in particular, involves a number of risks. Make sure to understand and manage these risks. 10 | 11 | (c) Dr. Yves J. 
Hilpisch | The Python Quants GmbH | February 2021 12 | -------------------------------------------------------------------------------- /code/scripts/variance_swaps.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Variance Swaps Examples 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | 12 | def generate_path(S0, r, sigma, T, M, seed=100000): 13 | ''' Function to simulate a geometric Brownian motion. 14 | 15 | Parameters 16 | ========== 17 | S0: float 18 | initial index level 19 | r: float 20 | constant risk-less short rate 21 | sigma: float 22 | instantaneous volatility 23 | T: float 24 | date of maturity (in year fractions) 25 | M: int 26 | number of time intervals 27 | 28 | Returns 29 | ======= 30 | path: pandas DataFrame object 31 | simulated path 32 | ''' 33 | # length of time interval 34 | dt = float(T) / M 35 | # random numbers 36 | np.random.seed(seed) 37 | rn = np.random.standard_normal(M + 1) 38 | rn[0] = 0 # to keep the initial value 39 | # simulation of path 40 | path = S0 * np.exp(np.cumsum((r - 0.5 * sigma ** 2) * dt 41 | + sigma * math.sqrt(dt) * rn)) 42 | # setting initial value 43 | path = pd.DataFrame(path, columns=['index']) 44 | return path 45 | -------------------------------------------------------------------------------- /legacy/python2/scripts/variance_swaps.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Variance Swaps Examples 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | 12 | def generate_path(S0, r, sigma, T, M): 13 | ''' Function to simulate a geometric Brownian motion. 14 | 15 | Parameters 16 | ========== 17 | S0: float 18 | initial index level 19 | r: float 20 | constant risk-less short rate 21 | sigma: float 22 | instantaneous volatility 23 | T: float 24 | date of maturity (in year fractions) 25 | M: int 26 | number of time intervals 27 | 28 | Returns 29 | ======= 30 | path: pandas DataFrame object 31 | simulated path 32 | ''' 33 | # length of time interval 34 | dt = float(T) / M 35 | # random numbers 36 | np.random.seed(100000) 37 | rn = np.random.standard_normal(M + 1) 38 | rn[0] = 0 # to keep the initial value 39 | # simulation of path 40 | path = S0 * np.exp(np.cumsum((r - 0.5 * sigma ** 2) * dt 41 | + sigma * math.sqrt(dt) * rn)) 42 | # setting initial value 43 | path = pd.DataFrame(path, columns=['index']) 44 | return path 45 | -------------------------------------------------------------------------------- /legacy/python3/scripts/variance_swaps.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Variance Swaps Examples 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | 12 | def generate_path(S0, r, sigma, T, M, seed=100000): 13 | ''' Function to simulate a geometric Brownian motion. 
14 | 15 | Parameters 16 | ========== 17 | S0: float 18 | initial index level 19 | r: float 20 | constant risk-less short rate 21 | sigma: float 22 | instantaneous volatility 23 | T: float 24 | date of maturity (in year fractions) 25 | M: int 26 | number of time intervals 27 | 28 | Returns 29 | ======= 30 | path: pandas DataFrame object 31 | simulated path 32 | ''' 33 | # length of time interval 34 | dt = float(T) / M 35 | # random numbers 36 | np.random.seed(seed) 37 | rn = np.random.standard_normal(M + 1) 38 | rn[0] = 0 # to keep the initial value 39 | # simulation of path 40 | path = S0 * np.exp(np.cumsum((r - 0.5 * sigma ** 2) * dt 41 | + sigma * math.sqrt(dt) * rn)) 42 | # setting initial value 43 | path = pd.DataFrame(path, columns=['index']) 44 | return path 45 | -------------------------------------------------------------------------------- /code/data/hist_EONIA_2015.csv: -------------------------------------------------------------------------------- 1 | ,EONIA 2 | 21/04/2015,-0.084 3 | 22/04/2015,-0.077 4 | 23/04/2015,-0.08 5 | 24/04/2015,-0.081 6 | 27/04/2015,-0.081 7 | 28/04/2015,-0.079 8 | 29/04/2015,-0.072 9 | 30/04/2015,-0.027 10 | 04/05/2015,-0.084 11 | 05/05/2015,-0.085 12 | 06/05/2015,-0.088 13 | 07/05/2015,-0.089 14 | 08/05/2015,-0.087 15 | 11/05/2015,-0.089 16 | 12/05/2015,-0.091 17 | 13/05/2015,-0.137 18 | 14/05/2015,-0.143 19 | 15/05/2015,-0.11 20 | 18/05/2015,-0.105 21 | 19/05/2015,-0.106 22 | 20/05/2015,-0.105 23 | 21/05/2015,-0.106 24 | 22/05/2015,-0.14 25 | 25/05/2015,-0.143 26 | 26/05/2015,-0.109 27 | 27/05/2015,-0.114 28 | 28/05/2015,-0.104 29 | 29/05/2015,-0.08 30 | 01/06/2015,-0.106 31 | 02/06/2015,-0.122 32 | 03/06/2015,-0.143 33 | 04/06/2015,-0.138 34 | 05/06/2015,-0.115 35 | 08/06/2015,-0.127 36 | 09/06/2015,-0.126 37 | 10/06/2015,-0.117 38 | 11/06/2015,-0.12 39 | 12/06/2015,-0.125 40 | 15/06/2015,-0.119 41 | 16/06/2015,-0.125 42 | 17/06/2015,-0.11 43 | 18/06/2015,-0.118 44 | 19/06/2015,-0.12 45 | 22/06/2015,-0.127 46 | 23/06/2015,-0.121 47 | 24/06/2015,-0.111 48 | 25/06/2015,-0.116 49 | 26/06/2015,-0.123 50 | 29/06/2015,-0.118 51 | 30/06/2015,-0.06 52 | 01/07/2015,-0.123 53 | 02/07/2015,-0.121 54 | 03/07/2015,-0.116 55 | 06/07/2015,-0.122 56 | 07/07/2015,-0.12 57 | 08/07/2015,-0.118 58 | 09/07/2015,-0.121 59 | 10/07/2015,-0.12 60 | 13/07/2015,-0.118 61 | 14/07/2015,-0.112 62 | -------------------------------------------------------------------------------- /legacy/python2/data/hist_EONIA_2015.csv: -------------------------------------------------------------------------------- 1 | ,EONIA 2 | 21/04/2015,-0.084 3 | 22/04/2015,-0.077 4 | 23/04/2015,-0.08 5 | 24/04/2015,-0.081 6 | 27/04/2015,-0.081 7 | 28/04/2015,-0.079 8 | 29/04/2015,-0.072 9 | 30/04/2015,-0.027 10 | 04/05/2015,-0.084 11 | 05/05/2015,-0.085 12 | 06/05/2015,-0.088 13 | 07/05/2015,-0.089 14 | 08/05/2015,-0.087 15 | 11/05/2015,-0.089 16 | 12/05/2015,-0.091 17 | 13/05/2015,-0.137 18 | 14/05/2015,-0.143 19 | 15/05/2015,-0.11 20 | 18/05/2015,-0.105 21 | 19/05/2015,-0.106 22 | 20/05/2015,-0.105 23 | 21/05/2015,-0.106 24 | 22/05/2015,-0.14 25 | 25/05/2015,-0.143 26 | 26/05/2015,-0.109 27 | 27/05/2015,-0.114 28 | 28/05/2015,-0.104 29 | 29/05/2015,-0.08 30 | 01/06/2015,-0.106 31 | 02/06/2015,-0.122 32 | 03/06/2015,-0.143 33 | 04/06/2015,-0.138 34 | 05/06/2015,-0.115 35 | 08/06/2015,-0.127 36 | 09/06/2015,-0.126 37 | 10/06/2015,-0.117 38 | 11/06/2015,-0.12 39 | 12/06/2015,-0.125 40 | 15/06/2015,-0.119 41 | 16/06/2015,-0.125 42 | 17/06/2015,-0.11 43 | 18/06/2015,-0.118 44 | 19/06/2015,-0.12 45 | 
22/06/2015,-0.127 46 | 23/06/2015,-0.121 47 | 24/06/2015,-0.111 48 | 25/06/2015,-0.116 49 | 26/06/2015,-0.123 50 | 29/06/2015,-0.118 51 | 30/06/2015,-0.06 52 | 01/07/2015,-0.123 53 | 02/07/2015,-0.121 54 | 03/07/2015,-0.116 55 | 06/07/2015,-0.122 56 | 07/07/2015,-0.12 57 | 08/07/2015,-0.118 58 | 09/07/2015,-0.121 59 | 10/07/2015,-0.12 60 | 13/07/2015,-0.118 61 | 14/07/2015,-0.112 62 | -------------------------------------------------------------------------------- /legacy/python3/data/hist_EONIA_2015.csv: -------------------------------------------------------------------------------- 1 | ,EONIA 2 | 21/04/2015,-0.084 3 | 22/04/2015,-0.077 4 | 23/04/2015,-0.08 5 | 24/04/2015,-0.081 6 | 27/04/2015,-0.081 7 | 28/04/2015,-0.079 8 | 29/04/2015,-0.072 9 | 30/04/2015,-0.027 10 | 04/05/2015,-0.084 11 | 05/05/2015,-0.085 12 | 06/05/2015,-0.088 13 | 07/05/2015,-0.089 14 | 08/05/2015,-0.087 15 | 11/05/2015,-0.089 16 | 12/05/2015,-0.091 17 | 13/05/2015,-0.137 18 | 14/05/2015,-0.143 19 | 15/05/2015,-0.11 20 | 18/05/2015,-0.105 21 | 19/05/2015,-0.106 22 | 20/05/2015,-0.105 23 | 21/05/2015,-0.106 24 | 22/05/2015,-0.14 25 | 25/05/2015,-0.143 26 | 26/05/2015,-0.109 27 | 27/05/2015,-0.114 28 | 28/05/2015,-0.104 29 | 29/05/2015,-0.08 30 | 01/06/2015,-0.106 31 | 02/06/2015,-0.122 32 | 03/06/2015,-0.143 33 | 04/06/2015,-0.138 34 | 05/06/2015,-0.115 35 | 08/06/2015,-0.127 36 | 09/06/2015,-0.126 37 | 10/06/2015,-0.117 38 | 11/06/2015,-0.12 39 | 12/06/2015,-0.125 40 | 15/06/2015,-0.119 41 | 16/06/2015,-0.125 42 | 17/06/2015,-0.11 43 | 18/06/2015,-0.118 44 | 19/06/2015,-0.12 45 | 22/06/2015,-0.127 46 | 23/06/2015,-0.121 47 | 24/06/2015,-0.111 48 | 25/06/2015,-0.116 49 | 26/06/2015,-0.123 50 | 29/06/2015,-0.118 51 | 30/06/2015,-0.06 52 | 01/07/2015,-0.123 53 | 02/07/2015,-0.121 54 | 03/07/2015,-0.116 55 | 06/07/2015,-0.122 56 | 07/07/2015,-0.12 57 | 08/07/2015,-0.118 58 | 09/07/2015,-0.121 59 | 10/07/2015,-0.12 60 | 13/07/2015,-0.118 61 | 14/07/2015,-0.112 62 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | .DS_Store 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Listed Volatility and Variance Derivatives (Wiley Finance) 2 | 3 | This repository provides all Python code and Jupyter Notebooks of the book _Listed Volatility and Variance Derivatives_ by Yves Hilpisch. The code has been updated in November 2020 and several new data sets have been added. 4 | 5 | ![alt text](http://hilpisch.com/images/lvvd_cover.png "Book Cover") 6 | 7 | Order the book here 8 | 9 | http://eu.wiley.com/WileyCDA/WileyTitle/productCd-1119167914.html 10 | 11 | or 12 | 13 | https://www.amazon.com/Listed-Volatility-Variance-Derivatives-Python-based/dp/1119167914/. 14 | 15 | ## Quant Platform 16 | 17 | You can immediately use all code and Jupyter notebooks by registering for the Quant Platform at http://lvvd.quant-platform.com. 18 | 19 | 20 | ## Company Information 21 | 22 | © Dr. Yves J. Hilpisch \| The Python Quants GmbH 23 | 24 | The Quant Platform and the code/Jupyter Notebooks come with no representations or warranties, to the extent permitted by applicable law. 25 | 26 | http://tpq.io \| team@tpq.io \| 27 | http://twitter.com/dyjh 28 | 29 | **Quant Platform** \| http://lvvd.quant-platform.com 30 | 31 | **Python for Finance (O'Reilly)** \| 32 | http://py4fi.tpq.io 33 | 34 | **Derivatives Analytics with Python (Wiley Finance)** \| 35 | http://dawp.tpq.io 36 | 37 | **Artificial Intelligence in Finance (O'Reilly)** \| 38 | http://aiif.tpq.io 39 | 40 | **Python for Algorithmic Trading (O'Reilly)** \| 41 | http://py4at.tpq.io 42 | 43 | **Python for Finance Online Training** \| 44 | http://certificates.tpq.io 45 | -------------------------------------------------------------------------------- /code/scripts/srjd_fwd_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Script for term structure calibration of 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J.
Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | import scipy.optimize as sco 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | i = 0 # counter for calibration runs 15 | 16 | # reading the VSTOXX futures quotes 17 | path = './data/' 18 | h5 = pd.HDFStore(path + 'vstoxx_data_31032014.h5', 'r') 19 | futures_quotes = h5['futures_data'] 20 | h5.close() 21 | 22 | # selecting needed data columns and adding spot value 23 | forwards = list(futures_quotes['PRICE'].values) 24 | forwards.insert(0, v0) 25 | forwards = np.array(forwards) 26 | ttms = list(futures_quotes['TTM'].values) 27 | ttms.insert(0, 0) 28 | ttms = np.array(ttms) 29 | 30 | 31 | def srd_forwards(p0): 32 | ''' Function for forward volatilities in GL96 Model. 33 | 34 | Parameters 35 | ========== 36 | p0: list 37 | set of model parameters, where 38 | 39 | kappa: float 40 | mean-reversion factor 41 | theta: float 42 | long-run mean 43 | sigma: float 44 | volatility factor 45 | 46 | Returns 47 | ======= 48 | forwards: NumPy ndarray object 49 | forward volatilities 50 | ''' 51 | t = ttms 52 | kappa, theta, sigma = p0 53 | g = math.sqrt(kappa ** 2 + 2 * sigma ** 2) 54 | sum1 = ((kappa * theta * (np.exp(g * t) - 1)) / 55 | (2 * g + (kappa + g) * (np.exp(g * t) - 1))) 56 | sum2 = v0 * ((4 * g ** 2 * np.exp(g * t)) / 57 | (2 * g + (kappa + g) * (np.exp(g * t) - 1)) ** 2) 58 | forwards = sum1 + sum2 59 | return forwards 60 | 61 | 62 | def srd_fwd_error(p0): 63 | ''' Error function for GL96 forward volatilities calibration. 64 | 65 | Parameters 66 | ========== 67 | p0: tuple 68 | parameter vector 69 | 70 | Returns 71 | ======= 72 | MSE: float 73 | mean-squared error for p0 74 | ''' 75 | global i 76 | kappa, theta, sigma = p0 77 | srd_fwds = srd_forwards(p0) 78 | MSE = np.sum((forwards - srd_fwds) ** 2) / len(forwards) 79 | if 2 * kappa * theta < sigma ** 2: 80 | MSE = MSE + 100 # penalty 81 | elif sigma < 0: 82 | MSE = MSE + 100 83 | # print intermediate results: every 50th iteration 84 | if i % 50 == 0: 85 | print("{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE)) 86 | i += 1 87 | return MSE 88 | 89 | if __name__ == '__main__': 90 | p0 = 1.0, 17.5, 1.0 91 | opt = sco.fmin(srd_fwd_error, p0, 92 | xtol=0.00001, ftol=0.00001, 93 | maxiter=1500, maxfun=2000) 94 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srjd_fwd_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Script for term structure calibration of 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | import scipy.optimize as sco 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | i = 0 # counter for calibration runs 15 | 16 | # reading the VSTOXX futures quotes 17 | path = 'data/' 18 | h5 = pd.HDFStore(path + 'vstoxx_data_31032014.h5', 'r') 19 | futures_quotes = h5['futures_data'] 20 | h5.close() 21 | 22 | # selecting needed data columns and adding spot value 23 | forwards = list(futures_quotes['PRICE'].values) 24 | forwards.insert(0, v0) 25 | forwards = np.array(forwards) 26 | ttms = list(futures_quotes['TTM'].values) 27 | ttms.insert(0, 0) 28 | ttms = np.array(ttms) 29 | 30 | 31 | def srd_forwards(p0): 32 | ''' Function for forward volatilities in GL96 Model. 
33 | 34 | Parameters 35 | ========== 36 | p0: list 37 | set of model parameters, where 38 | 39 | kappa: float 40 | mean-reversion factor 41 | theta: float 42 | long-run mean 43 | sigma: float 44 | volatility factor 45 | 46 | Returns 47 | ======= 48 | forwards: NumPy ndarray object 49 | forward volatilities 50 | ''' 51 | t = ttms 52 | kappa, theta, sigma = p0 53 | g = math.sqrt(kappa ** 2 + 2 * sigma ** 2) 54 | sum1 = ((kappa * theta * (np.exp(g * t) - 1)) / 55 | (2 * g + (kappa + g) * (np.exp(g * t) - 1))) 56 | sum2 = v0 * ((4 * g ** 2 * np.exp(g * t)) / 57 | (2 * g + (kappa + g) * (np.exp(g * t) - 1)) ** 2) 58 | forwards = sum1 + sum2 59 | return forwards 60 | 61 | 62 | def srd_fwd_error(p0): 63 | ''' Error function for GL96 forward volatilities calibration. 64 | 65 | Parameters 66 | ========== 67 | p0: tuple 68 | parameter vector 69 | 70 | Returns 71 | ======= 72 | MSE: float 73 | mean-squared error for p0 74 | ''' 75 | global i 76 | kappa, theta, sigma = p0 77 | srd_fwds = srd_forwards(p0) 78 | MSE = np.sum((forwards - srd_fwds) ** 2) / len(forwards) 79 | if 2 * kappa * theta < sigma ** 2: 80 | MSE = MSE + 100 # penalty 81 | elif sigma < 0: 82 | MSE = MSE + 100 83 | # print intermediate results: every 50th iteration 84 | if i % 50 == 0: 85 | print "{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE) 86 | i += 1 87 | return MSE 88 | 89 | if __name__ == '__main__': 90 | p0 = 1.0, 17.5, 1.0 91 | opt = sco.fmin(srd_fwd_error, p0, 92 | xtol=0.00001, ftol=0.00001, 93 | maxiter=1500, maxfun=2000) 94 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srjd_fwd_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Script for term structure calibration of 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import pandas as pd 11 | import scipy.optimize as sco 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | i = 0 # counter for calibration runs 15 | 16 | # reading the VSTOXX futures quotes 17 | path = './data/' 18 | h5 = pd.HDFStore(path + 'vstoxx_data_31032014.h5', 'r') 19 | futures_quotes = h5['futures_data'] 20 | h5.close() 21 | 22 | # selecting needed data columns and adding spot value 23 | forwards = list(futures_quotes['PRICE'].values) 24 | forwards.insert(0, v0) 25 | forwards = np.array(forwards) 26 | ttms = list(futures_quotes['TTM'].values) 27 | ttms.insert(0, 0) 28 | ttms = np.array(ttms) 29 | 30 | 31 | def srd_forwards(p0): 32 | ''' Function for forward volatilities in GL96 Model. 33 | 34 | Parameters 35 | ========== 36 | p0: list 37 | set of model parameters, where 38 | 39 | kappa: float 40 | mean-reversion factor 41 | theta: float 42 | long-run mean 43 | sigma: float 44 | volatility factor 45 | 46 | Returns 47 | ======= 48 | forwards: NumPy ndarray object 49 | forward volatilities 50 | ''' 51 | t = ttms 52 | kappa, theta, sigma = p0 53 | g = math.sqrt(kappa ** 2 + 2 * sigma ** 2) 54 | sum1 = ((kappa * theta * (np.exp(g * t) - 1)) / 55 | (2 * g + (kappa + g) * (np.exp(g * t) - 1))) 56 | sum2 = v0 * ((4 * g ** 2 * np.exp(g * t)) / 57 | (2 * g + (kappa + g) * (np.exp(g * t) - 1)) ** 2) 58 | forwards = sum1 + sum2 59 | return forwards 60 | 61 | 62 | def srd_fwd_error(p0): 63 | ''' Error function for GL96 forward volatilities calibration.
64 | 65 | Parameters 66 | ========== 67 | p0: tuple 68 | parameter vector 69 | 70 | Returns 71 | ======= 72 | MSE: float 73 | mean-squared error for p0 74 | ''' 75 | global i 76 | kappa, theta, sigma = p0 77 | srd_fwds = srd_forwards(p0) 78 | MSE = np.sum((forwards - srd_fwds) ** 2) / len(forwards) 79 | if 2 * kappa * theta < sigma ** 2: 80 | MSE = MSE + 100 # penalty 81 | elif sigma < 0: 82 | MSE = MSE + 100 83 | # print intermediate results: every 50th iteration 84 | if i % 50 == 0: 85 | print("{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE)) 86 | i += 1 87 | return MSE 88 | 89 | if __name__ == '__main__': 90 | p0 = 1.0, 17.5, 1.0 91 | opt = sco.fmin(srd_fwd_error, p0, 92 | xtol=0.00001, ftol=0.00001, 93 | maxiter=1500, maxfun=2000) 94 | -------------------------------------------------------------------------------- /code/scripts/index_vstoxx_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to compute VSTOXX index values 3 | # given the values for the relevant sub-indexes 4 | # as generated by the module index_subindex_calculation.py 5 | # 6 | # (c) Dr. Yves J. Hilpisch 7 | # Listed Volatility and Variance Derivatives 8 | # 9 | import numpy as np 10 | import pandas as pd 11 | import matplotlib.pyplot as plt 12 | from index_date_functions import * 13 | 14 | 15 | def calculate_vstoxx(url): 16 | ''' Function to calculate the VSTOXX volatility index given time series 17 | of the relevant sub-indexes. 18 | 19 | Parameters 20 | ========== 21 | path: string 22 | string with path of data files 23 | 24 | Returns 25 | ======= 26 | data: pandas DataFrame object 27 | results of index calculation 28 | ''' 29 | # constant parameters 30 | seconds_year = 365 * 24 * 3600. 31 | seconds_30_days = 30 * 24 * 3600. 
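# Descriptive comment on the computation that follows (symbols V1, V2, T1, T2,
# Ty, T30 are introduced here only for this explanation): with V1, V2 the two
# relevant sub-index levels and T1, T2 their remaining lifetimes in seconds,
# the code below computes the constant 30-day index value as
#   V = sqrt((T1 / Ty * V1 ** 2 * (T2 - T30) / (T2 - T1)
#             + T2 / Ty * V2 ** 2 * (T30 - T1) / (T2 - T1)) * Ty / T30)
# where Ty = seconds_year and T30 = seconds_30_days as defined above.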
32 | 33 | # import historical VSTOXX data 34 | data = pd.read_csv(url, index_col=0, parse_dates=True) 35 | 36 | # determine the settlement dates for the two underlying option series 37 | data['Settlement date 1'] = [first_settlement_day(a) for a in data.index] 38 | data['Settlement date 2'] = [second_settlement_day(a) for a in data.index] 39 | 40 | # deduce the life time (in seconds) from current date to 41 | # final settlement Date 42 | data['Life time 1'] = [(data['Settlement date 1'][i] - i).days 43 | * 24 * 60 * 60 for i in data.index] 44 | data['Life time 2'] = [(data['Settlement date 2'][i] - i).days 45 | * 24 * 60 * 60 for i in data.index] 46 | 47 | data['Use V6I2'] = data['.V6I1'].notnull() # where V6I1 is not defined 48 | data['Subindex to use 1'] = [data['.V6I1'][i] if data['Use V6I2'][i] 49 | else data['.V6I2'][i] for i in data.index] 50 | # if V6I1 is defined, use V6I1 and V6I2 as data set 51 | data['Subindex to use 2'] = [data['.V6I2'][i] if data['Use V6I2'][i] 52 | else data['.V6I3'][i] for i in data.index] 53 | # else use V6I2 and V6I3 54 | 55 | # the linear interpolation of the VSTOXX value 56 | # from the two relevant sub-indexes 57 | data['Part 1'] = data['Life time 1'] / seconds_year \ 58 | * data['Subindex to use 1'] ** 2 \ 59 | * ((data['Life time 2'] - seconds_30_days) 60 | / (data['Life time 2'] - data['Life time 1'])) 61 | 62 | data['Part 2'] = data['Life time 2'] / seconds_year \ 63 | * data['Subindex to use 2'] ** 2 \ 64 | *((seconds_30_days - data['Life time 1']) 65 | / (data['Life time 2'] - data['Life time 1'])) \ 66 | 67 | data['VSTOXX'] = np.sqrt((data['Part 1'] + data['Part 2']) * 68 | seconds_year / seconds_30_days) 69 | 70 | # difference between original VSTOXX data and re-calculated values 71 | data['Difference'] = data['.V2TX'] - data['VSTOXX'] 72 | 73 | return data 74 | -------------------------------------------------------------------------------- /legacy/python3/scripts/index_vstoxx_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to compute VSTOXX index values 3 | # given the values for the relevant sub-indexes 4 | # as generated by the module index_subindex_calculation.py 5 | # 6 | # (c) Dr. Yves J. Hilpisch 7 | # Listed Volatility and Variance Derivatives 8 | # 9 | import numpy as np 10 | import pandas as pd 11 | import matplotlib.pyplot as plt 12 | from index_date_functions import * 13 | 14 | 15 | def calculate_vstoxx(url): 16 | ''' Function to calculate the VSTOXX volatility index given time series 17 | of the relevant sub-indexes. 18 | 19 | Parameters 20 | ========== 21 | path: string 22 | string with path of data files 23 | 24 | Returns 25 | ======= 26 | data: pandas DataFrame object 27 | results of index calculation 28 | ''' 29 | # constant parameters 30 | seconds_year = 365 * 24 * 3600. 31 | seconds_30_days = 30 * 24 * 3600. 
32 | 33 | # import historical VSTOXX data 34 | data = pd.read_csv(url, index_col=0, parse_dates=True) 35 | 36 | # determine the settlement dates for the two underlying option series 37 | data['Settlement date 1'] = [first_settlement_day(a) for a in data.index] 38 | data['Settlement date 2'] = [second_settlement_day(a) for a in data.index] 39 | 40 | # deduce the life time (in seconds) from current date to 41 | # final settlement Date 42 | data['Life time 1'] = [(data['Settlement date 1'][i] - i).days 43 | * 24 * 60 * 60 for i in data.index] 44 | data['Life time 2'] = [(data['Settlement date 2'][i] - i).days 45 | * 24 * 60 * 60 for i in data.index] 46 | 47 | data['Use V6I2'] = data['.V6I1'].notnull() # where V6I1 is not defined 48 | data['Subindex to use 1'] = [data['.V6I1'][i] if data['Use V6I2'][i] 49 | else data['.V6I2'][i] for i in data.index] 50 | # if V6I1 is defined, use V6I1 and V6I2 as data set 51 | data['Subindex to use 2'] = [data['.V6I2'][i] if data['Use V6I2'][i] 52 | else data['.V6I3'][i] for i in data.index] 53 | # else use V6I2 and V6I3 54 | 55 | # the linear interpolation of the VSTOXX value 56 | # from the two relevant sub-indexes 57 | data['Part 1'] = data['Life time 1'] / seconds_year \ 58 | * data['Subindex to use 1'] ** 2 \ 59 | * ((data['Life time 2'] - seconds_30_days) 60 | / (data['Life time 2'] - data['Life time 1'])) 61 | 62 | data['Part 2'] = data['Life time 2'] / seconds_year \ 63 | * data['Subindex to use 2'] ** 2 \ 64 | *((seconds_30_days - data['Life time 1']) 65 | / (data['Life time 2'] - data['Life time 1'])) \ 66 | 67 | data['VSTOXX'] = np.sqrt((data['Part 1'] + data['Part 2']) * 68 | seconds_year / seconds_30_days) 69 | 70 | # difference between original VSTOXX data and re-calculated values 71 | data['Difference'] = data['.V2TX'] - data['VSTOXX'] 72 | 73 | return data 74 | -------------------------------------------------------------------------------- /legacy/python2/scripts/index_vstoxx_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to compute VSTOXX index values 3 | # given the values for the relevant sub-indexes 4 | # as generated by the module index_subindex_calculation.py 5 | # 6 | # (c) Dr. Yves J. Hilpisch 7 | # Listed Volatility and Variance Derivatives 8 | # 9 | import pandas as pd 10 | import numpy as np 11 | import matplotlib.pyplot as plt 12 | from index_date_functions import * 13 | 14 | 15 | def calculate_vstoxx(path): 16 | ''' Function to calculate the VSTOXX volatility index given time series 17 | of the relevant sub-indexes. 18 | 19 | Parameters 20 | ========== 21 | path: string 22 | string with path of data files 23 | 24 | Returns 25 | ======= 26 | data: pandas DataFrame object 27 | results of index calculation 28 | ''' 29 | # constant parameters 30 | seconds_year = 365 * 24 * 3600. 31 | seconds_30_days = 30 * 24 * 3600. 
32 | 33 | # import historical VSTOXX data 34 | data = pd.read_csv(path + 'vs.csv', index_col=0, parse_dates=True) 35 | 36 | # determine the settlement dates for the two underlying option series 37 | data['Settlement date 1'] = [first_settlement_day(a) for a in data.index] 38 | data['Settlement date 2'] = [second_settlement_day(a) for a in data.index] 39 | 40 | # deduce the life time (in seconds) from current date to 41 | # final settlement Date 42 | data['Life time 1'] = [(data['Settlement date 1'][i] - i).days 43 | * 24 * 60 * 60 for i in data.index] 44 | data['Life time 2'] = [(data['Settlement date 2'][i] - i).days 45 | * 24 * 60 * 60 for i in data.index] 46 | 47 | data['Use V6I2'] = data['V6I1'].notnull() # where V6I1 is not defined 48 | data['Subindex to use 1'] = [data['V6I1'][i] if data['Use V6I2'][i] 49 | else data['V6I2'][i] for i in data.index] 50 | # if V6I1 is defined, use V6I1 and V6I2 as data set 51 | data['Subindex to use 2'] = [data['V6I2'][i] if data['Use V6I2'][i] 52 | else data['V6I3'][i] for i in data.index] 53 | # else use V6I2 and V6I3 54 | 55 | # the linear interpolation of the VSTOXX value 56 | # from the two relevant sub-indexes 57 | data['Part 1'] = data['Life time 1'] / seconds_year \ 58 | * data['Subindex to use 1'] ** 2 \ 59 | * ((data['Life time 2'] - seconds_30_days) 60 | / (data['Life time 2'] - data['Life time 1'])) 61 | 62 | data['Part 2'] = data['Life time 2'] / seconds_year \ 63 | * data['Subindex to use 2'] ** 2 \ 64 | *((seconds_30_days - data['Life time 1']) 65 | / (data['Life time 2'] - data['Life time 1'])) \ 66 | 67 | data['VSTOXX'] = np.sqrt((data['Part 1'] + data['Part 2']) * 68 | seconds_year / seconds_30_days) 69 | 70 | # difference between original VSTOXX data and re-calculated values 71 | data['Difference'] = data['V2TX'] - data['VSTOXX'] 72 | 73 | return data 74 | -------------------------------------------------------------------------------- /legacy/python2/08_Terms_Volatility.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"The
" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "# Listed Volatility and Variance Derivatives\n", 15 | "\n", 16 | "**Dr. Yves J. Hilpisch — Wiley Finance (2016)**\n", 17 | "\n", 18 | "\"Derivatives" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "# Terms of the VSTOXX and its Derivatives" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "## The EURO STOXX 50 Index" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## The VSTOXX Index" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "## VSTOXX Futures Contracts" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "## VSTOXX Options Contracts" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "## Conclusions" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "\"The
\n", 68 | "\n", 69 | "http://tpq.io | @dyjh | team@tpq.io\n", 70 | "\n", 71 | "**DX Analytics** |\n", 72 | "http://dx-analytics.com\n", 73 | "\n", 74 | "**Quant Platform** |\n", 75 | "http://quant-platform.com\n", 76 | "\n", 77 | "**Python for Finance Books** |\n", 78 | "http://books.tpq.io\n", 79 | "\n", 80 | "**Python for Finance Training** |\n", 81 | "http://training.tpq.io" 82 | ] 83 | } 84 | ], 85 | "metadata": { 86 | "anaconda-cloud": {}, 87 | "kernelspec": { 88 | "display_name": "Python [Root]", 89 | "language": "python", 90 | "name": "Python [Root]" 91 | }, 92 | "language_info": { 93 | "codemirror_mode": { 94 | "name": "ipython", 95 | "version": 2 96 | }, 97 | "file_extension": ".py", 98 | "mimetype": "text/x-python", 99 | "name": "python", 100 | "nbconvert_exporter": "python", 101 | "pygments_lexer": "ipython2", 102 | "version": "2.7.12" 103 | } 104 | }, 105 | "nbformat": 4, 106 | "nbformat_minor": 0 107 | } 108 | -------------------------------------------------------------------------------- /code/scripts/index_date_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with helper functions for the VSTOXX index calculation 3 | # 4 | # (c) The Python Quants GmbH 5 | # For illustration purposes only. 6 | # August 2014 7 | # 8 | 9 | TYEAR = 365 * 24 * 60 * 60. # seconds of a standard year 10 | 11 | import datetime as dt 12 | 13 | 14 | def third_friday(date): 15 | ''' Returns the third friday of the month given by the datetime object date 16 | This is the day options expiry on. 17 | 18 | date: datetime object 19 | date of month for which third Friday is to be found 20 | ''' 21 | 22 | number_days = date.day 23 | first_day = date - dt.timedelta(number_days - 1) 24 | # Reduce the given date to the first of the month. 25 | # Year and month stay the same. 26 | week_day = first_day.weekday() 27 | # What weekday is the first of the month (Mon=0, Tue=1, ...) 28 | day_delta = 4 - week_day # distance to the next Friday 29 | if day_delta < 0: 30 | day_delta += 7 31 | third_friday = first_day + dt.timedelta(day_delta + 14) 32 | # add that distance plus two weeks to the first of month 33 | return third_friday 34 | 35 | 36 | def first_settlement_day(date): 37 | ''' Returns the next settlement date (third Friday of a month) following 38 | the date date. 39 | 40 | date: datetime object 41 | date for which following third Friday is to be found 42 | ''' 43 | 44 | settlement_day_in_month = third_friday(date) 45 | # settlement date in the given month 46 | 47 | delta = (settlement_day_in_month - date).days 48 | # where are we relative to the settlement date in that month? 49 | 50 | if delta > 1: # more than 1 day before ? 51 | return settlement_day_in_month 52 | # yes: take the settlement dates of this and the next month 53 | else: 54 | next_month = settlement_day_in_month + dt.timedelta(20) 55 | # no: shift the date of next month into the next month but one and ... 56 | settlement_day_next_month = third_friday(next_month) 57 | # ... compute that settlement day 58 | return settlement_day_next_month 59 | 60 | 61 | def second_settlement_day(date): 62 | ''' Returns the second settlement date (third Friday of a month) following 63 | the date date. 
64 | 65 | date: datetime object 66 | date for which second third Friday is to be found 67 | ''' 68 | 69 | settlement_day_in_month = first_settlement_day(date) 70 | # settlement date in the given month 71 | next_month = settlement_day_in_month + dt.timedelta(20) 72 | # shift date to the next month 73 | return third_friday(next_month) # settlement date of that month 74 | 75 | 76 | def not_a_day_before_expiry(date): 77 | ''' Returns True if the date is NOT one day before or equal the third 78 | Friday in month 79 | 80 | date: datetime object 81 | date for which second third Friday is to be found 82 | ''' 83 | 84 | settlement_day_in_month = third_friday(date) 85 | delta = (settlement_day_in_month - date).days 86 | if delta == 1 or delta == 0: 87 | return False 88 | else: 89 | return True 90 | 91 | 92 | def compute_delta(date, settlement_day): 93 | ''' Computes the time (in seconds) from date 0:00 to the first settlement 94 | date 8:30 AM 95 | 96 | date: datetime object 97 | starting date 98 | settlement_day: datetime object 99 | relevant settlement day 100 | ''' 101 | 102 | dummy_time_1 = dt.timedelta(seconds=43200) 103 | # seconds from midnight to 12:00 104 | dummy_time_2 = dt.timedelta(seconds=23400) 105 | # seconds from 17:30 to midnight 106 | settlement_date = settlement_day + dummy_time_1 + dummy_time_2 107 | delta_T_dummy = settlement_date - date 108 | delta_T = ((delta_T_dummy.days - 1) * 24 * 60 * 60 + 109 | delta_T_dummy.seconds) / TYEAR 110 | return delta_T 111 | 112 | 113 | -------------------------------------------------------------------------------- /legacy/python2/scripts/index_date_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with helper functions for the VSTOXX index calculation 3 | # 4 | # (c) The Python Quants GmbH 5 | # For illustration purposes only. 6 | # August 2014 7 | # 8 | 9 | TYEAR = 365 * 24 * 60 * 60. # seconds of a standard year 10 | 11 | import datetime as dt 12 | 13 | 14 | def third_friday(date): 15 | ''' Returns the third friday of the month given by the datetime object date 16 | This is the day options expiry on. 17 | 18 | date: datetime object 19 | date of month for which third Friday is to be found 20 | ''' 21 | 22 | number_days = date.day 23 | first_day = date - dt.timedelta(number_days - 1) 24 | # Reduce the given date to the first of the month. 25 | # Year and month stay the same. 26 | week_day = first_day.weekday() 27 | # What weekday is the first of the month (Mon=0, Tue=1, ...) 28 | day_delta = 4 - week_day # distance to the next Friday 29 | if day_delta < 0: 30 | day_delta += 7 31 | third_friday = first_day + dt.timedelta(day_delta + 14) 32 | # add that distance plus two weeks to the first of month 33 | return third_friday 34 | 35 | 36 | def first_settlement_day(date): 37 | ''' Returns the next settlement date (third Friday of a month) following 38 | the date date. 39 | 40 | date: datetime object 41 | date for which following third Friday is to be found 42 | ''' 43 | 44 | settlement_day_in_month = third_friday(date) 45 | # settlement date in the given month 46 | 47 | delta = (settlement_day_in_month - date).days 48 | # where are we relative to the settlement date in that month? 49 | 50 | if delta > 1: # more than 1 day before ? 51 | return settlement_day_in_month 52 | # yes: take the settlement dates of this and the next month 53 | else: 54 | next_month = settlement_day_in_month + dt.timedelta(20) 55 | # no: shift the date of next month into the next month but one and ... 
56 | settlement_day_next_month = third_friday(next_month) 57 | # ... compute that settlement day 58 | return settlement_day_next_month 59 | 60 | 61 | def second_settlement_day(date): 62 | ''' Returns the second settlement date (third Friday of a month) following 63 | the date date. 64 | 65 | date: datetime object 66 | date for which second third Friday is to be found 67 | ''' 68 | 69 | settlement_day_in_month = first_settlement_day(date) 70 | # settlement date in the given month 71 | next_month = settlement_day_in_month + dt.timedelta(20) 72 | # shift date to the next month 73 | return third_friday(next_month) # settlement date of that month 74 | 75 | 76 | def not_a_day_before_expiry(date): 77 | ''' Returns True if the date is NOT one day before or equal the third 78 | Friday in month 79 | 80 | date: datetime object 81 | date for which second third Friday is to be found 82 | ''' 83 | 84 | settlement_day_in_month = third_friday(date) 85 | delta = (settlement_day_in_month - date).days 86 | if delta == 1 or delta == 0: 87 | return False 88 | else: 89 | return True 90 | 91 | 92 | def compute_delta(date, settlement_day): 93 | ''' Computes the time (in seconds) from date 0:00 to the first settlement 94 | date 8:30 AM 95 | 96 | date: datetime object 97 | starting date 98 | settlement_day: datetime object 99 | relevant settlement day 100 | ''' 101 | 102 | dummy_time_1 = dt.timedelta(seconds=43200) 103 | # seconds from midnight to 12:00 104 | dummy_time_2 = dt.timedelta(seconds=23400) 105 | # seconds from 17:30 to midnight 106 | settlement_date = settlement_day + dummy_time_1 + dummy_time_2 107 | delta_T_dummy = settlement_date - date 108 | delta_T = ((delta_T_dummy.days - 1) * 24 * 60 * 60 + 109 | delta_T_dummy.seconds) / TYEAR 110 | return delta_T 111 | 112 | 113 | -------------------------------------------------------------------------------- /legacy/python3/scripts/index_date_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with helper functions for the VSTOXX index calculation 3 | # 4 | # (c) The Python Quants GmbH 5 | # For illustration purposes only. 6 | # August 2014 7 | # 8 | 9 | TYEAR = 365 * 24 * 60 * 60. # seconds of a standard year 10 | 11 | import datetime as dt 12 | 13 | 14 | def third_friday(date): 15 | ''' Returns the third friday of the month given by the datetime object date 16 | This is the day options expiry on. 17 | 18 | date: datetime object 19 | date of month for which third Friday is to be found 20 | ''' 21 | 22 | number_days = date.day 23 | first_day = date - dt.timedelta(number_days - 1) 24 | # Reduce the given date to the first of the month. 25 | # Year and month stay the same. 26 | week_day = first_day.weekday() 27 | # What weekday is the first of the month (Mon=0, Tue=1, ...) 28 | day_delta = 4 - week_day # distance to the next Friday 29 | if day_delta < 0: 30 | day_delta += 7 31 | third_friday = first_day + dt.timedelta(day_delta + 14) 32 | # add that distance plus two weeks to the first of month 33 | return third_friday 34 | 35 | 36 | def first_settlement_day(date): 37 | ''' Returns the next settlement date (third Friday of a month) following 38 | the date date. 39 | 40 | date: datetime object 41 | date for which following third Friday is to be found 42 | ''' 43 | 44 | settlement_day_in_month = third_friday(date) 45 | # settlement date in the given month 46 | 47 | delta = (settlement_day_in_month - date).days 48 | # where are we relative to the settlement date in that month? 
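    # A date more than one day before this month's third Friday keeps that Friday
    # as the next settlement day; the day before expiry, the expiry day itself and
    # any later day roll the settlement to the third Friday of the following month.
    # Illustrative sketch: first_settlement_day(dt.datetime(2014, 3, 31)) should
    # return the 18 April 2014 expiry.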
49 | 50 | if delta > 1: # more than 1 day before ? 51 | return settlement_day_in_month 52 | # yes: take the settlement dates of this and the next month 53 | else: 54 | next_month = settlement_day_in_month + dt.timedelta(20) 55 | # no: shift the date of next month into the next month but one and ... 56 | settlement_day_next_month = third_friday(next_month) 57 | # ... compute that settlement day 58 | return settlement_day_next_month 59 | 60 | 61 | def second_settlement_day(date): 62 | ''' Returns the second settlement date (third Friday of a month) following 63 | the date date. 64 | 65 | date: datetime object 66 | date for which second third Friday is to be found 67 | ''' 68 | 69 | settlement_day_in_month = first_settlement_day(date) 70 | # settlement date in the given month 71 | next_month = settlement_day_in_month + dt.timedelta(20) 72 | # shift date to the next month 73 | return third_friday(next_month) # settlement date of that month 74 | 75 | 76 | def not_a_day_before_expiry(date): 77 | ''' Returns True if the date is NOT one day before or equal the third 78 | Friday in month 79 | 80 | date: datetime object 81 | date for which second third Friday is to be found 82 | ''' 83 | 84 | settlement_day_in_month = third_friday(date) 85 | delta = (settlement_day_in_month - date).days 86 | if delta == 1 or delta == 0: 87 | return False 88 | else: 89 | return True 90 | 91 | 92 | def compute_delta(date, settlement_day): 93 | ''' Computes the time (in seconds) from date 0:00 to the first settlement 94 | date 8:30 AM 95 | 96 | date: datetime object 97 | starting date 98 | settlement_day: datetime object 99 | relevant settlement day 100 | ''' 101 | 102 | dummy_time_1 = dt.timedelta(seconds=43200) 103 | # seconds from midnight to 12:00 104 | dummy_time_2 = dt.timedelta(seconds=23400) 105 | # seconds from 17:30 to midnight 106 | settlement_date = settlement_day + dummy_time_1 + dummy_time_2 107 | delta_T_dummy = settlement_date - date 108 | delta_T = ((delta_T_dummy.days - 1) * 24 * 60 * 60 + 109 | delta_T_dummy.seconds) / TYEAR 110 | return delta_T 111 | 112 | 113 | -------------------------------------------------------------------------------- /legacy/python2/14_DX_SRJD_Calibration.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"The
" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "# Listed Volatility and Variance Derivatives\n", 15 | "\n", 16 | "**Dr. Yves J. Hilpisch — Wiley Finance (2016)**\n", 17 | "\n", 18 | "\"Derivatives" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "# DX Analytics — Square-Root Jump Diffusion " 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "## Introduction " 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "You need to install DX Analytics. See http://dx-analytics.com." 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "## Modeling the VSTOXX Options" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "## Calibration of the VSTOXX Model" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "## Calibration Results" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### Calibration to 1 Maturity" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": {}, 73 | "source": [ 74 | "### Calibration to 2 Maturities" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "### Calibration to 5 Maturities" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "### Calibration without Penalties" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "## Conclusions" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": {}, 101 | "source": [ 102 | "## Python Scripts" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "### dx_srjd_calibration.py" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "\"The
\n", 117 | "\n", 118 | "http://tpq.io | @dyjh | team@tpq.io\n", 119 | "\n", 120 | "**DX Analytics** |\n", 121 | "http://dx-analytics.com\n", 122 | "\n", 123 | "**Quant Platform** |\n", 124 | "http://quant-platform.com\n", 125 | "\n", 126 | "**Python for Finance Books** |\n", 127 | "http://books.tpq.io\n", 128 | "\n", 129 | "**Python for Finance Training** |\n", 130 | "http://training.tpq.io" 131 | ] 132 | } 133 | ], 134 | "metadata": { 135 | "kernelspec": { 136 | "display_name": "Python [Root]", 137 | "language": "python", 138 | "name": "Python [Root]" 139 | }, 140 | "language_info": { 141 | "codemirror_mode": { 142 | "name": "ipython", 143 | "version": 2 144 | }, 145 | "file_extension": ".py", 146 | "mimetype": "text/x-python", 147 | "name": "python", 148 | "nbconvert_exporter": "python", 149 | "pygments_lexer": "ipython2", 150 | "version": "2.7.12" 151 | } 152 | }, 153 | "nbformat": 4, 154 | "nbformat_minor": 0 155 | } 156 | -------------------------------------------------------------------------------- /legacy/python2/01_Introduction.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"The
" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "# Listed Volatility and Variance Derivatives\n", 15 | "\n", 16 | "**Dr. Yves J. Hilpisch — Wiley Finance (2016)**\n", 17 | "\n", 18 | "\"Derivatives" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "# Derivatives, Volatility and Variance" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "## Option Pricing and Hedging" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## Notions of Volatility and Variance" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "## Listed Volatility and Variance Derivatives" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "### The US History" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "### The European History" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### Volatility of Volatility Indexes" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": {}, 73 | "source": [ 74 | "### Products Covered in this Book" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "## Volatility and Variance Trading" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "### Volatility Trading" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "### Variance Trading" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": {}, 101 | "source": [ 102 | "## Python as Our Tool of Choice" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "## Quick Guide Through Rest of the Book" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "\"The
\n", 117 | "\n", 118 | "http://tpq.io | @dyjh | team@tpq.io\n", 119 | "\n", 120 | "**DX Analytics** |\n", 121 | "http://dx-analytics.com\n", 122 | "\n", 123 | "**Quant Platform** |\n", 124 | "http://quant-platform.com\n", 125 | "\n", 126 | "**Python for Finance Books** |\n", 127 | "http://books.tpq.io\n", 128 | "\n", 129 | "**Python for Finance Training** |\n", 130 | "http://training.tpq.io" 131 | ] 132 | } 133 | ], 134 | "metadata": { 135 | "kernelspec": { 136 | "display_name": "Python [Root]", 137 | "language": "python", 138 | "name": "Python [Root]" 139 | }, 140 | "language_info": { 141 | "codemirror_mode": { 142 | "name": "ipython", 143 | "version": 2 144 | }, 145 | "file_extension": ".py", 146 | "mimetype": "text/x-python", 147 | "name": "python", 148 | "nbconvert_exporter": "python", 149 | "pygments_lexer": "ipython2", 150 | "version": "2.7.12" 151 | } 152 | }, 153 | "nbformat": 4, 154 | "nbformat_minor": 0 155 | } 156 | -------------------------------------------------------------------------------- /code/scripts/srjd_simulation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with simulation functions for 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import pickle 10 | import numpy as np 11 | import scipy.interpolate as scint 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | 15 | # parameters of square-root diffusion 16 | kappa = 2.0 # speed of mean reversion 17 | theta = 15.0 # long-term volatility 18 | sigma = 1.0 # standard deviation coefficient 19 | 20 | # parameters of log-normal jump 21 | lamb = 0.4 # intensity (jumps per year) 22 | mu = 0.4 # average jump size 23 | delta = 0.1 # volatility of jump size 24 | 25 | # general parameters 26 | r = 0.01 # risk-free interest rate 27 | K = 17.5 # strike 28 | T = 0.5 # time horizon 29 | M = 150 # time steps 30 | I = 10000 # number of MCS paths 31 | anti_paths = True # antithetic variates 32 | mo_match = True # moment matching 33 | 34 | 35 | # deterministic shift parameters 36 | varphi = pickle.load(open('varphi', 'rb')) 37 | tck = scint.splrep(varphi['ttms'], varphi['varphi'], k=1) 38 | # linear splines interpolation of 39 | # term structure calibration differences 40 | 41 | 42 | def random_number_gen(M, I, fixed_seed=False): 43 | ''' Generate standard normally distributed pseudo-random numbers 44 | 45 | Parameters 46 | ========== 47 | M: int 48 | number of time intervals 49 | I: int 50 | number of paths 51 | 52 | Returns 53 | ======= 54 | ran: NumPy ndarrayo object 55 | random number array 56 | ''' 57 | if fixed_seed is True: 58 | np.random.seed(10000) 59 | if anti_paths is True: 60 | ran = np.random.standard_normal((M + 1, int(I / 2))) 61 | ran = np.concatenate((ran, -ran), axis=1) 62 | else: 63 | ran = np.standard_normal((M + 1, I)) 64 | if mo_match is True: 65 | ran = ran / np.std(ran) 66 | ran -= np.mean(ran) 67 | return ran 68 | 69 | 70 | def srjd_simulation(x0, kappa, theta, sigma, 71 | lamb, mu, delta, T, M, I, fixed_seed=False): 72 | ''' Function to simulate square-root jump Difusion. 
73 | 74 | Parameters 75 | ========== 76 | x0: float 77 | initial value 78 | kappa: float 79 | mean-reversion factor 80 | theta: float 81 | long-run mean 82 | sigma: float 83 | volatility factor 84 | lamb: float 85 | jump intensity 86 | mu: float 87 | expected jump size 88 | delta: float 89 | standard deviation of jump 90 | T: float 91 | time horizon/maturity 92 | M: int 93 | time steps 94 | I: int 95 | number of simulation paths 96 | 97 | Returns 98 | ======= 99 | x: NumPy ndarray object 100 | array with simulated SRJD paths 101 | ''' 102 | dt = float(T) / M # time interval 103 | shift = scint.splev(np.arange(M + 1) * dt, tck, der=0) 104 | # deterministic shift values 105 | xh = np.zeros((M + 1, I), dtype=np.float) 106 | x = np.zeros((M + 1, I), dtype=np.float) 107 | xh[0, :] = x0 108 | x[0, :] = x0 109 | # drift contribution of jump p.a. 110 | rj = lamb * (math.exp(mu + 0.5 * delta ** 2) - 1) 111 | # 1st matrix with standard normal rv 112 | ran1 = random_number_gen(M + 1, I, fixed_seed) 113 | # 2nd matrix with standard normal rv 114 | ran2 = random_number_gen(M + 1, I, fixed_seed) 115 | # matrix with Poisson distributed rv 116 | ran3 = np.random.poisson(lamb * dt, (M + 1, I)) 117 | for t in range(1, M + 1): 118 | xh[t, :] = (xh[t - 1, :] + 119 | kappa * (theta - np.maximum(0, xh[t - 1, :])) * dt 120 | + np.sqrt(np.maximum(0, xh[t - 1, :])) * sigma 121 | * ran1[t] * np.sqrt(dt) 122 | + (np.exp(mu + delta * ran2[t]) - 1) * ran3[t] 123 | * np.maximum(0, xh[t - 1, :]) - rj * dt) 124 | x[t, :] = np.maximum(0, xh[t, :]) + shift[t] 125 | return x 126 | 127 | 128 | def srjd_call_valuation(v0, kappa, theta, sigma, 129 | lamb, mu, delta, T, r, K, M=M, I=I, 130 | fixed_seed=False): 131 | ''' Function to value European volatility call option in SRDJ model. 132 | Parameters see function srjd_simulation. 133 | 134 | Returns 135 | ======= 136 | call_value: float 137 | estimator for European call present value for strike K 138 | ''' 139 | v = srjd_simulation(v0, kappa, theta, sigma, 140 | lamb, mu, delta, T, M, I, fixed_seed) 141 | call_value = np.exp(-r * T) * sum(np.maximum(v[-1] - K, 0)) / I 142 | return call_value 143 | 144 | if __name__ == '__main__': 145 | call_value = srjd_call_valuation(v0, kappa, theta, sigma, 146 | lamb, mu, delta, T, r, K, M, I) 147 | print("Value of European call by MCS: %10.4f" % call_value) 148 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srjd_simulation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with simulation functions for 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J. 
Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import pickle 10 | import numpy as np 11 | import scipy.interpolate as scint 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | 15 | # parameters of square-root diffusion 16 | kappa = 2.0 # speed of mean reversion 17 | theta = 15.0 # long-term volatility 18 | sigma = 1.0 # standard deviation coefficient 19 | 20 | # parameters of log-normal jump 21 | lamb = 0.4 # intensity (jumps per year) 22 | mu = 0.4 # average jump size 23 | delta = 0.1 # volatility of jump size 24 | 25 | # general parameters 26 | r = 0.01 # risk-free interest rate 27 | K = 17.5 # strike 28 | T = 0.5 # time horizon 29 | M = 150 # time steps 30 | I = 10000 # number of MCS paths 31 | anti_paths = True # antithetic variates 32 | mo_match = True # moment matching 33 | 34 | 35 | # deterministic shift parameters 36 | varphi = pickle.load(open('data/varphi')) 37 | tck = scint.splrep(varphi['ttms'], varphi['varphi'], k=1) 38 | # linear splines interpolation of 39 | # term structure calibration differences 40 | 41 | 42 | def random_number_gen(M, I, fixed_seed=False): 43 | ''' Generate standard normally distributed pseudo-random numbers 44 | 45 | Parameters 46 | ========== 47 | M: int 48 | number of time intervals 49 | I: int 50 | number of paths 51 | 52 | Returns 53 | ======= 54 | ran: NumPy ndarrayo object 55 | random number array 56 | ''' 57 | if fixed_seed is True: 58 | np.random.seed(10000) 59 | if anti_paths is True: 60 | ran = np.random.standard_normal((M + 1, I / 2)) 61 | ran = np.concatenate((ran, -ran), axis=1) 62 | else: 63 | ran = np.standard_normal((M + 1, I)) 64 | if mo_match is True: 65 | ran = ran / np.std(ran) 66 | ran -= np.mean(ran) 67 | return ran 68 | 69 | 70 | def srjd_simulation(x0, kappa, theta, sigma, 71 | lamb, mu, delta, T, M, I, fixed_seed=False): 72 | ''' Function to simulate square-root jump Difusion. 73 | 74 | Parameters 75 | ========== 76 | x0: float 77 | initial value 78 | kappa: float 79 | mean-reversion factor 80 | theta: float 81 | long-run mean 82 | sigma: float 83 | volatility factor 84 | lamb: float 85 | jump intensity 86 | mu: float 87 | expected jump size 88 | delta: float 89 | standard deviation of jump 90 | T: float 91 | time horizon/maturity 92 | M: int 93 | time steps 94 | I: int 95 | number of simulation paths 96 | 97 | Returns 98 | ======= 99 | x: NumPy ndarray object 100 | array with simulated SRJD paths 101 | ''' 102 | dt = float(T) / M # time interval 103 | shift = scint.splev(np.arange(M + 1) * dt, tck, der=0) 104 | # deterministic shift values 105 | xh = np.zeros((M + 1, I), dtype=np.float) 106 | x = np.zeros((M + 1, I), dtype=np.float) 107 | xh[0, :] = x0 108 | x[0, :] = x0 109 | # drift contribution of jump p.a. 
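    # rj is lamb * (E[exp(jump size)] - 1) = lamb * (exp(mu + 0.5 * delta ** 2) - 1),
    # i.e. the expected relative jump impact per year; subtracting rj * dt in the
    # Euler step below compensates the drift for the jumps that are added there.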
110 | rj = lamb * (math.exp(mu + 0.5 * delta ** 2) - 1) 111 | # 1st matrix with standard normal rv 112 | ran1 = random_number_gen(M + 1, I, fixed_seed) 113 | # 2nd matrix with standard normal rv 114 | ran2 = random_number_gen(M + 1, I, fixed_seed) 115 | # matrix with Poisson distributed rv 116 | ran3 = np.random.poisson(lamb * dt, (M + 1, I)) 117 | for t in range(1, M + 1): 118 | xh[t, :] = (xh[t - 1, :] + 119 | kappa * (theta - np.maximum(0, xh[t - 1, :])) * dt 120 | + np.sqrt(np.maximum(0, xh[t - 1, :])) * sigma 121 | * ran1[t] * np.sqrt(dt) 122 | + (np.exp(mu + delta * ran2[t]) - 1) * ran3[t] 123 | * np.maximum(0, xh[t - 1, :]) - rj * dt) 124 | x[t, :] = np.maximum(0, xh[t, :]) + shift[t] 125 | return x 126 | 127 | 128 | def srjd_call_valuation(v0, kappa, theta, sigma, 129 | lamb, mu, delta, T, r, K, M=M, I=I, 130 | fixed_seed=False): 131 | ''' Function to value European volatility call option in SRJD model. 132 | Parameters see function srjd_simulation. 133 | 134 | Returns 135 | ======= 136 | call_value: float 137 | estimator for European call present value for strike K 138 | ''' 139 | v = srjd_simulation(v0, kappa, theta, sigma, 140 | lamb, mu, delta, T, M, I, fixed_seed) 141 | call_value = np.exp(-r * T) * sum(np.maximum(v[-1] - K, 0)) / I 142 | return call_value 143 | 144 | if __name__ == '__main__': 145 | call_value = srjd_call_valuation(v0, kappa, theta, sigma, 146 | lamb, mu, delta, T, r, K, M, I) 147 | print "Value of European call by MCS: %10.4f" % call_value 148 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srjd_simulation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with simulation functions for 3 | # Square-Root Jump Diffusion (SRJD) model 4 | # 5 | # (c) Dr. Yves J.
Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import pickle 10 | import numpy as np 11 | import scipy.interpolate as scint 12 | 13 | v0 = 17.6639 # initial VSTOXX index level 14 | 15 | # parameters of square-root diffusion 16 | kappa = 2.0 # speed of mean reversion 17 | theta = 15.0 # long-term volatility 18 | sigma = 1.0 # standard deviation coefficient 19 | 20 | # parameters of log-normal jump 21 | lamb = 0.4 # intensity (jumps per year) 22 | mu = 0.4 # average jump size 23 | delta = 0.1 # volatility of jump size 24 | 25 | # general parameters 26 | r = 0.01 # risk-free interest rate 27 | K = 17.5 # strike 28 | T = 0.5 # time horizon 29 | M = 150 # time steps 30 | I = 10000 # number of MCS paths 31 | anti_paths = True # antithetic variates 32 | mo_match = True # moment matching 33 | 34 | 35 | # deterministic shift parameters 36 | varphi = pickle.load(open('varphi', 'rb')) 37 | tck = scint.splrep(varphi['ttms'], varphi['varphi'], k=1) 38 | # linear splines interpolation of 39 | # term structure calibration differences 40 | 41 | 42 | def random_number_gen(M, I, fixed_seed=False): 43 | ''' Generate standard normally distributed pseudo-random numbers 44 | 45 | Parameters 46 | ========== 47 | M: int 48 | number of time intervals 49 | I: int 50 | number of paths 51 | 52 | Returns 53 | ======= 54 | ran: NumPy ndarrayo object 55 | random number array 56 | ''' 57 | if fixed_seed is True: 58 | np.random.seed(10000) 59 | if anti_paths is True: 60 | ran = np.random.standard_normal((M + 1, int(I / 2))) 61 | ran = np.concatenate((ran, -ran), axis=1) 62 | else: 63 | ran = np.standard_normal((M + 1, I)) 64 | if mo_match is True: 65 | ran = ran / np.std(ran) 66 | ran -= np.mean(ran) 67 | return ran 68 | 69 | 70 | def srjd_simulation(x0, kappa, theta, sigma, 71 | lamb, mu, delta, T, M, I, fixed_seed=False): 72 | ''' Function to simulate square-root jump Difusion. 73 | 74 | Parameters 75 | ========== 76 | x0: float 77 | initial value 78 | kappa: float 79 | mean-reversion factor 80 | theta: float 81 | long-run mean 82 | sigma: float 83 | volatility factor 84 | lamb: float 85 | jump intensity 86 | mu: float 87 | expected jump size 88 | delta: float 89 | standard deviation of jump 90 | T: float 91 | time horizon/maturity 92 | M: int 93 | time steps 94 | I: int 95 | number of simulation paths 96 | 97 | Returns 98 | ======= 99 | x: NumPy ndarray object 100 | array with simulated SRJD paths 101 | ''' 102 | dt = float(T) / M # time interval 103 | shift = scint.splev(np.arange(M + 1) * dt, tck, der=0) 104 | # deterministic shift values 105 | xh = np.zeros((M + 1, I), dtype=np.float) 106 | x = np.zeros((M + 1, I), dtype=np.float) 107 | xh[0, :] = x0 108 | x[0, :] = x0 109 | # drift contribution of jump p.a. 
110 | rj = lamb * (math.exp(mu + 0.5 * delta ** 2) - 1) 111 | # 1st matrix with standard normal rv 112 | ran1 = random_number_gen(M + 1, I, fixed_seed) 113 | # 2nd matrix with standard normal rv 114 | ran2 = random_number_gen(M + 1, I, fixed_seed) 115 | # matrix with Poisson distributed rv 116 | ran3 = np.random.poisson(lamb * dt, (M + 1, I)) 117 | for t in range(1, M + 1): 118 | xh[t, :] = (xh[t - 1, :] + 119 | kappa * (theta - np.maximum(0, xh[t - 1, :])) * dt 120 | + np.sqrt(np.maximum(0, xh[t - 1, :])) * sigma 121 | * ran1[t] * np.sqrt(dt) 122 | + (np.exp(mu + delta * ran2[t]) - 1) * ran3[t] 123 | * np.maximum(0, xh[t - 1, :]) - rj * dt) 124 | x[t, :] = np.maximum(0, xh[t, :]) + shift[t] 125 | return x 126 | 127 | 128 | def srjd_call_valuation(v0, kappa, theta, sigma, 129 | lamb, mu, delta, T, r, K, M=M, I=I, 130 | fixed_seed=False): 131 | ''' Function to value European volatility call option in SRDJ model. 132 | Parameters see function srjd_simulation. 133 | 134 | Returns 135 | ======= 136 | call_value: float 137 | estimator for European call present value for strike K 138 | ''' 139 | v = srjd_simulation(v0, kappa, theta, sigma, 140 | lamb, mu, delta, T, M, I, fixed_seed) 141 | call_value = np.exp(-r * T) * sum(np.maximum(v[-1] - K, 0)) / I 142 | return call_value 143 | 144 | if __name__ == '__main__': 145 | call_value = srjd_call_valuation(v0, kappa, theta, sigma, 146 | lamb, mu, delta, T, r, K, M, I) 147 | print("Value of European call by MCS: %10.4f" % call_value) 148 | -------------------------------------------------------------------------------- /code/scripts/srd_simulation_results.py: -------------------------------------------------------------------------------- 1 | # 2 | # Valuation of European volatility options 3 | # by Monte Carlo simulation in 4 | # Gruenbichler and Longstaff (1996) model 5 | # -- Creating a database for simulation results 6 | # with pandas and PyTables 7 | # 8 | # (c) Dr. Yves J. Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import matplotlib.pyplot as plt 15 | 16 | # filname for HDFStore to save results 17 | filename = "../data/simulation_results.h5" 18 | 19 | 20 | def write_results(sim_results, name, SEED, runs, steps, paths, mo_match, 21 | anti_paths, l, PY1, PY2, errors, error_ratio, 22 | abs_errors, rel_errors, t1, t2, d1, d2): 23 | ''' Appends simulation results to pandas DataFrame df and returns it. 24 | 25 | Parameters 26 | ========== 27 | see srd_simulation_analysis.py 28 | 29 | Returns 30 | ======= 31 | df: pandas DataFrame object 32 | updated results object 33 | ''' 34 | results = { 35 | 'sim_name': name, 36 | 'seed': SEED, 37 | 'runs': runs, 38 | 'time_steps': steps, 39 | 'paths': paths, 40 | 'mo_match': mo_match, 41 | 'anti_paths': anti_paths, 42 | 'opt_prices': l, 43 | 'abs_tol': PY1, 44 | 'rel_tol': PY2, 45 | 'errors': errors, 46 | 'error_ratio': error_ratio, 47 | 'aval_err': sum(abs_errors) / l, 48 | 'abal_err': sum(abs(rel_errors)) / l, 49 | 'time_sec': t1, 50 | 'time_min': t2, 51 | 'time_opt': t1 / l, 52 | 'start_date': d1, 53 | 'end_date': d2 54 | } 55 | df = pd.concat([sim_results, 56 | pd.DataFrame([results])], 57 | ignore_index=True) 58 | return df 59 | 60 | 61 | def write_to_database(sim_results, filename=filename): 62 | ''' Write pandas DataFrame sim_results to HDFStore object. 
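    The store is opened in append mode ('a'), so repeated calls accumulate rows
    in the 'sim_results' table; min_itemsize reserves a minimum width of 30 for
    stored string values so that longer entries from later runs still fit.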
63 | 64 | Parameters 65 | ========== 66 | sim_results: pandas DataFrame object 67 | object with simulation results 68 | filename: string 69 | name of the file for storage 70 | ''' 71 | h5 = pd.HDFStore(filename, 'a') 72 | h5.append('sim_results', sim_results, min_itemsize={'values': 30}) 73 | h5.close() 74 | 75 | 76 | def print_results(filename=filename, idl=0, idh=50): 77 | ''' prints valuation results in detailed form. 78 | 79 | Parameters 80 | ========== 81 | filename: string 82 | HDFStore with pandas.DataFrame with results 83 | idl: int 84 | start index value 85 | idh: int 86 | stop index value 87 | ''' 88 | h5 = pd.HDFStore(filename, 'r') 89 | sim_results = h5['sim_results'] 90 | br = "----------------------------------------------------" 91 | for i in range(idl, min(len(sim_results), idh + 1)): 92 | row = sim_results.iloc[i] 93 | print(br) 94 | print("Start Calculations %32s" % row['start_date'] + "\n" + br) 95 | print("ID Number %32d" % i) 96 | print("Name of Simulation %32s" % row['sim_name']) 97 | print("Seed Value for RNG %32d" % row['seed']) 98 | print("Number of Runs %32d" % row['runs']) 99 | print("Time Steps %32d" % row['time_steps']) 100 | print("Paths %32d" % row['paths']) 101 | print("Moment Matching %32s" % row['mo_match']) 102 | print("Antithetic Paths %32s" % row['anti_paths'] + "\n") 103 | print("Option Prices %32d" % row['opt_prices']) 104 | print("Absolute Tolerance %32.4f" % row['abs_tol']) 105 | print("Relative Tolerance %32.4f" % row['rel_tol']) 106 | print("Errors %32d" % row['errors']) 107 | print("Error Ratio %32.4f" % row['error_ratio'] + "\n") 108 | print("Aver Val Error %32.4f" % row['aval_err']) 109 | print("Aver Abs Val Error %32.4f" % row['abal_err']) 110 | print("Time in Seconds %32.4f" % row['time_sec']) 111 | print("Time in Minutes %32.4f" % row['time_min']) 112 | print("Time per Option %32.4f" % row['time_opt'] + "\n" + br) 113 | print("End Calculations %32s" % row['end_date'] \ 114 | + "\n" + br + "\n") 115 | print("Total number of rows in table %d" % len(sim_results)) 116 | h5.close() 117 | 118 | 119 | def plot_error_ratio(filename=filename): 120 | ''' Show error ratio vs. paths * time_steps (i.e. granularity). 121 | 122 | Parameters 123 | ========== 124 | filename: string 125 | name of file with data to be plotted 126 | ''' 127 | h5 = pd.HDFStore(filename, mode='r') 128 | sim_results = h5['sim_results'] 129 | x = np.array(sim_results['paths'] * sim_results['time_steps'], dtype='d') 130 | x = x / max(x) 131 | y = sim_results['error_ratio'] 132 | plt.plot(x, y, 'bo', label='error ratio') 133 | rg = np.polyfit(x, y, deg=1) 134 | plt.plot(np.sort(x), np.polyval(rg, np.sort(x)), 'r', label='regression', 135 | linewidth=2) 136 | plt.xlabel('time steps * paths (normalized)') 137 | plt.ylabel('errors / option valuations') 138 | plt.legend() 139 | plt.grid(True) 140 | h5.close() 141 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srd_simulation_results.py: -------------------------------------------------------------------------------- 1 | # 2 | # Valuation of European volatility options 3 | # by Monte Carlo simulation in 4 | # Gruenbichler and Longstaff (1996) model 5 | # -- Creating a database for simulation results 6 | # with pandas and PyTables 7 | # 8 | # (c) Dr. Yves J. 
Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import matplotlib.pyplot as plt 15 | 16 | # filname for HDFStore to save results 17 | filename = "../data/simulation_results.h5" 18 | 19 | 20 | def write_results(sim_results, name, SEED, runs, steps, paths, mo_match, 21 | anti_paths, l, PY1, PY2, errors, error_ratio, 22 | abs_errors, rel_errors, t1, t2, d1, d2): 23 | ''' Appends simulation results to pandas DataFrame df and returns it. 24 | 25 | Parameters 26 | ========== 27 | see srd_simulation_analysis.py 28 | 29 | Returns 30 | ======= 31 | df: pandas DataFrame object 32 | updated results object 33 | ''' 34 | results = { 35 | 'sim_name': name, 36 | 'seed': SEED, 37 | 'runs': runs, 38 | 'time_steps': steps, 39 | 'paths': paths, 40 | 'mo_match': mo_match, 41 | 'anti_paths': anti_paths, 42 | 'opt_prices': l, 43 | 'abs_tol': PY1, 44 | 'rel_tol': PY2, 45 | 'errors': errors, 46 | 'error_ratio': error_ratio, 47 | 'aval_err': sum(abs_errors) / l, 48 | 'abal_err': sum(abs(rel_errors)) / l, 49 | 'time_sec': t1, 50 | 'time_min': t2, 51 | 'time_opt': t1 / l, 52 | 'start_date': d1, 53 | 'end_date': d2 54 | } 55 | df = pd.concat([sim_results, 56 | pd.DataFrame([results])], 57 | ignore_index=True) 58 | return df 59 | 60 | 61 | def write_to_database(sim_results, filename=filename): 62 | ''' Write pandas DataFrame sim_results to HDFStore object. 63 | 64 | Parameters 65 | ========== 66 | sim_results: pandas DataFrame object 67 | object with simulation results 68 | filename: string 69 | name of the file for storage 70 | ''' 71 | h5 = pd.HDFStore(filename, 'a') 72 | h5.append('sim_results', sim_results, min_itemsize={'values': 30}) 73 | h5.close() 74 | 75 | 76 | def print_results(filename=filename, idl=0, idh=50): 77 | ''' prints valuation results in detailed form. 
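    Each selected row is rendered as a fixed-width report covering the run
    metadata (name, seed, runs, time steps, paths, variance reduction settings),
    the error statistics and the timing figures recorded by write_results.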
78 | 79 | Parameters 80 | ========== 81 | filename: string 82 | HDFStore with pandas.DataFrame with results 83 | idl: int 84 | start index value 85 | idh: int 86 | stop index value 87 | ''' 88 | h5 = pd.HDFStore(filename, 'r') 89 | sim_results = h5['sim_results'] 90 | br = "----------------------------------------------------" 91 | for i in range(idl, min(len(sim_results), idh + 1)): 92 | row = sim_results.iloc[i] 93 | print(br) 94 | print("Start Calculations %32s" % row['start_date'] + "\n" + br) 95 | print("ID Number %32d" % i) 96 | print("Name of Simulation %32s" % row['sim_name']) 97 | print("Seed Value for RNG %32d" % row['seed']) 98 | print("Number of Runs %32d" % row['runs']) 99 | print("Time Steps %32d" % row['time_steps']) 100 | print("Paths %32d" % row['paths']) 101 | print("Moment Matching %32s" % row['mo_match']) 102 | print("Antithetic Paths %32s" % row['anti_paths'] + "\n") 103 | print("Option Prices %32d" % row['opt_prices']) 104 | print("Absolute Tolerance %32.4f" % row['abs_tol']) 105 | print("Relative Tolerance %32.4f" % row['rel_tol']) 106 | print("Errors %32d" % row['errors']) 107 | print("Error Ratio %32.4f" % row['error_ratio'] + "\n") 108 | print("Aver Val Error %32.4f" % row['aval_err']) 109 | print("Aver Abs Val Error %32.4f" % row['abal_err']) 110 | print("Time in Seconds %32.4f" % row['time_sec']) 111 | print("Time in Minutes %32.4f" % row['time_min']) 112 | print("Time per Option %32.4f" % row['time_opt'] + "\n" + br) 113 | print("End Calculations %32s" % row['end_date'] \ 114 | + "\n" + br + "\n") 115 | print("Total number of rows in table %d" % len(sim_results)) 116 | h5.close() 117 | 118 | 119 | def plot_error_ratio(filename=filename): 120 | ''' Show error ratio vs. paths * time_steps (i.e. granularity). 121 | 122 | Parameters 123 | ========== 124 | filename: string 125 | name of file with data to be plotted 126 | ''' 127 | h5 = pd.HDFStore(filename, mode='r') 128 | sim_results = h5['sim_results'] 129 | x = np.array(sim_results['paths'] * sim_results['time_steps'], dtype='d') 130 | x = x / max(x) 131 | y = sim_results['error_ratio'] 132 | plt.plot(x, y, 'bo', label='error ratio') 133 | rg = np.polyfit(x, y, deg=1) 134 | plt.plot(np.sort(x), np.polyval(rg, np.sort(x)), 'r', label='regression', 135 | linewidth=2) 136 | plt.xlabel('time steps * paths (normalized)') 137 | plt.ylabel('errors / option valuations') 138 | plt.legend() 139 | plt.grid(True) 140 | h5.close() 141 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srd_simulation_results.py: -------------------------------------------------------------------------------- 1 | # 2 | # Valuation of European volatility options 3 | # by Monte Carlo simulation in 4 | # Gruenbichler and Longstaff (1996) model 5 | # -- Creating a database for simulation results 6 | # with pandas and PyTables 7 | # 8 | # (c) Dr. Yves J. Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import matplotlib.pyplot as plt 15 | 16 | # filname for HDFStore to save results 17 | filename = "../data/simulation_results.h5" 18 | 19 | 20 | def write_results(sim_results, name, SEED, runs, steps, paths, mo_match, 21 | anti_paths, l, PY1, PY2, errors, error_ratio, 22 | abs_errors, rel_errors, t1, t2, d1, d2): 23 | ''' Appends simulation results to pandas DataFrame df and returns it. 
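    Each call adds a single row of aggregate statistics for one complete
    simulation run (error counts and ratios, average valuation errors, timings);
    the accumulated rows are persisted separately via write_to_database.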
24 | 25 | Parameters 26 | ========== 27 | see srd_simulation_analysis.py 28 | 29 | Returns 30 | ======= 31 | df: pandas DataFrame object 32 | updated results object 33 | ''' 34 | results = { 35 | 'sim_name': name, 36 | 'seed': SEED, 37 | 'runs': runs, 38 | 'time_steps': steps, 39 | 'paths': paths, 40 | 'mo_match': mo_match, 41 | 'anti_paths': anti_paths, 42 | 'opt_prices': l, 43 | 'abs_tol': PY1, 44 | 'rel_tol': PY2, 45 | 'errors': errors, 46 | 'error_ratio': error_ratio, 47 | 'aval_err': sum(abs_errors) / l, 48 | 'abal_err': sum(abs(rel_errors)) / l, 49 | 'time_sec': t1, 50 | 'time_min': t2, 51 | 'time_opt': t1 / l, 52 | 'start_date': d1, 53 | 'end_date': d2 54 | } 55 | df = pd.concat([sim_results, 56 | pd.DataFrame([results])], 57 | ignore_index=True) 58 | return df 59 | 60 | 61 | def write_to_database(sim_results, filename=filename): 62 | ''' Write pandas DataFrame sim_results to HDFStore object. 63 | 64 | Parameters 65 | ========== 66 | sim_results: pandas DataFrame object 67 | object with simulation results 68 | filename: string 69 | name of the file for storage 70 | ''' 71 | h5 = pd.HDFStore(filename, 'a') 72 | h5.append('sim_results', sim_results, min_itemsize={'values': 30}, 73 | ignore_index=True) 74 | h5.close() 75 | 76 | 77 | def print_results(filename=filename, idl=0, idh=50): 78 | ''' Prints valuation results in detailed form. 79 | 80 | Parameters 81 | ========== 82 | filename: string 83 | HDFStore with pandas.DataFrame with results 84 | idl: int 85 | start index value 86 | idh: int 87 | stop index value 88 | ''' 89 | h5 = pd.HDFStore(filename, 'r') 90 | sim_results = h5['sim_results'] 91 | br = "----------------------------------------------------" 92 | for i in range(idl, min(len(sim_results), idh + 1)): 93 | row = sim_results.iloc[i] 94 | print br 95 | print "Start Calculations %32s" % row['start_date'] + "\n" + br 96 | print "ID Number %32d" % i 97 | print "Name of Simulation %32s" % row['sim_name'] 98 | print "Seed Value for RNG %32d" % row['seed'] 99 | print "Number of Runs %32d" % row['runs'] 100 | print "Time Steps %32d" % row['time_steps'] 101 | print "Paths %32d" % row['paths'] 102 | print "Moment Matching %32s" % row['mo_match'] 103 | print "Antithetic Paths %32s" % row['anti_paths'] + "\n" 104 | print "Option Prices %32d" % row['opt_prices'] 105 | print "Absolute Tolerance %32.4f" % row['abs_tol'] 106 | print "Relative Tolerance %32.4f" % row['rel_tol'] 107 | print "Errors %32d" % row['errors'] 108 | print "Error Ratio %32.4f" % row['error_ratio'] + "\n" 109 | print "Aver Val Error %32.4f" % row['aval_err'] 110 | print "Aver Abs Val Error %32.4f" % row['abal_err'] 111 | print "Time in Seconds %32.4f" % row['time_sec'] 112 | print "Time in Minutes %32.4f" % row['time_min'] 113 | print "Time per Option %32.4f" % row['time_opt'] + "\n" + br 114 | print "End Calculations %32s" % row['end_date'] \ 115 | + "\n" + br + "\n" 116 | print "Total number of rows in table %d" % len(sim_results) 117 | h5.close() 118 | 119 | 120 | def plot_error_ratio(filename=filename): 121 | ''' Show error ratio vs. paths * time_steps (i.e. granularity). 
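    The error ratio is the number of tolerance violations divided by the number
    of options valued in a run; it is plotted against time_steps * paths
    (normalized by its maximum) together with a linear regression line to
    indicate how the ratio behaves as the simulation effort grows.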
122 | 123 | Parameters 124 | ========== 125 | filename: string 126 | name of file with data to be plotted 127 | ''' 128 | h5 = pd.HDFStore(filename, mode='r') 129 | sim_results = h5['sim_results'] 130 | x = np.array(sim_results['paths'] * sim_results['time_steps'], dtype='d') 131 | x = x / max(x) 132 | y = sim_results['error_ratio'] 133 | plt.plot(x, y, 'bo', label='error ratio') 134 | rg = np.polyfit(x, y, deg=1) 135 | plt.plot(np.sort(x), np.polyval(rg, np.sort(x)), 'r', label='regression', 136 | linewidth=2) 137 | plt.xlabel('time steps * paths (normalized)') 138 | plt.ylabel('errors / option valuations') 139 | plt.legend() 140 | plt.grid(True) 141 | h5.close() 142 | -------------------------------------------------------------------------------- /code/scripts/srd_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Gruenbichler and Longstaff (1996) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import scipy.stats as scs 11 | 12 | 13 | def futures_price(v0, kappa, theta, zeta, T): 14 | ''' Futures pricing formula in GL96 model. 15 | 16 | Parameters 17 | ========== 18 | v0: float (positive) 19 | current volatility level 20 | kappa: float (positive) 21 | mean-reversion factor 22 | theta: float (positive) 23 | long-run mean of volatility 24 | zeta: float (positive) 25 | volatility risk premium 26 | T: float (positive) 27 | time-to-maturity 28 | 29 | Returns 30 | ======= 31 | future: float 32 | price of a future 33 | ''' 34 | alpha = kappa * theta 35 | beta = kappa + zeta 36 | future = (alpha / beta * (1 - math.exp(-beta * T)) 37 | + math.exp(-beta * T) * v0) 38 | return future 39 | 40 | 41 | def cx(K, gamma, nu, lamb): 42 | ''' Complementary distribution function of non-central chi-squared density. 
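    In the GL96 call pricing formula below, this function is evaluated three
    times, with nu + 4, nu + 2 and nu degrees of freedom, and the discounted
    call value combines the three terms (see call_price).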
43 | 44 | Parameters 45 | ========== 46 | K: float (positive) 47 | strike price 48 | gamma: float (positive) 49 | as defined in the GL96 model 50 | nu: float (positive) 51 | degrees of freedom 52 | lamb: float (positive) 53 | non-centrality parameter 54 | 55 | Returns 56 | ======= 57 | complementary distribution of nc cs density 58 | ''' 59 | return 1 - scs.ncx2.cdf(gamma * K, nu, lamb) 60 | 61 | 62 | def call_price(v0, kappa, theta, sigma, zeta, T, r, K): 63 | ''' Call option pricing formula in GL96 Model 64 | 65 | Parameters 66 | ========== 67 | v0: float (positive) 68 | current volatility level 69 | kappa: float (positive) 70 | mean-reversion factor 71 | theta: float (positive) 72 | long-run mean of volatility 73 | sigma: float (positive) 74 | volatility of volatility 75 | zeta: float (positive) 76 | volatility risk premium 77 | T: float (positive) 78 | time-to-maturity 79 | r: float (positive) 80 | risk-free short rate 81 | K: float(positive) 82 | strike price of the option 83 | 84 | Returns 85 | ======= 86 | call: float 87 | present value of European call option 88 | ''' 89 | D = math.exp(-r * T) # discount factor 90 | 91 | alpha = kappa * theta 92 | beta = kappa + zeta 93 | gamma = 4 * beta / (sigma ** 2 * (1 - math.exp(-beta * T))) 94 | nu = 4 * alpha / sigma ** 2 95 | lamb = gamma * math.exp(-beta * T) * v0 96 | 97 | # the pricing formula 98 | call = (D * math.exp(-beta * T) * v0 * cx(K, gamma, nu + 4, lamb) 99 | + D * (alpha / beta) * (1 - math.exp(-beta * T)) 100 | * cx(K, gamma, nu + 2, lamb) 101 | - D * K * cx(K, gamma, nu, lamb)) 102 | return call 103 | 104 | 105 | def generate_paths(x0, kappa, theta, sigma, T, M, I): 106 | ''' Simulation of square-root diffusion with exact discretization 107 | 108 | Parameters 109 | ========== 110 | x0: float (positive) 111 | starting value 112 | kappa: float (positive) 113 | mean-reversion factor 114 | theta: float (positive) 115 | long-run mean 116 | sigma: float (positive) 117 | volatility (of volatility) 118 | T: float (positive) 119 | time-to-maturity 120 | M: int 121 | number of time intervals 122 | I: int 123 | number of simulation paths 124 | 125 | Returns 126 | ======= 127 | x: NumPy ndarray object 128 | simulated paths 129 | ''' 130 | dt = float(T) / M 131 | x = np.zeros((M + 1, I), dtype=np.float) 132 | x[0, :] = x0 133 | # matrix filled with standard normal distributed rv 134 | ran = np.random.standard_normal((M + 1, I)) 135 | d = 4 * kappa * theta / sigma ** 2 136 | # constant factor in the integrated process of x 137 | c = (sigma ** 2 * (1 - math.exp(-kappa * dt))) / (4 * kappa) 138 | if d > 1: 139 | for t in range(1, M + 1): 140 | # non-centrality parameter 141 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 142 | # matrix with chi-squared distributed rv 143 | chi = np.random.chisquare(d - 1, I) 144 | x[t, :] = c * ((ran[t] + np.sqrt(l)) ** 2 + chi) 145 | else: 146 | for t in range(1, M + 1): 147 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 148 | N = np.random.poisson(l / 2, I) 149 | chi = np.random.chisquare(d + 2 * N, I) 150 | x[t, :] = c * chi 151 | return x 152 | 153 | 154 | def call_estimator(v0, kappa, theta, sigma, T, r, K, M, I): 155 | ''' Estimation of European call option price in GL96 Model 156 | via Monte Carlo simulation 157 | 158 | Parameters 159 | ========== 160 | v0: float (positive) 161 | current volatility level 162 | kappa: float (positive) 163 | mean-reversion factor 164 | theta: float (positive) 165 | long-run mean of volatility 166 | sigma: float (positive) 167 | volatility of volatility 168 | T: float 
(positive) 169 | time-to-maturity 170 | r: float (positive) 171 | risk-free short rate 172 | K: float (positive) 173 | strike price of the option 174 | M: int 175 | number of time intervals 176 | I: int 177 | number of simulation paths 178 | 179 | Returns 180 | ======= 181 | callvalue: float 182 | MCS estimator for European call option 183 | ''' 184 | V = generate_paths(v0, kappa, theta, sigma, T, M, I) 185 | callvalue = math.exp(-r * T) * np.sum(np.maximum(V[-1] - K, 0)) / I 186 | return callvalue 187 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srd_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Gruenbichler and Longstaff (1996) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import scipy.stats as scs 11 | 12 | 13 | def futures_price(v0, kappa, theta, zeta, T): 14 | ''' Futures pricing formula in GL96 model. 15 | 16 | Parameters 17 | ========== 18 | v0: float (positive) 19 | current volatility level 20 | kappa: float (positive) 21 | mean-reversion factor 22 | theta: float (positive) 23 | long-run mean of volatility 24 | zeta: float (positive) 25 | volatility risk premium 26 | T: float (positive) 27 | time-to-maturity 28 | 29 | Returns 30 | ======= 31 | future: float 32 | price of a future 33 | ''' 34 | alpha = kappa * theta 35 | beta = kappa + zeta 36 | future = (alpha / beta * (1 - math.exp(-beta * T)) 37 | + math.exp(-beta * T) * v0) 38 | return future 39 | 40 | 41 | def cx(K, gamma, nu, lamb): 42 | ''' Complementary distribution function of non-central chi-squared density. 43 | 44 | Parameters 45 | ========== 46 | K: float (positive) 47 | strike price 48 | gamma: float (positive) 49 | as defined in the GL96 model 50 | nu: float (positive) 51 | degrees of freedom 52 | lamb: float (positive) 53 | non-centrality parameter 54 | 55 | Returns 56 | ======= 57 | complementary distribution of nc cs density 58 | ''' 59 | return 1 - scs.ncx2.cdf(gamma * K, nu, lamb) 60 | 61 | 62 | def call_price(v0, kappa, theta, sigma, zeta, T, r, K): 63 | ''' Call option pricing formula in GL96 Model 64 | 65 | Parameters 66 | ========== 67 | v0: float (positive) 68 | current volatility level 69 | kappa: float (positive) 70 | mean-reversion factor 71 | theta: float (positive) 72 | long-run mean of volatility 73 | sigma: float (positive) 74 | volatility of volatility 75 | zeta: float (positive) 76 | volatility risk premium 77 | T: float (positive) 78 | time-to-maturity 79 | r: float (positive) 80 | risk-free short rate 81 | K: float(positive) 82 | strike price of the option 83 | 84 | Returns 85 | ======= 86 | call: float 87 | present value of European call option 88 | ''' 89 | D = math.exp(-r * T) # discount factor 90 | 91 | alpha = kappa * theta 92 | beta = kappa + zeta 93 | gamma = 4 * beta / (sigma ** 2 * (1 - math.exp(-beta * T))) 94 | nu = 4 * alpha / sigma ** 2 95 | lamb = gamma * math.exp(-beta * T) * v0 96 | 97 | # the pricing formula 98 | call = (D * math.exp(-beta * T) * v0 * cx(K, gamma, nu + 4, lamb) 99 | + D * (alpha / beta) * (1 - math.exp(-beta * T)) 100 | * cx(K, gamma, nu + 2, lamb) 101 | - D * K * cx(K, gamma, nu, lamb)) 102 | return call 103 | 104 | 105 | def generate_paths(x0, kappa, theta, sigma, T, M, I): 106 | ''' Simulation of square-root diffusion with exact discretization 107 | 108 | Parameters 109 | ========== 110 | x0: float (positive) 111 | 
starting value 112 | kappa: float (positive) 113 | mean-reversion factor 114 | theta: float (positive) 115 | long-run mean 116 | sigma: float (positive) 117 | volatility (of volatility) 118 | T: float (positive) 119 | time-to-maturity 120 | M: int 121 | number of time intervals 122 | I: int 123 | number of simulation paths 124 | 125 | Returns 126 | ======= 127 | x: NumPy ndarray object 128 | simulated paths 129 | ''' 130 | dt = float(T) / M 131 | x = np.zeros((M + 1, I), dtype=np.float) 132 | x[0, :] = x0 133 | # matrix filled with standard normal distributed rv 134 | ran = np.random.standard_normal((M + 1, I)) 135 | d = 4 * kappa * theta / sigma ** 2 136 | # constant factor in the integrated process of x 137 | c = (sigma ** 2 * (1 - math.exp(-kappa * dt))) / (4 * kappa) 138 | if d > 1: 139 | for t in range(1, M + 1): 140 | # non-centrality parameter 141 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 142 | # matrix with chi-squared distributed rv 143 | chi = np.random.chisquare(d - 1, I) 144 | x[t, :] = c * ((ran[t] + np.sqrt(l)) ** 2 + chi) 145 | else: 146 | for t in range(1, M + 1): 147 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 148 | N = np.random.poisson(l / 2, I) 149 | chi = np.random.chisquare(d + 2 * N, I) 150 | x[t, :] = c * chi 151 | return x 152 | 153 | 154 | def call_estimator(v0, kappa, theta, sigma, T, r, K, M, I): 155 | ''' Estimation of European call option price in GL96 Model 156 | via Monte Carlo simulation 157 | 158 | Parameters 159 | ========== 160 | v0: float (positive) 161 | current volatility level 162 | kappa: float (positive) 163 | mean-reversion factor 164 | theta: float (positive) 165 | long-run mean of volatility 166 | sigma: float (positive) 167 | volatility of volatility 168 | T: float (positive) 169 | time-to-maturity 170 | r: float (positive) 171 | risk-free short rate 172 | K: float (positive) 173 | strike price of the option 174 | M: int 175 | number of time intervals 176 | I: int 177 | number of simulation paths 178 | 179 | Returns 180 | ======= 181 | callvalue: float 182 | MCS estimator for European call option 183 | ''' 184 | V = generate_paths(v0, kappa, theta, sigma, T, M, I) 185 | callvalue = math.exp(-r * T) * np.sum(np.maximum(V[-1] - K, 0)) / I 186 | return callvalue 187 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srd_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions for 3 | # Gruenbichler and Longstaff (1996) model 4 | # 5 | # (c) Dr. Yves J. Hilpisch 6 | # Listed Volatility and Variance Derivatives 7 | # 8 | import math 9 | import numpy as np 10 | import scipy.stats as scs 11 | 12 | 13 | def futures_price(v0, kappa, theta, zeta, T): 14 | ''' Futures pricing formula in GL96 model. 15 | 16 | Parameters 17 | ========== 18 | v0: float (positive) 19 | current volatility level 20 | kappa: float (positive) 21 | mean-reversion factor 22 | theta: float (positive) 23 | long-run mean of volatility 24 | zeta: float (positive) 25 | volatility risk premium 26 | T: float (positive) 27 | time-to-maturity 28 | 29 | Returns 30 | ======= 31 | future: float 32 | price of a future 33 | ''' 34 | alpha = kappa * theta 35 | beta = kappa + zeta 36 | future = (alpha / beta * (1 - math.exp(-beta * T)) 37 | + math.exp(-beta * T) * v0) 38 | return future 39 | 40 | 41 | def cx(K, gamma, nu, lamb): 42 | ''' Complementary distribution function of non-central chi-squared density. 
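    Equivalently, this is the survival function of the non-central chi-squared
    distribution, so scipy.stats.ncx2.sf(gamma * K, nu, lamb) is a numerically
    equivalent alternative.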
43 | 44 | Parameters 45 | ========== 46 | K: float (positive) 47 | strike price 48 | gamma: float (positive) 49 | as defined in the GL96 model 50 | nu: float (positive) 51 | degrees of freedom 52 | lamb: float (positive) 53 | non-centrality parameter 54 | 55 | Returns 56 | ======= 57 | complementary distribution of nc cs density 58 | ''' 59 | return 1 - scs.ncx2.cdf(gamma * K, nu, lamb) 60 | 61 | 62 | def call_price(v0, kappa, theta, sigma, zeta, T, r, K): 63 | ''' Call option pricing formula in GL96 Model 64 | 65 | Parameters 66 | ========== 67 | v0: float (positive) 68 | current volatility level 69 | kappa: float (positive) 70 | mean-reversion factor 71 | theta: float (positive) 72 | long-run mean of volatility 73 | sigma: float (positive) 74 | volatility of volatility 75 | zeta: float (positive) 76 | volatility risk premium 77 | T: float (positive) 78 | time-to-maturity 79 | r: float (positive) 80 | risk-free short rate 81 | K: float(positive) 82 | strike price of the option 83 | 84 | Returns 85 | ======= 86 | call: float 87 | present value of European call option 88 | ''' 89 | D = math.exp(-r * T) # discount factor 90 | 91 | alpha = kappa * theta 92 | beta = kappa + zeta 93 | gamma = 4 * beta / (sigma ** 2 * (1 - math.exp(-beta * T))) 94 | nu = 4 * alpha / sigma ** 2 95 | lamb = gamma * math.exp(-beta * T) * v0 96 | 97 | # the pricing formula 98 | call = (D * math.exp(-beta * T) * v0 * cx(K, gamma, nu + 4, lamb) 99 | + D * (alpha / beta) * (1 - math.exp(-beta * T)) 100 | * cx(K, gamma, nu + 2, lamb) 101 | - D * K * cx(K, gamma, nu, lamb)) 102 | return call 103 | 104 | 105 | def generate_paths(x0, kappa, theta, sigma, T, M, I): 106 | ''' Simulation of square-root diffusion with exact discretization 107 | 108 | Parameters 109 | ========== 110 | x0: float (positive) 111 | starting value 112 | kappa: float (positive) 113 | mean-reversion factor 114 | theta: float (positive) 115 | long-run mean 116 | sigma: float (positive) 117 | volatility (of volatility) 118 | T: float (positive) 119 | time-to-maturity 120 | M: int 121 | number of time intervals 122 | I: int 123 | number of simulation paths 124 | 125 | Returns 126 | ======= 127 | x: NumPy ndarray object 128 | simulated paths 129 | ''' 130 | dt = float(T) / M 131 | x = np.zeros((M + 1, I), dtype=np.float) 132 | x[0, :] = x0 133 | # matrix filled with standard normal distributed rv 134 | ran = np.random.standard_normal((M + 1, I)) 135 | d = 4 * kappa * theta / sigma ** 2 136 | # constant factor in the integrated process of x 137 | c = (sigma ** 2 * (1 - math.exp(-kappa * dt))) / (4 * kappa) 138 | if d > 1: 139 | for t in range(1, M + 1): 140 | # non-centrality parameter 141 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 142 | # matrix with chi-squared distributed rv 143 | chi = np.random.chisquare(d - 1, I) 144 | x[t, :] = c * ((ran[t] + np.sqrt(l)) ** 2 + chi) 145 | else: 146 | for t in range(1, M + 1): 147 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 148 | N = np.random.poisson(l / 2, I) 149 | chi = np.random.chisquare(d + 2 * N, I) 150 | x[t, :] = c * chi 151 | return x 152 | 153 | 154 | def call_estimator(v0, kappa, theta, sigma, T, r, K, M, I): 155 | ''' Estimation of European call option price in GL96 Model 156 | via Monte Carlo simulation 157 | 158 | Parameters 159 | ========== 160 | v0: float (positive) 161 | current volatility level 162 | kappa: float (positive) 163 | mean-reversion factor 164 | theta: float (positive) 165 | long-run mean of volatility 166 | sigma: float (positive) 167 | volatility of volatility 168 | T: float 
(positive) 169 | time-to-maturity 170 | r: float (positive) 171 | risk-free short rate 172 | K: float (positive) 173 | strike price of the option 174 | M: int 175 | number of time intervals 176 | I: int 177 | number of simulation paths 178 | 179 | Returns 180 | ======= 181 | callvalue: float 182 | MCS estimator for European call option 183 | ''' 184 | V = generate_paths(v0, kappa, theta, sigma, T, M, I) 185 | callvalue = math.exp(-r * T) * np.sum(np.maximum(V[-1] - K, 0)) / I 186 | return callvalue 187 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of Gruenbichler and Longstaff (1996) 3 | # square-root diffusion model to 4 | # VSTOXX call options traded at Eurex 5 | # Data as of 31. March 2014 6 | # All data from www.eurexchange.com 7 | # 8 | # (c) Dr. Yves J. Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | from srd_functions import call_price 14 | import scipy.optimize as sco 15 | import matplotlib.pyplot as plt 16 | 17 | path = 'data/' 18 | 19 | # Fixed Parameters 20 | v0 = 17.6639 # VSTOXX index on 31. March 2014 21 | r = 0.01 # risk-less short rate 22 | zeta = 0. # volatility risk premium factor 23 | 24 | 25 | def read_select_quotes(path=path, tol=0.2): 26 | ''' Selects and read options quotes. 27 | 28 | Parameters 29 | ========== 30 | path: string 31 | path to file with option quotes 32 | 33 | Returns 34 | ======= 35 | option_data: pandas DataFrame object 36 | option data 37 | ''' 38 | h5 = pd.HDFStore(path + 'vstoxx_march_2014.h5', 'r') 39 | 40 | # read option data from file and close it 41 | option_data = h5['vstoxx_options'] 42 | h5.close() 43 | # select relevant date for call option quotes 44 | option_data = option_data[(option_data.DATE == '2014-3-31') 45 | & (option_data.TYPE == 'C')] 46 | # calculate time-to-maturity in year fractions 47 | option_data['TTM'] = (option_data.MATURITY - option_data.DATE).apply( 48 | lambda x: x / np.timedelta64(1, 'D') / 365.) 49 | 50 | # only those options close enough to the ATM level 51 | option_data = option_data[(option_data.STRIKE > (1 - tol) * v0) 52 | & (option_data.STRIKE < (1 + tol) * v0)] 53 | return option_data 54 | 55 | 56 | def valuation_function(p0): 57 | ''' Valuation function for set of strike prices 58 | 59 | Parameters 60 | ========== 61 | p0: list 62 | set of model parameters 63 | 64 | Returns 65 | ======= 66 | call_prices: NumPy ndarray object 67 | array of call prices 68 | ''' 69 | kappa, theta, sigma = p0 70 | call_prices = [] 71 | for strike in strikes: 72 | call_prices.append(call_price(v0, kappa, theta, 73 | sigma, zeta, ttm, r, strike)) 74 | call_prices = np.array(call_prices) 75 | return call_prices 76 | 77 | 78 | def error_function(p0): 79 | ''' Error function for model calibration. 80 | 81 | Parameters 82 | ========== 83 | p0: tuple 84 | set of model parameters 85 | 86 | Returns 87 | ======= 88 | MSE: float 89 | mean squared (relative/absolute) error 90 | ''' 91 | global i 92 | call_prices = valuation_function(p0) 93 | kappa, theta, sigma = p0 94 | pen = 0. 
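    # Note (added commentary, not part of the original script): the two checks
    # below implement a soft penalty for unconstrained optimization. The
    # condition 2 * kappa * theta >= sigma ** 2 is the Feller condition, which
    # keeps the square-root diffusion strictly positive; together with the
    # non-negativity of kappa, theta and sigma it defines the admissible
    # region, and any violating parameter vector has a large constant added to
    # its MSE so that sco.brute and sco.fmin are steered back towards
    # admissible values. A minimal sketch of the same idea as a helper (the
    # name feller_penalty is hypothetical and not used in the repository):
    #
    #     def feller_penalty(kappa, theta, sigma, pen=1000.0):
    #         admissible = (2 * kappa * theta >= sigma ** 2 and
    #                       min(kappa, theta, sigma) >= 0)
    #         return 0.0 if admissible else pen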
95 | if 2 * kappa * theta < sigma ** 2: 96 | pen = 1000.0 97 | if kappa < 0 or theta < 0 or sigma < 0: 98 | pen = 1000.0 99 | if relative is True: 100 | MSE = (np.sum(((call_prices - call_quotes) / call_quotes) ** 2) 101 | / len(call_quotes) + pen) 102 | else: 103 | MSE = np.sum((call_prices - call_quotes) ** 2) / len(call_quotes) + pen 104 | 105 | if i == 0: 106 | print ("{:>6s} {:>6s} {:>6s}".format('kappa', 'theta', 'sigma') 107 | + "{:>12s}".format('MSE')) 108 | 109 | # print intermediate results: every 100th iteration 110 | if i % 100 == 0: 111 | print "{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE) 112 | i += 1 113 | return MSE 114 | 115 | 116 | def model_calibration(option_data, rel=False, mat='2014-07-18'): 117 | ''' Function for global and local model calibration. 118 | 119 | Parameters 120 | ========== 121 | option_data: pandas DataFrame object 122 | option quotes to be used 123 | relative: boolean 124 | relative or absolute MSE 125 | maturity: string 126 | maturity of option quotes to calibrate to 127 | 128 | Returns 129 | ======= 130 | opt: tuple 131 | optimal parameter values 132 | ''' 133 | global relative # if True: MSRE is used, if False: MSAE 134 | global strikes 135 | global call_quotes 136 | global ttm 137 | global i 138 | 139 | relative = rel 140 | # only option quotes for a single maturity 141 | option_quotes = option_data[option_data.MATURITY == mat] 142 | 143 | # time-to-maturity from the data set 144 | ttm = option_quotes.iloc[0, -1] 145 | 146 | # transform strike column and price column in ndarray object 147 | strikes = option_quotes['STRIKE'].values 148 | call_quotes = option_quotes['PRICE'].values 149 | 150 | # global optimization 151 | i = 0 # counter for calibration iterations 152 | p0 = sco.brute(error_function, ((5.0, 20.1, 1.0), (10., 30.1, 1.25), 153 | (1.0, 9.1, 2.0)), finish=None) 154 | 155 | # local optimization 156 | i = 0 157 | opt = sco.fmin(error_function, p0, xtol=0.0000001, ftol=0.0000001, 158 | maxiter=1000, maxfun=1500) 159 | 160 | return opt 161 | 162 | 163 | def plot_calibration_results(opt): 164 | ''' Function to plot market quotes vs. model prices. 165 | 166 | Parameters 167 | ========== 168 | opt: list 169 | optimal parameters from calibration 170 | ''' 171 | callalues = valuation_function(opt) 172 | diffs = callalues - call_quotes 173 | plt.figure() 174 | plt.subplot(211) 175 | plt.plot(strikes, call_quotes, label='market quotes') 176 | plt.plot(strikes, callalues, 'ro', label='model prices') 177 | plt.ylabel('option values') 178 | plt.grid(True) 179 | plt.legend() 180 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 181 | 0.0, max(call_quotes) * 1.1]) 182 | plt.subplot(212) 183 | wi = 0.3 184 | plt.bar(strikes - wi / 2, diffs, width=wi) 185 | plt.grid(True) 186 | plt.xlabel('strike price') 187 | plt.ylabel('difference') 188 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 189 | min(diffs) * 1.1, max(diffs) * 1.1]) 190 | plt.tight_layout() 191 | 192 | if __name__ == '__main__': 193 | option_data = read_select_quotes() 194 | opt = model_calibration(option_data=option_data) 195 | -------------------------------------------------------------------------------- /code/scripts/srd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of Gruenbichler and Longstaff (1996) 3 | # square-root diffusion model to 4 | # VSTOXX call options traded at Eurex 5 | # Data as of 31. March 2014 6 | # All data from www.eurexchange.com 7 | # 8 | # (c) Dr. Yves J. 
Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | from srd_functions import call_price 14 | import scipy.optimize as sco 15 | import matplotlib.pyplot as plt 16 | 17 | path = 'data/' 18 | 19 | # Fixed Parameters 20 | v0 = 17.6639 # VSTOXX index on 31. March 2014 21 | r = 0.01 # risk-less short rate 22 | zeta = 0. # volatility risk premium factor 23 | 24 | 25 | def read_select_quotes(path=path, tol=0.2): 26 | ''' Selects and read options quotes. 27 | 28 | Parameters 29 | ========== 30 | path: string 31 | path to file with option quotes 32 | 33 | Returns 34 | ======= 35 | option_data: pandas DataFrame object 36 | option data 37 | ''' 38 | h5 = pd.HDFStore(path + 'vstoxx_march_2014.h5', 'r') 39 | 40 | # read option data from file and close it 41 | option_data = h5['vstoxx_options'] 42 | h5.close() 43 | # transform dates 44 | option_data['DATE'] = pd.to_datetime(option_data['DATE']) 45 | option_data['MATURITY'] = pd.to_datetime(option_data['MATURITY']) 46 | # select relevant date for call option quotes 47 | option_data = option_data[(option_data.DATE == '2014-3-31') 48 | & (option_data.TYPE == 'C')] 49 | # calculate time-to-maturity in year fractions 50 | option_data['TTM'] = (option_data.MATURITY - option_data.DATE).apply( 51 | lambda x: x / np.timedelta64(1, 'D') / 365.) 52 | 53 | # only those options close enough to the ATM level 54 | option_data = option_data[(option_data.STRIKE > (1 - tol) * v0) 55 | & (option_data.STRIKE < (1 + tol) * v0)] 56 | return option_data 57 | 58 | 59 | def valuation_function(p0): 60 | ''' Valuation function for set of strike prices 61 | 62 | Parameters 63 | ========== 64 | p0: list 65 | set of model parameters 66 | 67 | Returns 68 | ======= 69 | call_prices: NumPy ndarray object 70 | array of call prices 71 | ''' 72 | kappa, theta, sigma = p0 73 | call_prices = [] 74 | for strike in strikes: 75 | call_prices.append(call_price(v0, kappa, theta, 76 | sigma, zeta, ttm, r, strike)) 77 | call_prices = np.array(call_prices) 78 | return call_prices 79 | 80 | 81 | def error_function(p0): 82 | ''' Error function for model calibration. 83 | 84 | Parameters 85 | ========== 86 | p0: tuple 87 | set of model parameters 88 | 89 | Returns 90 | ======= 91 | MSE: float 92 | mean squared (relative/absolute) error 93 | ''' 94 | global i 95 | call_prices = valuation_function(p0) 96 | kappa, theta, sigma = p0 97 | pen = 0. 98 | if 2 * kappa * theta < sigma ** 2: 99 | pen = 1000.0 100 | if kappa < 0 or theta < 0 or sigma < 0: 101 | pen = 1000.0 102 | if relative is True: 103 | MSE = (np.sum(((call_prices - call_quotes) / call_quotes) ** 2) 104 | / len(call_quotes) + pen) 105 | else: 106 | MSE = np.sum((call_prices - call_quotes) ** 2) / len(call_quotes) + pen 107 | 108 | if i == 0: 109 | print("{:>6s} {:>6s} {:>6s}".format('kappa', 'theta', 'sigma') 110 | + "{:>12s}".format('MSE')) 111 | 112 | # print intermediate results: every 100th iteration 113 | if i % 100 == 0: 114 | print("{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE)) 115 | i += 1 116 | return MSE 117 | 118 | 119 | def model_calibration(option_data, rel=False, mat='2014-07-18'): 120 | ''' Function for global and local model calibration. 
121 | 122 | Parameters 123 | ========== 124 | option_data: pandas DataFrame object 125 | option quotes to be used 126 | relative: boolean 127 | relative or absolute MSE 128 | maturity: string 129 | maturity of option quotes to calibrate to 130 | 131 | Returns 132 | ======= 133 | opt: tuple 134 | optimal parameter values 135 | ''' 136 | global relative # if True: MSRE is used, if False: MSAE 137 | global strikes 138 | global call_quotes 139 | global ttm 140 | global i 141 | 142 | relative = rel 143 | # only option quotes for a single maturity 144 | option_quotes = option_data[option_data.MATURITY == mat] 145 | 146 | # time-to-maturity from the data set 147 | ttm = option_quotes.iloc[0, -1] 148 | 149 | # transform strike column and price column in ndarray object 150 | strikes = option_quotes['STRIKE'].values 151 | call_quotes = option_quotes['PRICE'].values 152 | 153 | # global optimization 154 | i = 0 # counter for calibration iterations 155 | p0 = sco.brute(error_function, ((5.0, 20.1, 1.0), (10., 30.1, 1.25), 156 | (1.0, 9.1, 2.0)), finish=None) 157 | 158 | # local optimization 159 | i = 0 160 | opt = sco.fmin(error_function, p0, xtol=0.0000001, ftol=0.0000001, 161 | maxiter=1000, maxfun=1500) 162 | 163 | return opt 164 | 165 | 166 | def plot_calibration_results(opt): 167 | ''' Function to plot market quotes vs. model prices. 168 | 169 | Parameters 170 | ========== 171 | opt: list 172 | optimal parameters from calibration 173 | ''' 174 | callalues = valuation_function(opt) 175 | diffs = callalues - call_quotes 176 | plt.figure() 177 | plt.subplot(211) 178 | plt.plot(strikes, call_quotes, label='market quotes') 179 | plt.plot(strikes, callalues, 'ro', label='model prices') 180 | plt.ylabel('option values') 181 | plt.grid(True) 182 | plt.legend() 183 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 184 | 0.0, max(call_quotes) * 1.1]) 185 | plt.subplot(212) 186 | wi = 0.3 187 | plt.bar(strikes, diffs, width=wi) 188 | plt.grid(True) 189 | plt.xlabel('strike price') 190 | plt.ylabel('difference') 191 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 192 | min(diffs) * 1.1, max(diffs) * 1.1]) 193 | plt.tight_layout() 194 | 195 | if __name__ == '__main__': 196 | option_data = read_select_quotes() 197 | opt = model_calibration(option_data=option_data) 198 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of Gruenbichler and Longstaff (1996) 3 | # square-root diffusion model to 4 | # VSTOXX call options traded at Eurex 5 | # Data as of 31. March 2014 6 | # All data from www.eurexchange.com 7 | # 8 | # (c) Dr. Yves J. Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import numpy as np 12 | import pandas as pd 13 | from srd_functions import call_price 14 | import scipy.optimize as sco 15 | import matplotlib.pyplot as plt 16 | 17 | path = 'data/' 18 | 19 | # Fixed Parameters 20 | v0 = 17.6639 # VSTOXX index on 31. March 2014 21 | r = 0.01 # risk-less short rate 22 | zeta = 0. # volatility risk premium factor 23 | 24 | 25 | def read_select_quotes(path=path, tol=0.2): 26 | ''' Selects and read options quotes. 
27 | 28 | Parameters 29 | ========== 30 | path: string 31 | path to file with option quotes 32 | 33 | Returns 34 | ======= 35 | option_data: pandas DataFrame object 36 | option data 37 | ''' 38 | h5 = pd.HDFStore(path + 'vstoxx_march_2014.h5', 'r') 39 | 40 | # read option data from file and close it 41 | option_data = h5['vstoxx_options'] 42 | h5.close() 43 | # transform dates 44 | option_data['DATE'] = pd.to_datetime(option_data['DATE']) 45 | option_data['MATURITY'] = pd.to_datetime(option_data['MATURITY']) 46 | # select relevant date for call option quotes 47 | option_data = option_data[(option_data.DATE == '2014-3-31') 48 | & (option_data.TYPE == 'C')] 49 | # calculate time-to-maturity in year fractions 50 | option_data['TTM'] = (option_data.MATURITY - option_data.DATE).apply( 51 | lambda x: x / np.timedelta64(1, 'D') / 365.) 52 | 53 | # only those options close enough to the ATM level 54 | option_data = option_data[(option_data.STRIKE > (1 - tol) * v0) 55 | & (option_data.STRIKE < (1 + tol) * v0)] 56 | return option_data 57 | 58 | 59 | def valuation_function(p0): 60 | ''' Valuation function for set of strike prices 61 | 62 | Parameters 63 | ========== 64 | p0: list 65 | set of model parameters 66 | 67 | Returns 68 | ======= 69 | call_prices: NumPy ndarray object 70 | array of call prices 71 | ''' 72 | kappa, theta, sigma = p0 73 | call_prices = [] 74 | for strike in strikes: 75 | call_prices.append(call_price(v0, kappa, theta, 76 | sigma, zeta, ttm, r, strike)) 77 | call_prices = np.array(call_prices) 78 | return call_prices 79 | 80 | 81 | def error_function(p0): 82 | ''' Error function for model calibration. 83 | 84 | Parameters 85 | ========== 86 | p0: tuple 87 | set of model parameters 88 | 89 | Returns 90 | ======= 91 | MSE: float 92 | mean squared (relative/absolute) error 93 | ''' 94 | global i 95 | call_prices = valuation_function(p0) 96 | kappa, theta, sigma = p0 97 | pen = 0. 98 | if 2 * kappa * theta < sigma ** 2: 99 | pen = 1000.0 100 | if kappa < 0 or theta < 0 or sigma < 0: 101 | pen = 1000.0 102 | if relative is True: 103 | MSE = (np.sum(((call_prices - call_quotes) / call_quotes) ** 2) 104 | / len(call_quotes) + pen) 105 | else: 106 | MSE = np.sum((call_prices - call_quotes) ** 2) / len(call_quotes) + pen 107 | 108 | if i == 0: 109 | print("{:>6s} {:>6s} {:>6s}".format('kappa', 'theta', 'sigma') 110 | + "{:>12s}".format('MSE')) 111 | 112 | # print intermediate results: every 100th iteration 113 | if i % 100 == 0: 114 | print("{:6.3f} {:6.3f} {:6.3f}".format(*p0) + "{:>12.5f}".format(MSE)) 115 | i += 1 116 | return MSE 117 | 118 | 119 | def model_calibration(option_data, rel=False, mat='2014-07-18'): 120 | ''' Function for global and local model calibration. 
121 | 122 | Parameters 123 | ========== 124 | option_data: pandas DataFrame object 125 | option quotes to be used 126 | relative: boolean 127 | relative or absolute MSE 128 | maturity: string 129 | maturity of option quotes to calibrate to 130 | 131 | Returns 132 | ======= 133 | opt: tuple 134 | optimal parameter values 135 | ''' 136 | global relative # if True: MSRE is used, if False: MSAE 137 | global strikes 138 | global call_quotes 139 | global ttm 140 | global i 141 | 142 | relative = rel 143 | # only option quotes for a single maturity 144 | option_quotes = option_data[option_data.MATURITY == mat] 145 | 146 | # time-to-maturity from the data set 147 | ttm = option_quotes.iloc[0, -1] 148 | 149 | # transform strike column and price column in ndarray object 150 | strikes = option_quotes['STRIKE'].values 151 | call_quotes = option_quotes['PRICE'].values 152 | 153 | # global optimization 154 | i = 0 # counter for calibration iterations 155 | p0 = sco.brute(error_function, ((5.0, 20.1, 1.0), (10., 30.1, 1.25), 156 | (1.0, 9.1, 2.0)), finish=None) 157 | 158 | # local optimization 159 | i = 0 160 | opt = sco.fmin(error_function, p0, xtol=0.0000001, ftol=0.0000001, 161 | maxiter=1000, maxfun=1500) 162 | 163 | return opt 164 | 165 | 166 | def plot_calibration_results(opt): 167 | ''' Function to plot market quotes vs. model prices. 168 | 169 | Parameters 170 | ========== 171 | opt: list 172 | optimal parameters from calibration 173 | ''' 174 | callalues = valuation_function(opt) 175 | diffs = callalues - call_quotes 176 | plt.figure() 177 | plt.subplot(211) 178 | plt.plot(strikes, call_quotes, label='market quotes') 179 | plt.plot(strikes, callalues, 'ro', label='model prices') 180 | plt.ylabel('option values') 181 | plt.grid(True) 182 | plt.legend() 183 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 184 | 0.0, max(call_quotes) * 1.1]) 185 | plt.subplot(212) 186 | wi = 0.3 187 | plt.bar(strikes, diffs, width=wi) 188 | plt.grid(True) 189 | plt.xlabel('strike price') 190 | plt.ylabel('difference') 191 | plt.axis([min(strikes) - 0.5, max(strikes) + 0.5, 192 | min(diffs) * 1.1, max(diffs) * 1.1]) 193 | plt.tight_layout() 194 | 195 | if __name__ == '__main__': 196 | option_data = read_select_quotes() 197 | opt = model_calibration(option_data=option_data) 198 | -------------------------------------------------------------------------------- /legacy/python3/03_simple_spanning_with_options.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"The
" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "# Listed Volatility and Variance Derivatives\n", 15 | "\n", 16 | "**Wiley Finance (2017)**\n", 17 | "\n", 18 | "Dr. Yves J. Hilpisch | The Python Quants GmbH\n", 19 | "\n", 20 | "http://tpq.io | [@dyjh](http://twitter.com/dyjh) | http://books.tpq.io\n", 21 | "\n", 22 | "\"Derivatives" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "# Model-Free Replication of Variance" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "metadata": {}, 35 | "source": [ 36 | "## Simple Spanning with Options" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "import numpy as np" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "S = np.array((20, 5))\n", 55 | "B = np.array((11, 11))" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "S0 = 10\n", 65 | "B0 = 10" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "M = np.array((S, B)).T\n", 75 | "M" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "M0 = np.array((S0, B0))" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "K = 14" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "C = np.maximum(S - K, 0)\n", 103 | "C" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": null, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "P = np.maximum(K - S, 0)\n", 113 | "P" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "phi_C = np.linalg.solve(M, C)\n", 123 | "phi_C" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "phi_P = np.linalg.solve(M, P)\n", 133 | "phi_P" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "C0 = np.dot(M0, phi_C)\n", 143 | "C0" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | "P0 = np.dot(M0, phi_P)\n", 153 | "P0" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": {}, 160 | "outputs": [], 161 | "source": [ 162 | "M_ = np.array((C, P)).T\n", 163 | "M_" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "M0_ = np.array((C0, P0))\n", 173 | "M0_" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "s1 = np.array((1, 0))\n", 183 | "s2 = np.array((0, 1))" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 
192 | "phi_s1 = np.linalg.solve(M_, s1)\n", 193 | "phi_s1" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": null, 199 | "metadata": {}, 200 | "outputs": [], 201 | "source": [ 202 | "phi_s2 = np.linalg.solve(M_, s2)\n", 203 | "phi_s2" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": null, 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "p1 = np.dot(M0_, phi_s1)\n", 213 | "p1" 214 | ] 215 | }, 216 | { 217 | "cell_type": "code", 218 | "execution_count": null, 219 | "metadata": {}, 220 | "outputs": [], 221 | "source": [ 222 | "p2 = np.dot(M0_, phi_s2)\n", 223 | "p2" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "M0_s = np.array((p1, p2))" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "g = np.array((13, 3))" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "g0 = np.dot(M0_s, g)\n", 251 | "g0" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "\"The
\n", 259 | "\n", 260 | "http://tpq.io | @dyjh | team@tpq.io" 261 | ] 262 | } 263 | ], 264 | "metadata": { 265 | "kernelspec": { 266 | "display_name": "Python 3", 267 | "language": "python", 268 | "name": "python3" 269 | }, 270 | "language_info": { 271 | "codemirror_mode": { 272 | "name": "ipython", 273 | "version": 3 274 | }, 275 | "file_extension": ".py", 276 | "mimetype": "text/x-python", 277 | "name": "python", 278 | "nbconvert_exporter": "python", 279 | "pygments_lexer": "ipython3", 280 | "version": "3.8.6" 281 | } 282 | }, 283 | "nbformat": 4, 284 | "nbformat_minor": 4 285 | } 286 | -------------------------------------------------------------------------------- /code/03_simple_spanning_with_options.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"The
" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "# Listed Volatility and Variance Derivatives\n", 15 | "\n", 16 | "**Wiley Finance (2017)**\n", 17 | "\n", 18 | "Dr. Yves J. Hilpisch | The Python Quants GmbH\n", 19 | "\n", 20 | "http://tpq.io | [@dyjh](http://twitter.com/dyjh) | http://books.tpq.io\n", 21 | "\n", 22 | "\"Listed" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "# Model-Free Replication of Variance" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "metadata": {}, 35 | "source": [ 36 | "## Simple Spanning with Options" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "import numpy as np" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "S = np.array((20, 5))\n", 55 | "B = np.array((11, 11))" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "S0 = 10\n", 65 | "B0 = 10" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "M = np.array((S, B)).T\n", 75 | "M" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "M0 = np.array((S0, B0))" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "K = 14" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "C = np.maximum(S - K, 0)\n", 103 | "C" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": null, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "P = np.maximum(K - S, 0)\n", 113 | "P" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "phi_C = np.linalg.solve(M, C)\n", 123 | "phi_C" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "phi_P = np.linalg.solve(M, P)\n", 133 | "phi_P" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "C0 = np.dot(M0, phi_C)\n", 143 | "C0" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | "P0 = np.dot(M0, phi_P)\n", 153 | "P0" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": {}, 160 | "outputs": [], 161 | "source": [ 162 | "M_ = np.array((C, P)).T\n", 163 | "M_" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "M0_ = np.array((C0, P0))\n", 173 | "M0_" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "s1 = np.array((1, 0))\n", 183 | "s2 = np.array((0, 1))" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | 
"phi_s1 = np.linalg.solve(M_, s1)\n", 193 | "phi_s1" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": null, 199 | "metadata": {}, 200 | "outputs": [], 201 | "source": [ 202 | "phi_s2 = np.linalg.solve(M_, s2)\n", 203 | "phi_s2" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": null, 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "p1 = np.dot(M0_, phi_s1)\n", 213 | "p1" 214 | ] 215 | }, 216 | { 217 | "cell_type": "code", 218 | "execution_count": null, 219 | "metadata": {}, 220 | "outputs": [], 221 | "source": [ 222 | "p2 = np.dot(M0_, phi_s2)\n", 223 | "p2" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "M0_s = np.array((p1, p2))" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "g = np.array((13, 3))" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "g0 = np.dot(M0_s, g)\n", 251 | "g0" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "\"The
\n", 259 | "\n", 260 | "http://tpq.io | @dyjh | team@tpq.io" 261 | ] 262 | } 263 | ], 264 | "metadata": { 265 | "kernelspec": { 266 | "display_name": "Python 3 (ipykernel)", 267 | "language": "python", 268 | "name": "python3" 269 | }, 270 | "language_info": { 271 | "codemirror_mode": { 272 | "name": "ipython", 273 | "version": 3 274 | }, 275 | "file_extension": ".py", 276 | "mimetype": "text/x-python", 277 | "name": "python", 278 | "nbconvert_exporter": "python", 279 | "pygments_lexer": "ipython3", 280 | "version": "3.9.7" 281 | } 282 | }, 283 | "nbformat": 4, 284 | "nbformat_minor": 4 285 | } 286 | -------------------------------------------------------------------------------- /code/scripts/srjd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of square-root jump diffusion (SRJD) model 3 | # to VSTOXX European call options traded at Eurex 4 | # Data as of 31. March 2014 5 | # All data from www.eurexchange.com 6 | # 7 | # (c) Dr. Yves J. Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import numpy as np 11 | import pandas as pd 12 | import scipy.optimize as sco 13 | import matplotlib.pyplot as plt 14 | from srd_model_calibration import path, read_select_quotes 15 | from srjd_simulation import srjd_call_valuation 16 | 17 | # fixed parameters 18 | r = 0.01 # risk-less short rate 19 | v0 = 17.6639 # VSTOXX index at 31.03.2014 20 | M = 15 # number of time intervals 21 | I = 100 # number of simulated paths 22 | 23 | 24 | def srjd_valuation_function(p0): 25 | ''' Valuation ('difference') function for all options 26 | of a given DataFrame object. 27 | 28 | Parameters 29 | ========== 30 | p0: list 31 | set of model parameters 32 | 33 | Returns 34 | ======= 35 | diffs: NumPy ndarray object 36 | array with valuation differences 37 | ''' 38 | global relative, option_data 39 | kappa, theta, sigma, lamb, mu, delta = p0 40 | diffs = [] 41 | for i, option in option_data.iterrows(): 42 | value = srjd_call_valuation(v0, kappa, theta, sigma, 43 | lamb, mu, delta, 44 | option['TTM'], r, option['STRIKE'], 45 | M=M, I=I, fixed_seed=True) 46 | if relative is True: 47 | diffs.append((value - option['PRICE']) / option['PRICE']) 48 | else: 49 | diffs.append(value - option['PRICE']) 50 | diffs = np.array(diffs) 51 | return diffs 52 | 53 | 54 | def srjd_error_function(p0): 55 | ''' Error function for model calibration. 56 | 57 | Parameters 58 | ========== 59 | p0: tuple 60 | set of model parameters 61 | 62 | Returns 63 | ======= 64 | MSE: float 65 | mean squared (relative/absolute) error 66 | ''' 67 | global i, min_MSE, option_data 68 | OD = len(option_data) 69 | diffs = srjd_valuation_function(p0) 70 | kappa, theta, sigma, lamb, mu, delta = p0 71 | 72 | # penalties 73 | pen = 0. 
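    # Note (added commentary, not part of the original script): as in
    # srd_model_calibration.py, parameter vectors outside the admissible region
    # are penalized, here with the jump intensity lamb and the jump volatility
    # delta also required to be non-negative. The entries of diffs are the
    # per-option (relative or absolute) pricing errors computed above; since
    # srjd_call_valuation is called with fixed_seed=True, the random draws are
    # kept fixed across calls, so the optimizers work on a stable objective.
    # A minimal, hypothetical sketch of the aggregation step (the name
    # mse_with_penalty does not appear in the repository):
    #
    #     def mse_with_penalty(diffs, params, pen=1000.0):
    #         kappa, theta, sigma, lamb, mu, delta = params
    #         admissible = (2 * kappa * theta >= sigma ** 2 and
    #                       min(kappa, theta, sigma, lamb, delta) >= 0)
    #         return np.sum(diffs ** 2) / len(diffs) + (0.0 if admissible else pen)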
74 | if 2 * kappa * theta < sigma ** 2: 75 | pen = 1000.0 76 | if kappa < 0 or theta < 0 or sigma < 0 or lamb < 0 or delta < 0: 77 | pen = 1000.0 78 | 79 | MSE = np.sum(diffs ** 2) / OD + pen # mean squared error 80 | 81 | min_MSE = min(min_MSE, MSE) # running minimum value 82 | 83 | if i == 0: 84 | print('\n' + ('{:>5s}'.format('its') 85 | + '{:>7s} {:>6s} {:>6s} {:>6s} {:>6s} {:>6s}'.format( 86 | 'kappa', 'theta', 'sigma', 'lamb', 'mu', 'delta') 87 | + '{:>12s}'.format('MSE') + '{:>12s}'.format('min_MSE'))) 88 | # print intermediate results: every 100th iteration 89 | if i % 100 == 0: 90 | print('{:>5d}'.format(i) 91 | + '{:7.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f}'.format(*p0) 92 | + '{:>12.5f}'.format(MSE) + '{:>12.5f}'.format(min_MSE)) 93 | i += 1 94 | return MSE 95 | 96 | 97 | def srjd_model_calibration(data, p0=None, rel=False, mats=None): 98 | ''' Function for global and local model calibration. 99 | 100 | Parameters 101 | ========== 102 | option_data: pandas DataFrame object 103 | option quotes to be used 104 | relative: bool 105 | relative or absolute MSE 106 | mats: list 107 | list of maturities of option quotes to calibrate to 108 | 109 | Returns 110 | ======= 111 | opt: tuple 112 | optimal parameter values 113 | ''' 114 | global i, min_MSE, option_data 115 | global relative # if True: MSRE is used, if False: MSAE 116 | 117 | min_MSE = 5000. # dummy value 118 | relative = rel # relative or absolute 119 | option_data = data 120 | 121 | if mats is not None: 122 | # select the option data for the given maturities 123 | option_data = option_data[option_data['MATURITY'].isin(mats)] 124 | 125 | # global optimization 126 | if p0 is None: 127 | i = 0 # counter for calibration iterations 128 | p0 = sco.brute(srjd_error_function, ( 129 | (1.0, 9.1, 4.0), # kappa 130 | (10., 20.1, 10.0), # theta 131 | (1.0, 3.1, 2.0), # sigma 132 | (0.0, 0.81, 0.4), # lambda 133 | (-0.2, 0.41, 0.3), # mu 134 | (0.0, 0.31, 0.15)), # delta 135 | finish=None) 136 | 137 | # local optimization 138 | i = 0 139 | opt = sco.fmin(srjd_error_function, p0, 140 | xtol=0.0000001, ftol=0.0000001, 141 | maxiter=550, maxfun=700) 142 | 143 | return opt 144 | 145 | 146 | def plot_calibration_results(option_data, opt, mats): 147 | ''' Function to plot market quotes vs. model prices. 
148 | 149 | Parameters 150 | ========== 151 | option_data: pandas DataFrame object 152 | option data to plot 153 | opt: list 154 | optimal results from calibration 155 | mats: list 156 | maturities to be plotted 157 | ''' 158 | kappa, theta, sigma, lamb, mu, delta = opt 159 | # adding model values for optimal parameter set 160 | # to the DataFrame object 161 | values = [] 162 | for i, option in option_data.iterrows(): 163 | value = srjd_call_valuation(v0, kappa, theta, sigma, 164 | lamb, mu, delta, 165 | option['TTM'], r, option['STRIKE'], 166 | M=M, I=I, fixed_seed=True) 167 | values.append(value) 168 | option_data['MODEL'] = values 169 | 170 | # plotting the market and model values 171 | height = min(len(mats) * 3, 12) 172 | fig, axarr = plt.subplots(len(mats), 2, sharex=True, figsize=(10, height)) 173 | for z, mat in enumerate(mats): 174 | if z == 0: 175 | axarr[z, 0].set_title('values') 176 | axarr[z, 1].set_title('differences') 177 | os = option_data[option_data.MATURITY == mat] 178 | strikes = os.STRIKE.values 179 | axarr[z, 0].set_ylabel('%s' % str(mat)[:10]) 180 | axarr[z, 0].plot(strikes, os.PRICE.values, label='market quotes') 181 | axarr[z, 0].plot(strikes, os.MODEL.values, 'ro', label='model prices') 182 | axarr[z, 0].legend(loc=0) 183 | wi = 0.3 184 | axarr[z, 1].bar(strikes, os.MODEL.values - os.PRICE.values, 185 | width=wi) 186 | if mat == mats[-1]: 187 | axarr[z, 0].set_xlabel('strike') 188 | axarr[z, 1].set_xlabel('strike') 189 | 190 | if __name__ == '__main__': 191 | option_data = read_select_quotes('../data/', tol=0.1) 192 | option_data['VALUE'] = 0.0 193 | opt = srjd_model_calibration() 194 | -------------------------------------------------------------------------------- /legacy/python3/scripts/srjd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of square-root jump diffusion (SRJD) model 3 | # to VSTOXX European call options traded at Eurex 4 | # Data as of 31. March 2014 5 | # All data from www.eurexchange.com 6 | # 7 | # (c) Dr. Yves J. Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import numpy as np 11 | import pandas as pd 12 | import scipy.optimize as sco 13 | import matplotlib.pyplot as plt 14 | from srd_model_calibration import path, read_select_quotes 15 | from srjd_simulation import srjd_call_valuation 16 | 17 | # fixed parameters 18 | r = 0.01 # risk-less short rate 19 | v0 = 17.6639 # VSTOXX index at 31.03.2014 20 | M = 15 # number of time intervals 21 | I = 100 # number of simulated paths 22 | 23 | 24 | def srjd_valuation_function(p0): 25 | ''' Valuation ('difference') function for all options 26 | of a given DataFrame object. 27 | 28 | Parameters 29 | ========== 30 | p0: list 31 | set of model parameters 32 | 33 | Returns 34 | ======= 35 | diffs: NumPy ndarray object 36 | array with valuation differences 37 | ''' 38 | global relative, option_data 39 | kappa, theta, sigma, lamb, mu, delta = p0 40 | diffs = [] 41 | for i, option in option_data.iterrows(): 42 | value = srjd_call_valuation(v0, kappa, theta, sigma, 43 | lamb, mu, delta, 44 | option['TTM'], r, option['STRIKE'], 45 | M=M, I=I, fixed_seed=True) 46 | if relative is True: 47 | diffs.append((value - option['PRICE']) / option['PRICE']) 48 | else: 49 | diffs.append(value - option['PRICE']) 50 | diffs = np.array(diffs) 51 | return diffs 52 | 53 | 54 | def srjd_error_function(p0): 55 | ''' Error function for model calibration. 
56 | 57 | Parameters 58 | ========== 59 | p0: tuple 60 | set of model parameters 61 | 62 | Returns 63 | ======= 64 | MSE: float 65 | mean squared (relative/absolute) error 66 | ''' 67 | global i, min_MSE, option_data 68 | OD = len(option_data) 69 | diffs = srjd_valuation_function(p0) 70 | kappa, theta, sigma, lamb, mu, delta = p0 71 | 72 | # penalties 73 | pen = 0. 74 | if 2 * kappa * theta < sigma ** 2: 75 | pen = 1000.0 76 | if kappa < 0 or theta < 0 or sigma < 0 or lamb < 0 or delta < 0: 77 | pen = 1000.0 78 | 79 | MSE = np.sum(diffs ** 2) / OD + pen # mean squared error 80 | 81 | min_MSE = min(min_MSE, MSE) # running minimum value 82 | 83 | if i == 0: 84 | print('\n' + ('{:>5s}'.format('its') 85 | + '{:>7s} {:>6s} {:>6s} {:>6s} {:>6s} {:>6s}'.format( 86 | 'kappa', 'theta', 'sigma', 'lamb', 'mu', 'delta') 87 | + '{:>12s}'.format('MSE') + '{:>12s}'.format('min_MSE'))) 88 | # print intermediate results: every 100th iteration 89 | if i % 100 == 0: 90 | print('{:>5d}'.format(i) 91 | + '{:7.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f}'.format(*p0) 92 | + '{:>12.5f}'.format(MSE) + '{:>12.5f}'.format(min_MSE)) 93 | i += 1 94 | return MSE 95 | 96 | 97 | def srjd_model_calibration(data, p0=None, rel=False, mats=None): 98 | ''' Function for global and local model calibration. 99 | 100 | Parameters 101 | ========== 102 | option_data: pandas DataFrame object 103 | option quotes to be used 104 | relative: bool 105 | relative or absolute MSE 106 | mats: list 107 | list of maturities of option quotes to calibrate to 108 | 109 | Returns 110 | ======= 111 | opt: tuple 112 | optimal parameter values 113 | ''' 114 | global i, min_MSE, option_data 115 | global relative # if True: MSRE is used, if False: MSAE 116 | 117 | min_MSE = 5000. # dummy value 118 | relative = rel # relative or absolute 119 | option_data = data 120 | 121 | if mats is not None: 122 | # select the option data for the given maturities 123 | option_data = option_data[option_data['MATURITY'].isin(mats)] 124 | 125 | # global optimization 126 | if p0 is None: 127 | i = 0 # counter for calibration iterations 128 | p0 = sco.brute(srjd_error_function, ( 129 | (1.0, 9.1, 4.0), # kappa 130 | (10., 20.1, 10.0), # theta 131 | (1.0, 3.1, 2.0), # sigma 132 | (0.0, 0.81, 0.4), # lambda 133 | (-0.2, 0.41, 0.3), # mu 134 | (0.0, 0.31, 0.15)), # delta 135 | finish=None) 136 | 137 | # local optimization 138 | i = 0 139 | opt = sco.fmin(srjd_error_function, p0, 140 | xtol=0.0000001, ftol=0.0000001, 141 | maxiter=550, maxfun=700) 142 | 143 | return opt 144 | 145 | 146 | def plot_calibration_results(option_data, opt, mats): 147 | ''' Function to plot market quotes vs. model prices. 
148 | 149 | Parameters 150 | ========== 151 | option_data: pandas DataFrame object 152 | option data to plot 153 | opt: list 154 | optimal results from calibration 155 | mats: list 156 | maturities to be plotted 157 | ''' 158 | kappa, theta, sigma, lamb, mu, delta = opt 159 | # adding model values for optimal parameter set 160 | # to the DataFrame object 161 | values = [] 162 | for i, option in option_data.iterrows(): 163 | value = srjd_call_valuation(v0, kappa, theta, sigma, 164 | lamb, mu, delta, 165 | option['TTM'], r, option['STRIKE'], 166 | M=M, I=I, fixed_seed=True) 167 | values.append(value) 168 | option_data['MODEL'] = values 169 | 170 | # plotting the market and model values 171 | height = min(len(mats) * 3, 12) 172 | fig, axarr = plt.subplots(len(mats), 2, sharex=True, figsize=(10, height)) 173 | for z, mat in enumerate(mats): 174 | if z == 0: 175 | axarr[z, 0].set_title('values') 176 | axarr[z, 1].set_title('differences') 177 | os = option_data[option_data.MATURITY == mat] 178 | strikes = os.STRIKE.values 179 | axarr[z, 0].set_ylabel('%s' % str(mat)[:10]) 180 | axarr[z, 0].plot(strikes, os.PRICE.values, label='market quotes') 181 | axarr[z, 0].plot(strikes, os.MODEL.values, 'ro', label='model prices') 182 | axarr[z, 0].legend(loc=0) 183 | wi = 0.3 184 | axarr[z, 1].bar(strikes, os.MODEL.values - os.PRICE.values, 185 | width=wi) 186 | if mat == mats[-1]: 187 | axarr[z, 0].set_xlabel('strike') 188 | axarr[z, 1].set_xlabel('strike') 189 | 190 | if __name__ == '__main__': 191 | option_data = read_select_quotes('../data/', tol=0.1) 192 | option_data['VALUE'] = 0.0 193 | opt = srjd_model_calibration() 194 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srjd_model_calibration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Calibration of square-root jump diffusion (SRJD) model 3 | # to VSTOXX European call options traded at Eurex 4 | # Data as of 31. March 2014 5 | # All data from www.eurexchange.com 6 | # 7 | # (c) Dr. Yves J. Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import numpy as np 11 | import pandas as pd 12 | import scipy.optimize as sco 13 | import matplotlib.pyplot as plt 14 | from srd_model_calibration import path, read_select_quotes 15 | from srjd_simulation import srjd_call_valuation 16 | 17 | # fixed parameters 18 | r = 0.01 # risk-less short rate 19 | v0 = 17.6639 # VSTOXX index at 31.03.2014 20 | M = 15 # number of time intervals 21 | I = 100 # number of simulated paths 22 | 23 | 24 | def srjd_valuation_function(p0): 25 | ''' Valuation ('difference') function for all options 26 | of a given DataFrame object. 27 | 28 | Parameters 29 | ========== 30 | p0: list 31 | set of model parameters 32 | 33 | Returns 34 | ======= 35 | diffs: NumPy ndarray object 36 | array with valuation differences 37 | ''' 38 | global relative, option_data 39 | kappa, theta, sigma, lamb, mu, delta = p0 40 | diffs = [] 41 | for i, option in option_data.iterrows(): 42 | value = srjd_call_valuation(v0, kappa, theta, sigma, 43 | lamb, mu, delta, 44 | option['TTM'], r, option['STRIKE'], 45 | M=M, I=I, fixed_seed=True) 46 | if relative is True: 47 | diffs.append((value - option['PRICE']) / option['PRICE']) 48 | else: 49 | diffs.append(value - option['PRICE']) 50 | diffs = np.array(diffs) 51 | return diffs 52 | 53 | 54 | def srjd_error_function(p0): 55 | ''' Error function for model calibration. 
56 | 57 | Parameters 58 | ========== 59 | p0: tuple 60 | set of model parameters 61 | 62 | Returns 63 | ======= 64 | MSE: float 65 | mean squared (relative/absolute) error 66 | ''' 67 | global i, min_MSE, option_data 68 | OD = len(option_data) 69 | diffs = srjd_valuation_function(p0) 70 | kappa, theta, sigma, lamb, mu, delta = p0 71 | 72 | # penalties 73 | pen = 0. 74 | if 2 * kappa * theta < sigma ** 2: 75 | pen = 1000.0 76 | if kappa < 0 or theta < 0 or sigma < 0 or lamb < 0 or delta < 0: 77 | pen = 1000.0 78 | 79 | MSE = np.sum(diffs ** 2) / OD + pen # mean squared error 80 | 81 | min_MSE = min(min_MSE, MSE) # running minimum value 82 | 83 | if i == 0: 84 | print '\n' + ('{:>5s}'.format('its') 85 | + '{:>7s} {:>6s} {:>6s} {:>6s} {:>6s} {:>6s}'.format( 86 | 'kappa', 'theta', 'sigma', 'lamb', 'mu', 'delta') 87 | + '{:>12s}'.format('MSE') + '{:>12s}'.format('min_MSE')) 88 | # print intermediate results: every 100th iteration 89 | if i % 100 == 0: 90 | print ('{:>5d}'.format(i) 91 | + '{:7.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f} {:6.3f}'.format(*p0) 92 | + '{:>12.5f}'.format(MSE) + '{:>12.5f}'.format(min_MSE)) 93 | i += 1 94 | return MSE 95 | 96 | 97 | def srjd_model_calibration(data, p0=None, rel=False, mats=None): 98 | ''' Function for global and local model calibration. 99 | 100 | Parameters 101 | ========== 102 | option_data: pandas DataFrame object 103 | option quotes to be used 104 | relative: bool 105 | relative or absolute MSE 106 | mats: list 107 | list of maturities of option quotes to calibrate to 108 | 109 | Returns 110 | ======= 111 | opt: tuple 112 | optimal parameter values 113 | ''' 114 | global i, min_MSE, option_data 115 | global relative # if True: MSRE is used, if False: MSAE 116 | 117 | min_MSE = 5000. # dummy value 118 | relative = rel # relative or absolute 119 | option_data = data 120 | 121 | if mats is not None: 122 | # select the option data for the given maturities 123 | option_data = option_data[option_data['MATURITY'].isin(mats)] 124 | 125 | # global optimization 126 | if p0 is None: 127 | i = 0 # counter for calibration iterations 128 | p0 = sco.brute(srjd_error_function, ( 129 | (1.0, 9.1, 4.0), # kappa 130 | (10., 20.1, 10.0), # theta 131 | (1.0, 3.1, 2.0), # sigma 132 | (0.0, 0.81, 0.4), # lambda 133 | (-0.2, 0.41, 0.3), # mu 134 | (0.0, 0.31, 0.15)), # delta 135 | finish=None) 136 | 137 | # local optimization 138 | i = 0 139 | opt = sco.fmin(srjd_error_function, p0, 140 | xtol=0.0000001, ftol=0.0000001, 141 | maxiter=550, maxfun=700) 142 | 143 | return opt 144 | 145 | 146 | def plot_calibration_results(option_data, opt, mats): 147 | ''' Function to plot market quotes vs. model prices. 
148 | 149 | Parameters 150 | ========== 151 | option_data: pandas DataFrame object 152 | option data to plot 153 | opt: list 154 | optimal results from calibration 155 | mats: list 156 | maturities to be plotted 157 | ''' 158 | kappa, theta, sigma, lamb, mu, delta = opt 159 | # adding model values for optimal parameter set 160 | # to the DataFrame object 161 | values = [] 162 | for i, option in option_data.iterrows(): 163 | value = srjd_call_valuation(v0, kappa, theta, sigma, 164 | lamb, mu, delta, 165 | option['TTM'], r, option['STRIKE'], 166 | M=M, I=I, fixed_seed=True) 167 | values.append(value) 168 | option_data['MODEL'] = values 169 | 170 | # plotting the market and model values 171 | height = min(len(mats) * 3, 12) 172 | fig, axarr = plt.subplots(len(mats), 2, sharex=True, figsize=(10, height)) 173 | for z, mat in enumerate(mats): 174 | if z == 0: 175 | axarr[z, 0].set_title('values') 176 | axarr[z, 1].set_title('differences') 177 | os = option_data[option_data.MATURITY == mat] 178 | strikes = os.STRIKE.values 179 | axarr[z, 0].set_ylabel('%s' % str(mat)[:10]) 180 | axarr[z, 0].plot(strikes, os.PRICE.values, label='market quotes') 181 | axarr[z, 0].plot(strikes, os.MODEL.values, 'ro', label='model prices') 182 | axarr[z, 0].legend(loc=0) 183 | wi = 0.3 184 | axarr[z, 1].bar(strikes - wi / 2, os.MODEL.values - os.PRICE.values, 185 | width=wi) 186 | if mat == mats[-1]: 187 | axarr[z, 0].set_xlabel('strike') 188 | axarr[z, 1].set_xlabel('strike') 189 | 190 | if __name__ == '__main__': 191 | option_data = read_select_quotes('./source/data/', tol=0.1) 192 | option_data['VALUE'] = 0.0 193 | opt = srjd_model_calibration() 194 | -------------------------------------------------------------------------------- /legacy/python2/scripts/index_subindex_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions to compute VSTOXX sub-indexes 3 | # 4 | # Data as generated by the script index_collect_option_data.py 5 | # is needed for the calculations in this module 6 | # 7 | # (c) Dr. Yves J. Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import math 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import index_date_functions as idf 15 | 16 | 17 | def compute_subindex(data, delta_T, R): 18 | ''' Computes a sub-index for given option series data. 
19 | 20 | Parameters 21 | ========== 22 | data: pandas.DataFrame object 23 | contains the option data 24 | delta_T: float 25 | time interval 26 | R: float 27 | discount factor 28 | 29 | Returns 30 | ======= 31 | subVSTOXX: float 32 | sub-index value 33 | ''' 34 | # difference between put and call option with same strike 35 | data['Diff_Put_Call'] = np.abs(data.Put_Price - data.Call_Price) 36 | # converts the strike price which serves as index so far 37 | # to a regular data column 38 | data = data.reset_index() 39 | data['delta_K'] = None 40 | # differences between the different strikes of the series 41 | data['delta_K'].iloc[1:-1] = [(data['Strike price'][i + 1] 42 | - data['Strike price'][i - 1]) / 2 for i in data.index[1:-1]] 43 | # where possible, for the i-th entry it is 44 | # half of the difference between the (i-1)-th 45 | # and (i+1)-th price 46 | # for i=0 it is just the difference to the next strike 47 | data['delta_K'].iloc[0] = data['Strike price'][1] - data['Strike price'][0] 48 | 49 | data['delta_K'].iloc[data.index[-1:]] = float(data['Strike price'][-1:]) \ 50 | - float(data['Strike price'][-2:-1]) 51 | # for the last entry, it is just the difference 52 | # between the second but last strike and the last strike price 53 | 54 | # find the smallest difference between put and call price 55 | min_index = data.Diff_Put_Call.argmin() 56 | 57 | # the forward price of that option 58 | forward_price = data['Strike price'][min_index] \ 59 | + R * data.Diff_Put_Call[min_index] 60 | 61 | K_0 = data['Strike price'][forward_price - 62 | data['Strike price'] > 0].max() 63 | # the index of the ATM strike 64 | K_0_index = data.index[data['Strike price'] == K_0][0] 65 | 66 | # selects the OTM options 67 | data['M'] = pd.concat((data.Put_Price[0:K_0_index], 68 | data.Call_Price[K_0_index:])) 69 | 70 | # ATM we take the average of put and call price 71 | data['M'].iloc[K_0_index] = (data['Call_Price'][K_0_index] 72 | + data['Put_Price'][K_0_index]) / 2 73 | 74 | # the single OTM values 75 | data['MFactor'] = (R * (data['delta_K'] * data['M']) 76 | / (data['Strike price']) ** 2) 77 | 78 | # the forward term 79 | fterm = 1. / delta_T * (forward_price / K_0 - 1) ** 2 80 | # summing up 81 | sigma = 2 / delta_T * np.sum(data.MFactor) - fterm 82 | subVSTOXX = 100 * math.sqrt(sigma) 83 | return subVSTOXX 84 | 85 | 86 | def make_subindex(path): 87 | ''' Depending on the content of the file 'index_option_series.h5', 88 | the function computes the sub-indexes V6I1, V6I2 and parts 89 | of V6I3 and returns a pandas DataFrame object with the results. 
90 | 91 | Parameters 92 | ========== 93 | path: string 94 | string with path of data file 95 | 96 | Returns 97 | ======= 98 | df: pandas DataFrame object 99 | sub-index data as computed by the function 100 | ''' 101 | 102 | # the data source, created with index_collect_option_data.py 103 | datastore = pd.HDFStore(path + 'index_option_series_2015.h5', 'r') 104 | 105 | max_date = dt.datetime.today() # find the latest date in the source 106 | for series in datastore.keys(): 107 | dummy_date = datastore[series].index.get_level_values(0)[0] 108 | dummy_date = dummy_date.to_pydatetime() 109 | if dummy_date > max_date: 110 | max_date = dummy_date 111 | 112 | start_date = dt.datetime.today() # find the earliest date in the source 113 | for series in datastore.keys(): 114 | dummy_date = datastore[series].index.get_level_values(0)[0] 115 | dummy_data = dummy_date.to_pydatetime() 116 | if dummy_date < start_date: 117 | start_date = dummy_date 118 | 119 | V1 = dict() # dicts to store the values, V stands for the sub-indices, 120 | # T for their expiry 121 | V2 = dict() 122 | V3 = dict() 123 | T1 = dict() 124 | T2 = dict() 125 | T3 = dict() 126 | 127 | # from start_date to max_date, but only weekdays 128 | for day in pd.bdate_range(start=start_date.date(), end=max_date.date()): 129 | # is V6I1 defined? 130 | is_V1_defined = idf.not_a_day_before_expiry(day) 131 | # the settlement date 132 | settlement_date = idf.first_settlement_day(day) 133 | # abbreviation for the expiry date, like Oct14 134 | key = settlement_date.strftime('%b%y') 135 | # days until maturity 136 | delta_T = idf.compute_delta(day, settlement_date) 137 | try: 138 | # data of the option series for that date 139 | data = datastore[key].ix[day] 140 | except: 141 | continue 142 | 143 | if is_V1_defined: # if V6I1 is defined 144 | # compute its value 145 | V1[day] = compute_subindex(data, delta_T, 146 | math.exp(0.0015 * delta_T)) 147 | T1[day] = settlement_date 148 | else: 149 | # compute the value of V6I2 instead 150 | V2[day] = compute_subindex(data, delta_T, 151 | math.exp(0.0015 * delta_T)) 152 | T2[day] = settlement_date 153 | 154 | settlement_date_2 = idf.second_settlement_day(day) 155 | 156 | # the same for the next index 157 | key_2 = settlement_date_2.strftime('%b%y') 158 | delta_T_2 = idf.compute_delta(day, settlement_date_2) 159 | data_2 = datastore[key_2].ix[day] 160 | 161 | if is_V1_defined: 162 | V2[day] = compute_subindex(data_2, delta_T_2, 163 | math.exp(0.001 * delta_T_2)) 164 | T2[day] = settlement_date_2 165 | else: 166 | V3[day] = compute_subindex(data_2, delta_T_2, 167 | math.exp(0.001 * delta_T_2)) 168 | T3[day] = settlement_date_2 169 | 170 | datastore.close() 171 | # create the pandas DataFrame object and return it 172 | df = pd.DataFrame(data={'V6I1': V1, 'Expiry V6I1': T1, 'V6I2': V2, 173 | 'Expiry V6I2': T2, 'V6I3': V3, 'Expiry V6I3': T3}) 174 | return df 175 | 176 | -------------------------------------------------------------------------------- /code/scripts/index_subindex_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions to compute VSTOXX sub-indexes 3 | # 4 | # Data as generated by the script index_collect_option_data.py 5 | # is needed for the calculations in this module 6 | # 7 | # (c) Dr. Yves J. 
Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import math 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import index_date_functions as idf 15 | pd.options.mode.chained_assignment = None 16 | 17 | 18 | def compute_subindex(data, delta_T, R): 19 | ''' Computes a sub-index for given option series data. 20 | 21 | Parameters 22 | ========== 23 | data: pandas.DataFrame object 24 | contains the option data 25 | delta_T: float 26 | time interval 27 | R: float 28 | discount factor 29 | 30 | Returns 31 | ======= 32 | subVSTOXX: float 33 | sub-index value 34 | ''' 35 | # difference between put and call option with same strike 36 | data['Diff_Put_Call'] = np.abs(data.Put_Price - data.Call_Price) 37 | # converts the strike price which serves as index so far 38 | # to a regular data column 39 | data = data.reset_index() 40 | data['delta_K'] = None 41 | # differences between the different strikes of the series 42 | data['delta_K'].iloc[1:-1] = [(data['Strike price'].iloc[i + 1] 43 | - data['Strike price'].iloc[i - 1]) / 2 for i in data.index[1:-1]] 44 | # where possible, for the i-th entry it is 45 | # half of the difference between the (i-1)-th 46 | # and (i+1)-th price 47 | # for i=0 it is just the difference to the next strike 48 | data['delta_K'].iloc[0] = data['Strike price'].iloc[1] - data['Strike price'].iloc[0] 49 | 50 | data['delta_K'].loc[data.index[-1:]] = float(data['Strike price'].iloc[-1:]) \ 51 | - float(data['Strike price'].iloc[-2:-1]) 52 | # for the last entry, it is just the difference 53 | # between the second but last strike and the last strike price 54 | 55 | # find the smallest difference between put and call price 56 | min_index = data.Diff_Put_Call.argmin() 57 | 58 | # the forward price of that option 59 | forward_price = data['Strike price'].iloc[min_index] \ 60 | + R * data.Diff_Put_Call[min_index] 61 | 62 | K_0 = data['Strike price'][forward_price - 63 | data['Strike price'] > 0].max() 64 | # the index of the ATM strike 65 | K_0_index = data.index[data['Strike price'] == K_0][0] 66 | 67 | # selects the OTM options 68 | data['M'] = pd.concat((data.Put_Price[0:K_0_index], 69 | data.Call_Price[K_0_index:])) 70 | 71 | # ATM we take the average of put and call price 72 | data['M'].iloc[K_0_index] = (data['Call_Price'][K_0_index] 73 | + data['Put_Price'][K_0_index]) / 2 74 | 75 | # the single OTM values 76 | data['MFactor'] = (R * (data['delta_K'] * data['M']) 77 | / (data['Strike price']) ** 2) 78 | 79 | # the forward term 80 | fterm = 1. / delta_T * (forward_price / K_0 - 1) ** 2 81 | # summing up 82 | sigma = 2 / delta_T * np.sum(data.MFactor) - fterm 83 | subVSTOXX = 100 * math.sqrt(sigma) 84 | return subVSTOXX 85 | 86 | 87 | def make_subindex(path): 88 | ''' Depending on the content of the file 'index_option_series.h5', 89 | the function computes the sub-indexes V6I1, V6I2 and parts 90 | of V6I3 and returns a pandas DataFrame object with the results. 
91 | 92 | Parameters 93 | ========== 94 | path: string 95 | string with path of data file 96 | 97 | Returns 98 | ======= 99 | df: pandas DataFrame object 100 | sub-index data as computed by the function 101 | ''' 102 | 103 | # the data source, created with index_collect_option_data.py 104 | datastore = pd.HDFStore(path + 'index_option_series.h5', 'r') 105 | 106 | max_date = dt.datetime.today() # find the latest date in the source 107 | for series in datastore.keys(): 108 | dummy_date = datastore[series].index.get_level_values(0)[0] 109 | dummy_date = dummy_date.to_pydatetime() 110 | if dummy_date > max_date: 111 | max_date = dummy_date 112 | start_date = dt.datetime.today() # find the earliest date in the source 113 | for series in datastore.keys(): 114 | dummy_date = datastore[series].index.get_level_values(0)[0] 115 | dummy_data = dummy_date.to_pydatetime() 116 | if dummy_date < start_date: 117 | start_date = dummy_date 118 | V1 = dict() # dicts to store the values, V stands for the sub-indices, 119 | # T for their expiry 120 | V2 = dict() 121 | V3 = dict() 122 | T1 = dict() 123 | T2 = dict() 124 | T3 = dict() 125 | 126 | # from start_date to max_date, but only weekdays 127 | for day in pd.bdate_range(start=start_date.date(), end=max_date.date()): 128 | # is V6I1 defined? 129 | is_V1_defined = idf.not_a_day_before_expiry(day) 130 | # the settlement date 131 | settlement_date = idf.first_settlement_day(day) 132 | # abbreviation for the expiry date, like Oct14 133 | key = settlement_date.strftime('%b%y') 134 | # days until maturity 135 | delta_T = idf.compute_delta(day, settlement_date) 136 | try: 137 | # data of the option series for that date 138 | data = datastore[key].loc[day] 139 | except: 140 | continue 141 | 142 | if is_V1_defined: # if V6I1 is defined 143 | # compute its value 144 | V1[day] = compute_subindex(data, delta_T, 145 | math.exp(0.0015 * delta_T)) 146 | T1[day] = settlement_date 147 | else: 148 | # compute the value of V6I2 instead 149 | V2[day] = compute_subindex(data, delta_T, 150 | math.exp(0.0015 * delta_T)) 151 | T2[day] = settlement_date 152 | 153 | settlement_date_2 = idf.second_settlement_day(day) 154 | 155 | # the same for the next index 156 | key_2 = settlement_date_2.strftime('%b%y') 157 | delta_T_2 = idf.compute_delta(day, settlement_date_2) 158 | data_2 = datastore[key_2].loc[day] 159 | 160 | if is_V1_defined: 161 | V2[day] = compute_subindex(data_2, delta_T_2, 162 | math.exp(0.001 * delta_T_2)) 163 | T2[day] = settlement_date_2 164 | else: 165 | V3[day] = compute_subindex(data_2, delta_T_2, 166 | math.exp(0.001 * delta_T_2)) 167 | T3[day] = settlement_date_2 168 | 169 | datastore.close() 170 | # create the pandas DataFrame object and return it 171 | df = pd.DataFrame(data={'V6I1': V1, 'Expiry V6I1': T1, 'V6I2': V2, 172 | 'Expiry V6I2': T2, 'V6I3': V3, 'Expiry V6I3': T3}) 173 | return df 174 | 175 | -------------------------------------------------------------------------------- /legacy/python3/scripts/index_subindex_calculation.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module with functions to compute VSTOXX sub-indexes 3 | # 4 | # Data as generated by the script index_collect_option_data.py 5 | # is needed for the calculations in this module 6 | # 7 | # (c) Dr. Yves J. 
Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import math 11 | import numpy as np 12 | import pandas as pd 13 | import datetime as dt 14 | import index_date_functions as idf 15 | pd.options.mode.chained_assignment = None 16 | 17 | 18 | def compute_subindex(data, delta_T, R): 19 | ''' Computes a sub-index for given option series data. 20 | 21 | Parameters 22 | ========== 23 | data: pandas.DataFrame object 24 | contains the option data 25 | delta_T: float 26 | time interval 27 | R: float 28 | discount factor 29 | 30 | Returns 31 | ======= 32 | subVSTOXX: float 33 | sub-index value 34 | ''' 35 | # difference between put and call option with same strike 36 | data['Diff_Put_Call'] = np.abs(data.Put_Price - data.Call_Price) 37 | # converts the strike price which serves as index so far 38 | # to a regular data column 39 | data = data.reset_index() 40 | data['delta_K'] = None 41 | # differences between the different strikes of the series 42 | data['delta_K'].iloc[1:-1] = [(data['Strike price'].iloc[i + 1] 43 | - data['Strike price'].iloc[i - 1]) / 2 for i in data.index[1:-1]] 44 | # where possible, for the i-th entry it is 45 | # half of the difference between the (i-1)-th 46 | # and (i+1)-th price 47 | # for i=0 it is just the difference to the next strike 48 | data['delta_K'].iloc[0] = data['Strike price'].iloc[1] - data['Strike price'].iloc[0] 49 | 50 | data['delta_K'].loc[data.index[-1:]] = float(data['Strike price'].iloc[-1:]) \ 51 | - float(data['Strike price'].iloc[-2:-1]) 52 | # for the last entry, it is just the difference 53 | # between the second but last strike and the last strike price 54 | 55 | # find the smallest difference between put and call price 56 | min_index = data.Diff_Put_Call.argmin() 57 | 58 | # the forward price of that option 59 | forward_price = data['Strike price'].iloc[min_index] \ 60 | + R * data.Diff_Put_Call[min_index] 61 | 62 | K_0 = data['Strike price'][forward_price - 63 | data['Strike price'] > 0].max() 64 | # the index of the ATM strike 65 | K_0_index = data.index[data['Strike price'] == K_0][0] 66 | 67 | # selects the OTM options 68 | data['M'] = pd.concat((data.Put_Price[0:K_0_index], 69 | data.Call_Price[K_0_index:])) 70 | 71 | # ATM we take the average of put and call price 72 | data['M'].iloc[K_0_index] = (data['Call_Price'][K_0_index] 73 | + data['Put_Price'][K_0_index]) / 2 74 | 75 | # the single OTM values 76 | data['MFactor'] = (R * (data['delta_K'] * data['M']) 77 | / (data['Strike price']) ** 2) 78 | 79 | # the forward term 80 | fterm = 1. / delta_T * (forward_price / K_0 - 1) ** 2 81 | # summing up 82 | sigma = 2 / delta_T * np.sum(data.MFactor) - fterm 83 | subVSTOXX = 100 * math.sqrt(sigma) 84 | return subVSTOXX 85 | 86 | 87 | def make_subindex(path): 88 | ''' Depending on the content of the file 'index_option_series.h5', 89 | the function computes the sub-indexes V6I1, V6I2 and parts 90 | of V6I3 and returns a pandas DataFrame object with the results. 
91 | 92 | Parameters 93 | ========== 94 | path: string 95 | string with path of data file 96 | 97 | Returns 98 | ======= 99 | df: pandas DataFrame object 100 | sub-index data as computed by the function 101 | ''' 102 | 103 | # the data source, created with index_collect_option_data.py 104 | datastore = pd.HDFStore(path + 'index_option_series.h5', 'r') 105 | 106 | max_date = dt.datetime.today() # find the latest date in the source 107 | for series in datastore.keys(): 108 | dummy_date = datastore[series].index.get_level_values(0)[0] 109 | dummy_date = dummy_date.to_pydatetime() 110 | if dummy_date > max_date: 111 | max_date = dummy_date 112 | start_date = dt.datetime.today() # find the earliest date in the source 113 | for series in datastore.keys(): 114 | dummy_date = datastore[series].index.get_level_values(0)[0] 115 | dummy_data = dummy_date.to_pydatetime() 116 | if dummy_date < start_date: 117 | start_date = dummy_date 118 | V1 = dict() # dicts to store the values, V stands for the sub-indices, 119 | # T for their expiry 120 | V2 = dict() 121 | V3 = dict() 122 | T1 = dict() 123 | T2 = dict() 124 | T3 = dict() 125 | 126 | # from start_date to max_date, but only weekdays 127 | for day in pd.bdate_range(start=start_date.date(), end=max_date.date()): 128 | # is V6I1 defined? 129 | is_V1_defined = idf.not_a_day_before_expiry(day) 130 | # the settlement date 131 | settlement_date = idf.first_settlement_day(day) 132 | # abbreviation for the expiry date, like Oct14 133 | key = settlement_date.strftime('%b%y') 134 | # days until maturity 135 | delta_T = idf.compute_delta(day, settlement_date) 136 | try: 137 | # data of the option series for that date 138 | data = datastore[key].loc[day] 139 | except: 140 | continue 141 | 142 | if is_V1_defined: # if V6I1 is defined 143 | # compute its value 144 | V1[day] = compute_subindex(data, delta_T, 145 | math.exp(0.0015 * delta_T)) 146 | T1[day] = settlement_date 147 | else: 148 | # compute the value of V6I2 instead 149 | V2[day] = compute_subindex(data, delta_T, 150 | math.exp(0.0015 * delta_T)) 151 | T2[day] = settlement_date 152 | 153 | settlement_date_2 = idf.second_settlement_day(day) 154 | 155 | # the same for the next index 156 | key_2 = settlement_date_2.strftime('%b%y') 157 | delta_T_2 = idf.compute_delta(day, settlement_date_2) 158 | data_2 = datastore[key_2].loc[day] 159 | 160 | if is_V1_defined: 161 | V2[day] = compute_subindex(data_2, delta_T_2, 162 | math.exp(0.001 * delta_T_2)) 163 | T2[day] = settlement_date_2 164 | else: 165 | V3[day] = compute_subindex(data_2, delta_T_2, 166 | math.exp(0.001 * delta_T_2)) 167 | T3[day] = settlement_date_2 168 | 169 | datastore.close() 170 | # create the pandas DataFrame object and return it 171 | df = pd.DataFrame(data={'V6I1': V1, 'Expiry V6I1': T1, 'V6I2': V2, 172 | 'Expiry V6I2': T2, 'V6I3': V3, 'Expiry V6I3': T3}) 173 | return df 174 | 175 | -------------------------------------------------------------------------------- /code/scripts/index_collect_option_data.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to collect option series data 3 | # from the web 4 | # Source: www.eurexchange.com 5 | # Data is needed to calculate the VSTOXX 6 | # and its sub-indexes 7 | # 8 | # (c) Dr. Yves J. 
Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import requests 12 | from io import * 13 | import numpy as np 14 | import pandas as pd 15 | import datetime as dt 16 | from bs4 import BeautifulSoup 17 | from index_date_functions import * 18 | 19 | # 20 | # The URL template 21 | # 22 | URL = 'https://www.eurex.com/ex-en/data/statistics/market-statistics-online/' 23 | URL += '100!onlineStats?productGroupId=13370&productId=69660&viewType=3&' 24 | URL += 'cp=%s&month=%s&year=%s&busDate=%s' 25 | 26 | 27 | def collect_option_series(month, year, start): 28 | ''' Collects daily option data from web source. 29 | 30 | Parameters 31 | ========== 32 | month: int 33 | maturity month 34 | year: int 35 | maturity year 36 | start: datetime object 37 | starting date 38 | 39 | Returns 40 | ======= 41 | dataset: pandas DataFrame object 42 | object containing the collected data 43 | ''' 44 | end = dt.datetime.today() 45 | delta = (end - start).days 46 | 47 | dataset = pd.DataFrame() 48 | for t in range(0, delta): # runs from start to today 49 | date = start + dt.timedelta(t) 50 | dummy = get_data(month, year, date) # get data for one day 51 | if len(dummy) != 0: 52 | if len(dataset) == 0: 53 | dataset = dummy 54 | else: 55 | dataset = pd.concat((dataset, dummy)) # add data 56 | 57 | return dataset 58 | 59 | 60 | def get_data(month, year, date): 61 | ''' Get the data for an option series. 62 | 63 | Parameters 64 | ========== 65 | month: int 66 | maturity month 67 | year: int 68 | maturity year 69 | date: datetime object 70 | the date for which the data is collected 71 | 72 | Returns 73 | ======= 74 | dataset: pandas DataFrame object 75 | object containing call & put option data 76 | ''' 77 | 78 | date_string = date.strftime('%Y%m%d') 79 | # loads the call data from the web 80 | data = get_data_from_www('Call', month, year, date_string) 81 | calls = parse_data(data, date) # parse the raw data 82 | calls = calls.rename(columns={'Daily settlem. price': 'Call_Price'}) 83 | 84 | calls = pd.DataFrame(calls.pop('Call_Price').astype(float)) 85 | # the same for puts 86 | data = get_data_from_www('Put', month, year, date_string) 87 | puts = parse_data(data, date) 88 | puts = puts.rename(columns={'Daily settlem. price': 'Put_Price'}) 89 | puts = pd.DataFrame(puts.pop('Put_Price').astype(float)) 90 | 91 | dataset = merge_and_filter(puts, calls) # merges the two time series 92 | 93 | return dataset 94 | 95 | 96 | def get_data_from_www(oType, matMonth, matYear, date): 97 | ''' Retrieves the raw data of an option series from the web. 98 | 99 | Parameters 100 | ========== 101 | oType: string 102 | either 'Put' or 'Call' 103 | matMonth: int 104 | maturity month 105 | matYear: int 106 | maturity year 107 | date: string 108 | expiry in the format 'YYYYMMDD' 109 | 110 | Returns 111 | ======= 112 | a: string 113 | raw text with option data 114 | ''' 115 | 116 | url = URL % (oType, matMonth, matYear, date) # parametrizes the URL 117 | a = requests.get(url).text 118 | return a 119 | 120 | 121 | def merge_and_filter(puts, calls): 122 | ''' Gets two pandas time series for the puts and calls 123 | (from the same option series), merges them, filters out 124 | all options with prices smaller than 0.5 and 125 | returns the resulting DataFrame object. 
126 | 127 | Parameters 128 | ========== 129 | puts: pandas DataFrame object 130 | put option data 131 | calls: pandas DataFrame object 132 | call option data 133 | 134 | Returns 135 | ======= 136 | df: pandas DataFrame object 137 | merged & filtered options data 138 | ''' 139 | 140 | df = calls.join(puts, how='inner') # merges the two time series 141 | # filters all prices which are too small 142 | df = df[(df.Put_Price >= 0.5) & (df.Call_Price >= 0.5)] 143 | 144 | return df 145 | 146 | 147 | def parse_data(data, date): 148 | ''' Parses the HTML table and transforms it into a CSV compatible 149 | format. The result can be directly imported into a pandas DataFrame. 150 | 151 | Parameters 152 | ========== 153 | data: string 154 | document containing the Web content 155 | date: datetime object 156 | date for which the data is parsed 157 | 158 | Returns 159 | ======= 160 | dataset: pandas DataFrame object 161 | transformed option raw data 162 | ''' 163 | 164 | data_list = list() 165 | date_value = dt.date(date.year, date.month, date.day) 166 | soup = BeautifulSoup(data, 'html.parser') 167 | 168 | tables = soup.select('table.dataTable') 169 | if len(tables) != 1: 170 | raise ValueError('table selector is not unique') 171 | else: 172 | table = tables[0] 173 | columns = ['Pricing day',] + [cell.get_text() for cell in table.find_all('th')] 174 | 175 | for line in table.find_all('tr')[:-1]: 176 | data_list.append([date_value,]+[float(cell.get_text().replace(',','')) for cell in line.find_all('td')]) 177 | 178 | dataset = pd.DataFrame(data_list, columns=columns) 179 | dataset = dataset.set_index(['Pricing day','Strike price']) 180 | return dataset 181 | 182 | 183 | def data_collection(path=''): 184 | ''' Main function which saves data into the HDF5 file 185 | 'index_option_series.h5' for later use. 
186 | 187 | Parameters 188 | ========== 189 | path: string 190 | path to store the data 191 | ''' 192 | # file to store data 193 | store = pd.HDFStore(path + 'index_option_series.h5', 'a') 194 | 195 | today = dt.datetime.today() 196 | start = today - dt.timedelta(31) # the last 31 days 197 | 198 | day = start.day 199 | month = start.month 200 | year = start.year 201 | 202 | for i in range(4): # iterates over the next 4 months 203 | dummy_month = month + i 204 | dummy_year = year 205 | if dummy_month > 12: 206 | dummy_month -= 12 207 | dummy_year += 1 208 | 209 | # collect daily data beginning 31 days ago (start) for 210 | # option series with expiry dummy_month, dummy_year 211 | dataset = collect_option_series(dummy_month, dummy_year, start) 212 | 213 | dummy_date = dt.datetime(dummy_year, dummy_month, day) 214 | 215 | # abbreviation for expiry date (for example Oct14) 216 | series_name = dummy_date.strftime('%b%y') 217 | 218 | if series_name in store.keys(): # if data for that series exists 219 | index_old = store[series_name].index 220 | index_new = dataset.index 221 | 222 | if len(index_new - index_old) > 0: 223 | dummy = pd.concat((store[series_name], 224 | dataset.ix[index_new - index_old])) # add the new data 225 | 226 | store[series_name] = dummy 227 | else: 228 | if len(dataset) > 0: 229 | # if series is new, write whole data set into data store 230 | store[series_name] = dataset 231 | 232 | store.close() 233 | -------------------------------------------------------------------------------- /legacy/python3/scripts/index_collect_option_data.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to collect option series data 3 | # from the web 4 | # Source: www.eurexchange.com 5 | # Data is needed to calculate the VSTOXX 6 | # and its sub-indexes 7 | # 8 | # (c) Dr. Yves J. Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import requests 12 | from io import * 13 | import numpy as np 14 | import pandas as pd 15 | import datetime as dt 16 | from bs4 import BeautifulSoup 17 | from index_date_functions import * 18 | 19 | # 20 | # The URL template 21 | # 22 | URL = 'https://www.eurex.com/ex-en/data/statistics/market-statistics-online/' 23 | URL += '100!onlineStats?productGroupId=13370&productId=69660&viewType=3&' 24 | URL += 'cp=%s&month=%s&year=%s&busDate=%s' 25 | 26 | 27 | def collect_option_series(month, year, start): 28 | ''' Collects daily option data from web source. 29 | 30 | Parameters 31 | ========== 32 | month: int 33 | maturity month 34 | year: int 35 | maturity year 36 | start: datetime object 37 | starting date 38 | 39 | Returns 40 | ======= 41 | dataset: pandas DataFrame object 42 | object containing the collected data 43 | ''' 44 | end = dt.datetime.today() 45 | delta = (end - start).days 46 | 47 | dataset = pd.DataFrame() 48 | for t in range(0, delta): # runs from start to today 49 | date = start + dt.timedelta(t) 50 | dummy = get_data(month, year, date) # get data for one day 51 | if len(dummy) != 0: 52 | if len(dataset) == 0: 53 | dataset = dummy 54 | else: 55 | dataset = pd.concat((dataset, dummy)) # add data 56 | 57 | return dataset 58 | 59 | 60 | def get_data(month, year, date): 61 | ''' Get the data for an option series. 
62 | 63 | Parameters 64 | ========== 65 | month: int 66 | maturity month 67 | year: int 68 | maturity year 69 | date: datetime object 70 | the date for which the data is collected 71 | 72 | Returns 73 | ======= 74 | dataset: pandas DataFrame object 75 | object containing call & put option data 76 | ''' 77 | 78 | date_string = date.strftime('%Y%m%d') 79 | # loads the call data from the web 80 | data = get_data_from_www('Call', month, year, date_string) 81 | calls = parse_data(data, date) # parse the raw data 82 | calls = calls.rename(columns={'Daily settlem. price': 'Call_Price'}) 83 | 84 | calls = pd.DataFrame(calls.pop('Call_Price').astype(float)) 85 | # the same for puts 86 | data = get_data_from_www('Put', month, year, date_string) 87 | puts = parse_data(data, date) 88 | puts = puts.rename(columns={'Daily settlem. price': 'Put_Price'}) 89 | puts = pd.DataFrame(puts.pop('Put_Price').astype(float)) 90 | 91 | dataset = merge_and_filter(puts, calls) # merges the two time series 92 | 93 | return dataset 94 | 95 | 96 | def get_data_from_www(oType, matMonth, matYear, date): 97 | ''' Retrieves the raw data of an option series from the web. 98 | 99 | Parameters 100 | ========== 101 | oType: string 102 | either 'Put' or 'Call' 103 | matMonth: int 104 | maturity month 105 | matYear: int 106 | maturity year 107 | date: string 108 | expiry in the format 'YYYYMMDD' 109 | 110 | Returns 111 | ======= 112 | a: string 113 | raw text with option data 114 | ''' 115 | 116 | url = URL % (oType, matMonth, matYear, date) # parametrizes the URL 117 | a = requests.get(url).text 118 | return a 119 | 120 | 121 | def merge_and_filter(puts, calls): 122 | ''' Gets two pandas time series for the puts and calls 123 | (from the same option series), merges them, filters out 124 | all options with prices smaller than 0.5 and 125 | returns the resulting DataFrame object. 126 | 127 | Parameters 128 | ========== 129 | puts: pandas DataFrame object 130 | put option data 131 | calls: pandas DataFrame object 132 | call option data 133 | 134 | Returns 135 | ======= 136 | df: pandas DataFrame object 137 | merged & filtered options data 138 | ''' 139 | 140 | df = calls.join(puts, how='inner') # merges the two time series 141 | # filters all prices which are too small 142 | df = df[(df.Put_Price >= 0.5) & (df.Call_Price >= 0.5)] 143 | 144 | return df 145 | 146 | 147 | def parse_data(data, date): 148 | ''' Parses the HTML table and transforms it into a CSV compatible 149 | format. The result can be directly imported into a pandas DataFrame. 
150 | 151 | Parameters 152 | ========== 153 | data: string 154 | document containing the Web content 155 | date: datetime object 156 | date for which the data is parsed 157 | 158 | Returns 159 | ======= 160 | dataset: pandas DataFrame object 161 | transformed option raw data 162 | ''' 163 | 164 | data_list = list() 165 | date_value = dt.date(date.year, date.month, date.day) 166 | soup = BeautifulSoup(data, 'html.parser') 167 | 168 | tables = soup.select('table.dataTable') 169 | if len(tables) != 1: 170 | raise ValueError('table selector is not unique') 171 | else: 172 | table = tables[0] 173 | columns = ['Pricing day',] + [cell.get_text() for cell in table.find_all('th')] 174 | 175 | for line in table.find_all('tr')[:-1]: 176 | data_list.append([date_value,]+[float(cell.get_text().replace(',','')) for cell in line.find_all('td')]) 177 | 178 | dataset = pd.DataFrame(data_list, columns=columns) 179 | dataset = dataset.set_index(['Pricing day','Strike price']) 180 | return dataset 181 | 182 | 183 | def data_collection(path=''): 184 | ''' Main function which saves data into the HDF5 file 185 | 'index_option_series.h5' for later use. 186 | 187 | Parameters 188 | ========== 189 | path: string 190 | path to store the data 191 | ''' 192 | # file to store data 193 | store = pd.HDFStore(path + 'index_option_series.h5', 'a') 194 | 195 | today = dt.datetime.today() 196 | start = today - dt.timedelta(31) # the last 31 days 197 | 198 | day = start.day 199 | month = start.month 200 | year = start.year 201 | 202 | for i in range(4): # iterates over the next 4 months 203 | dummy_month = month + i 204 | dummy_year = year 205 | if dummy_month > 12: 206 | dummy_month -= 12 207 | dummy_year += 1 208 | 209 | # collect daily data beginning 31 days ago (start) for 210 | # option series with expiry dummy_month, dummy_year 211 | dataset = collect_option_series(dummy_month, dummy_year, start) 212 | 213 | dummy_date = dt.datetime(dummy_year, dummy_month, day) 214 | 215 | # abbreviation for expiry date (for example Oct14) 216 | series_name = dummy_date.strftime('%b%y') 217 | 218 | if series_name in store.keys(): # if data for that series exists 219 | index_old = store[series_name].index 220 | index_new = dataset.index 221 | 222 | if len(index_new - index_old) > 0: 223 | dummy = pd.concat((store[series_name], 224 | dataset.ix[index_new - index_old])) # add the new data 225 | 226 | store[series_name] = dummy 227 | else: 228 | if len(dataset) > 0: 229 | # if series is new, write whole data set into data store 230 | store[series_name] = dataset 231 | 232 | store.close() 233 | -------------------------------------------------------------------------------- /legacy/python2/scripts/index_collect_option_data.py: -------------------------------------------------------------------------------- 1 | # 2 | # Module to collect option series data 3 | # from the web 4 | # Source: www.eurexchange.com 5 | # Data is needed to calculate the VSTOXX 6 | # and its sub-indexes 7 | # 8 | # (c) Dr. Yves J. 
Hilpisch 9 | # Listed Volatility and Variance Derivatives 10 | # 11 | import requests 12 | import datetime as dt 13 | import pandas as pd 14 | import numpy as np 15 | from StringIO import * 16 | from index_date_functions import * 17 | 18 | # 19 | # The URL template 20 | # 21 | url1 = 'http://www.eurexchange.com/action/exchange-en/' 22 | url2 = '180106-180102/180102/onlineStats.do?productGroupId=846' 23 | url3 = '&productId=19068&viewType=3&cp=%s&month=%s&year=%s&busDate=%s' 24 | URL = url1 + url2 + url3 25 | 26 | 27 | def collect_option_series(month, year, start): 28 | ''' Collects daily option data from web source. 29 | 30 | Parameters 31 | ========== 32 | month: int 33 | maturity month 34 | year: int 35 | maturity year 36 | start: datetime object 37 | starting date 38 | 39 | Returns 40 | ======= 41 | dataset: pandas DataFrame object 42 | object containing the collected data 43 | ''' 44 | end = dt.datetime.today() 45 | delta = (end - start).days 46 | 47 | dataset = pd.DataFrame() 48 | for t in range(0, delta): # runs from start to today 49 | date = start + dt.timedelta(t) 50 | dummy = get_data(month, year, date) # get data for one day 51 | if len(dummy) != 0: 52 | if len(dataset) == 0: 53 | dataset = dummy 54 | else: 55 | dataset = pd.concat((dataset, dummy)) # add data 56 | 57 | return dataset 58 | 59 | 60 | def get_data(month, year, date): 61 | ''' Get the data for an option series. 62 | 63 | Parameters 64 | ========== 65 | month: int 66 | maturity month 67 | year: int 68 | maturity year 69 | date: datetime object 70 | the date for which the data is collected 71 | 72 | Returns 73 | ======= 74 | dataset: pandas DataFrame object 75 | object containing call & put option data 76 | ''' 77 | 78 | date_string = date.strftime('%Y%m%d') 79 | # loads the call data from the web 80 | data = get_data_from_www('Call', month, year, date_string) 81 | calls = parse_data(data, date) # parse the raw data 82 | calls = calls.rename(columns={'Daily settlem. price': 'Call_Price'}) 83 | 84 | calls = pd.DataFrame(calls.pop('Call_Price').astype(float)) 85 | # the same for puts 86 | data = get_data_from_www('Put', month, year, date_string) 87 | puts = parse_data(data, date) 88 | puts = puts.rename(columns={'Daily settlem. price': 'Put_Price'}) 89 | puts = pd.DataFrame(puts.pop('Put_Price').astype(float)) 90 | 91 | dataset = merge_and_filter(puts, calls) # merges the two time series 92 | 93 | return dataset 94 | 95 | 96 | def get_data_from_www(oType, matMonth, matYear, date): 97 | ''' Retrieves the raw data of an option series from the web. 98 | 99 | Parameters 100 | ========== 101 | oType: string 102 | either 'Put' or 'Call' 103 | matMonth: int 104 | maturity month 105 | matYear: int 106 | maturity year 107 | date: string 108 | expiry in the format 'YYYYMMDD' 109 | 110 | Returns 111 | ======= 112 | a: string 113 | raw text with option data 114 | ''' 115 | 116 | url = URL % (oType, matMonth, matYear, date) # parametrizes the URL 117 | a = requests.get(url).text 118 | return a 119 | 120 | 121 | def merge_and_filter(puts, calls): 122 | ''' Gets two pandas time series for the puts and calls 123 | (from the same option series), merges them, filters out 124 | all options with prices smaller than 0.5 and 125 | returns the resulting DataFrame object. 
126 | 127 | Parameters 128 | ========== 129 | puts: pandas DataFrame object 130 | put option data 131 | calls: pandas DataFrame object 132 | call option data 133 | 134 | Returns 135 | ======= 136 | df: pandas DataFrame object 137 | merged & filtered options data 138 | ''' 139 | 140 | df = calls.join(puts, how='inner') # merges the two time series 141 | # filters all prices which are too small 142 | df = df[(df.Put_Price >= 0.5) & (df.Call_Price >= 0.5)] 143 | 144 | return df 145 | 146 | 147 | def parse_data(data, date): 148 | ''' Parses the HTML table and transforms it into a CSV compatible 149 | format. The result can be directly imported into a pandas DataFrame. 150 | 151 | Parameters 152 | ========== 153 | data: string 154 | document containing the Web content 155 | date: datetime object 156 | date for which the data is parsed 157 | 158 | Returns 159 | ======= 160 | dataset: pandas DataFrame object 161 | transformed option raw data 162 | ''' 163 | parts = data.split('Total') 168 | table = parts3[0] # the html table containing the data 169 | table = table.replace('class="dataTable">', 'Pricing day') 170 | 171 | # replace tags by commas and newlines 172 | table = table.replace('', '\n') 173 | table = table.replace(',', '') 174 | table = table.replace('', ',') 175 | table = table.replace('', '') 176 | table = table.replace('', ',') 177 | table = table.replace('', '') 178 | table = table.replace('', '\n') 179 | 180 | # the resulting string looks like a CSV file 181 | date_string = date.strftime('%d.%m.%Y') 182 | table = table.replace('', date_string) 183 | 184 | string = StringIO(table) # mask the string as file 185 | dataset = pd.read_csv(string, parse_dates=[0], index_col=(0, 1), 186 | dayfirst=True) # read the 'file' as pandas DataFrame object 187 | 188 | return dataset 189 | 190 | 191 | def data_collection(path): 192 | ''' Main function which saves data into the HDF5 file 193 | 'index_option_series.h5' for later use. 
194 | 195 | Parameters 196 | ========== 197 | path: string 198 | path to store the data 199 | ''' 200 | # file to store data 201 | store = pd.HDFStore(path + 'index_option_series.h5', 'a') 202 | 203 | today = dt.datetime.today() 204 | start = today - dt.timedelta(31) # the last 31 days 205 | 206 | day = start.day 207 | month = start.month 208 | year = start.year 209 | 210 | for i in range(4): # iterates over the next 4 months 211 | dummy_month = month + i 212 | dummy_year = year 213 | if dummy_month > 12: 214 | dummy_month -= 12 215 | dummy_year += 1 216 | 217 | # collect daily data beginning 31 days ago (start) for 218 | # option series with expiry dummy_month, dummy_year 219 | dataset = collect_option_series(dummy_month, dummy_year, start) 220 | 221 | dummy_date = dt.datetime(dummy_year, dummy_month, day) 222 | 223 | # abbreviation for expiry date (for example Oct14) 224 | series_name = dummy_date.strftime('%b%y') 225 | 226 | if series_name in store.keys(): # if data for that series exists 227 | index_old = store[series_name].index 228 | index_new = dataset.index 229 | 230 | if len(index_new - index_old) > 0: 231 | dummy = pd.concat((store[series_name], 232 | dataset.ix[index_new - index_old])) # add the new data 233 | 234 | store[series_name] = dummy 235 | else: 236 | if len(dataset) > 0: 237 | # if series is new, write whole data set into data store 238 | store[series_name] = dataset 239 | 240 | store.close() 241 | -------------------------------------------------------------------------------- /legacy/python2/scripts/srd_simulation_analysis.py: -------------------------------------------------------------------------------- 1 | # 2 | # Valuation of European volatility options 3 | # by Monte Carlo simulation in 4 | # Gruenbichler and Longstaff (1996) model 5 | # -- analysis of valuation results 6 | # 7 | # (c) Dr. Yves J. Hilpisch 8 | # Listed Volatility and Variance Derivatives 9 | # 10 | import time 11 | import math 12 | import numpy as np 13 | from datetime import datetime 14 | from srd_functions import generate_paths, call_price 15 | from srd_simulation_results import * 16 | 17 | # Model Parameters 18 | v0 = 20.0 # initial volatility 19 | kappa = 3.0 # speed of mean reversion 20 | theta = 20.0 # long-term volatility 21 | sigma = 3.2 # standard deviation coefficient 22 | zeta = 0.0 # factor of the expected volatility risk premium 23 | r = 0.01 # risk-free short rate 24 | 25 | # General Simulation Parameters 26 | write = True 27 | var_red = [(False, False), (False, True), (True, False), (True, True)] 28 | # 1st = mo_match -- random number correction (std + mean + drift) 29 | # 2nd = anti_paths -- antithetic paths for variance reduction 30 | # number of time steps 31 | steps_list = [25, 50, 75, 100] 32 | # number of paths per valuation 33 | paths_list = [2500, 50000, 75000, 100000, 125000, 150000] 34 | SEED = 100000 # seed value 35 | runs = 3 # number of simulation runs 36 | PY1 = 0.010 # performance yardstick 1: abs. error in currency units 37 | PY2 = 0.010 # performance yardstick 2: rel. 
error in decimals 38 | maturity_list = [1.0 / 12 , 1.0 / 4, 1.0 / 2, 1.0] # maturity list 39 | strike_list = [15.0, 17.5, 20.0, 22.5, 25.0] # strike list 40 | 41 | 42 | def generate_paths(x0, kappa, theta, sigma, T, M, I): 43 | ''' Simulation of square-root diffusion with exact discretization 44 | 45 | Parameters 46 | ========== 47 | x0: float (positive) 48 | starting value 49 | kappa: float (positive) 50 | mean-reversion factor 51 | theta: float (positive) 52 | long-run mean 53 | sigma: float (positive) 54 | volatility (of volatility) 55 | T: float (positive) 56 | time-to-maturity 57 | M: int 58 | number of time intervals 59 | I: int 60 | number of simulation paths 61 | 62 | Returns 63 | ======= 64 | x: NumPy ndarray object 65 | simulated paths 66 | ''' 67 | dt = float(T) / M 68 | x = np.zeros((M + 1, I), dtype=np.float) 69 | x[0, :] = x0 70 | # matrix filled with standard normally distributed rv 71 | ran = randoms(M, I) 72 | d = 4 * kappa * theta / sigma ** 2 73 | # constant factor in the integrated process of x 74 | c = (sigma ** 2 * (1 - math.exp(-kappa * dt))) / (4 * kappa) 75 | if d > 1: 76 | for t in range(1, M + 1): 77 | # non-centrality parameter 78 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 79 | # matrix with chi-squared distributed rv 80 | chi = np.random.chisquare(d - 1, I) 81 | x[t, :] = c * ((ran[t] + np.sqrt(l)) ** 2 + chi) 82 | else: 83 | for t in range(1, M + 1): 84 | l = x[t - 1, :] * math.exp(-kappa * dt) / c 85 | N = np.random.poisson(l / 2, I) 86 | chi = np.random.chisquare(d + 2 * N, I) 87 | x[t, :] = c * chi 88 | return x 89 | 90 | 91 | def randoms(M, I): 92 | ''' Function to generate pseudo-random numbers with variance reduction. 93 | 94 | Parameters 95 | ========== 96 | M: int 97 | number of discrete time intervals 98 | I: int 99 | number of simulated paths 100 | 101 | Returns 102 | ======= 103 | rand: Numpy ndarray object 104 | object with pseudo-random numbers 105 | ''' 106 | if anti_paths is True: 107 | rand_ = np.random.standard_normal((M + 1, I / 2)) 108 | rand = np.concatenate((rand_, -rand_), 1) 109 | else: 110 | rand = np.random.standard_normal((M + 1, I)) 111 | if mo_match is True: 112 | rand = rand / np.std(rand) 113 | rand = rand - np.mean(rand) 114 | return rand 115 | 116 | 117 | t0 = time.time() 118 | sim_results = pd.DataFrame() 119 | 120 | for vr in var_red: # variance reduction techniques 121 | mo_match, anti_paths = vr 122 | for M in steps_list: # number of time steps 123 | for I in paths_list: # number of paths 124 | t1 = time.time() 125 | d1 = datetime.now() 126 | abs_errors = [] 127 | rel_errors = [] 128 | l = 0.0 129 | errors = 0 130 | # name of the simulation setup 131 | name = ('Call_' + str(runs) + '_' 132 | + str(M) + '_' + str(I / 1000) 133 | + '_' + str(mo_match)[0] + str(anti_paths)[0] + 134 | '_' + str(PY1 * 100) + '_' + str(PY2 * 100)) 135 | np.random.seed(SEED) # RNG seed value 136 | for run in range(runs): # simulation runs 137 | print "\nSimulation Run %d of %d" % (run + 1, runs) 138 | print "----------------------------------------------------" 139 | print ("Elapsed Time in Minutes %8.2f" 140 | % ((time.time() - t0) / 60)) 141 | print "----------------------------------------------------" 142 | z = 0 143 | for T in maturity_list: # time-to-maturity 144 | dt = T / M # time interval in year fractions 145 | V = generate_paths(v0, kappa, theta, sigma, T, M, I) 146 | # volatility process paths 147 | print "\n Results for Time-to-Maturity %6.3f" % T 148 | print " -----------------------------------------" 149 | for K in strike_list: # 
Strikes 150 | h = np.maximum(V[-1] - K, 0) # inner value matrix 151 | # MCS estimator 152 | call_estimate = math.exp(-r * T) * np.sum(h) / I * 100 153 | # BSM analytical value 154 | callalue = call_price(v0, kappa, theta, sigma, 155 | zeta, T, r, K) * 100 156 | # errors 157 | diff = call_estimate - callalue 158 | rdiff = diff / callalue 159 | abs_errors.append(diff) 160 | rel_errors.append(rdiff * 100) 161 | # output 162 | br = " ----------------------------------" 163 | print "\n Results for Strike %4.2f\n" % K 164 | print (" European Op. Value MCS %8.4f" % 165 | call_estimate) 166 | print (" European Op. Value Closed %8.4f" % 167 | callalue) 168 | print " Valuation Error (abs) %8.4f" % diff 169 | print " Valuation Error (rel) %8.4f" % rdiff 170 | if abs(diff) < PY1 or abs(diff) / callalue < PY2: 171 | print " Accuracy ok!\n" + br 172 | CORR = True 173 | else: 174 | print " Accuracy NOT ok!\n" + br 175 | CORR = False 176 | errors = errors + 1 177 | print " %d Errors, %d Values, %.1f Min." \ 178 | % (errors, len(abs_errors), 179 | float((time.time() - t1) / 60)) 180 | print (" %d Time Intervals, %d Paths" 181 | % (M, I)) 182 | z = z + 1 183 | l = l + 1 184 | 185 | t2 = time.time() 186 | d2 = datetime.now() 187 | if write is True: # append simulation results 188 | sim_results = write_results(sim_results, name, SEED, 189 | runs, M, I, mo_match, anti_paths, 190 | l, PY1, PY2, errors, 191 | float(errors) / l, np.array(abs_errors), 192 | np.array(rel_errors), t2 - t1, (t2 - t1) / 60, d1, d2) 193 | 194 | if write is True: 195 | # write/append DataFrame to HDFStore object 196 | write_to_database(sim_results) 197 | --------------------------------------------------------------------------------
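
The modules above are easier to connect with two short, hedged sketches. First, a hypothetical driver that chains the option-data collection and the sub-index calculation. The signatures `data_collection(path)` and `make_subindex(path)` are taken from the files above; the relative paths and the `sys.path` handling are assumptions for illustration only.

    # hypothetical usage sketch -- paths and sys.path handling are assumptions
    import sys
    sys.path.append('./code/scripts')            # assumed location of the scripts
    import index_collect_option_data as icod
    import index_subindex_calculation as isc

    # refresh 'index_option_series.h5' with recent Eurex settlement data,
    # then compute the sub-indexes from that store
    icod.data_collection(path='./code/data/')
    df = isc.make_subindex(path='./code/data/')  # V6I1, V6I2, V6I3 plus expiries
    print(df.tail())

Second, a minimal, self-contained sketch of the exact transition sampling for the square-root diffusion that srd_simulation_analysis.py implements via a chi-square/Poisson mixture, followed by a plain Monte Carlo estimate of a European volatility call. The model parameters are those defined at the top of the script; the function name `srd_exact_paths` and the use of NumPy's `noncentral_chisquare` generator are our choices, not the repository's, and the script itself additionally scales prices by 100 and benchmarks the estimate against the closed-form value from srd_functions.

    import math
    import numpy as np

    def srd_exact_paths(x0, kappa, theta, sigma, T, M, I, seed=None):
        ''' Exact simulation of a square-root diffusion (CIR process).
        Conditional on x_t, the value x_{t+dt} equals c times a non-central
        chi-square variate with d = 4 * kappa * theta / sigma ** 2 degrees of
        freedom and non-centrality x_t * exp(-kappa * dt) / c. '''
        rng = np.random.default_rng(seed)
        dt = T / M
        c = sigma ** 2 * (1 - math.exp(-kappa * dt)) / (4 * kappa)
        d = 4 * kappa * theta / sigma ** 2
        x = np.zeros((M + 1, I))
        x[0] = x0
        for t in range(1, M + 1):
            nc = x[t - 1] * math.exp(-kappa * dt) / c    # non-centrality parameter
            x[t] = c * rng.noncentral_chisquare(d, nc, size=I)
        return x

    # model parameters as in srd_simulation_analysis.py
    v0, kappa, theta, sigma, r = 20.0, 3.0, 20.0, 3.2, 0.01
    # one combination from the script's maturity, strike, step and path lists
    T, K, M, I = 0.5, 20.0, 50, 100000

    paths = srd_exact_paths(v0, kappa, theta, sigma, T, M, I, seed=100000)
    payoff = np.maximum(paths[-1] - K, 0)                # call payoff at maturity
    call_mcs = math.exp(-r * T) * payoff.mean()          # Monte Carlo estimator
    # the script compares such estimates with the closed-form benchmark
    # call_price(v0, kappa, theta, sigma, zeta, T, r, K) from srd_functions

The exact scheme avoids any discretization bias in the terminal distribution, so remaining differences to the closed-form value reflect pure Monte Carlo error, which is exactly what the error statistics collected by srd_simulation_analysis.py are meant to measure.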