├── .gitignore ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── optimalportfolios ├── __init__.py ├── config.py ├── covar_estimation │ ├── README.md │ ├── __init__.py │ ├── annualisation.py │ ├── config.py │ ├── covar_estimator.py │ ├── current_covar.py │ ├── examples │ │ ├── demo_covar_different_estimation_freqs.py │ │ └── simulate_factor_returns.py │ ├── rolling_covar.py │ └── utils.py ├── examples │ ├── computation_of_tracking_error.py │ ├── crypto_allocation │ │ ├── README.md │ │ ├── article_figures.py │ │ ├── backtest_portfolios_for_article.py │ │ ├── data │ │ │ ├── BTC_from_2010.csv │ │ │ ├── CTA_Historical.xlsx │ │ │ ├── HFRX_historical_HFRXGL.csv │ │ │ ├── Macro_Trading_Index_Historical.xlsx │ │ │ ├── crypto_allocation_prices.csv │ │ │ └── crypto_allocation_prices_updated.csv │ │ ├── load_prices.py │ │ └── perf_crypto_portfolios.py │ ├── equal_factor_budget.py │ ├── figures │ │ ├── MinVariance_multi_covar_estimator_backtest.PNG │ │ ├── example_customised_report.PNG │ │ ├── example_portfolio_factsheet1.PNG │ │ ├── example_portfolio_factsheet2.PNG │ │ ├── max_diversification_span.PNG │ │ └── multi_optimisers_backtest.PNG │ ├── lasso_covar_estimation.py │ ├── lasso_estimation.py │ ├── lasso_risk_model.py │ ├── long_short_optimisation.py │ ├── multi_covar_estimation_backtest.py │ ├── multi_optimisers_backtest.py │ ├── optimal_portfolio_backtest.py │ ├── parameter_sensitivity_backtest.py │ ├── resources │ │ └── dow30_prices.csv │ ├── robust_optimisation_saa_taa │ │ ├── README.md │ │ └── hcgl_covar_for_rolling_backtest.py │ ├── solve_risk_budgets_balanced_portfolio.py │ ├── solvers │ │ ├── carra_mixture.py │ │ ├── max_diversification.py │ │ ├── max_sharpe.py │ │ ├── min_variance.py │ │ ├── risk_parity.py │ │ ├── target_return.py │ │ └── tracking_error.py │ ├── sp500_minvar.py │ ├── sp500_universe.py │ └── universe.py ├── lasso │ ├── __init__.py │ └── lasso_model_estimator.py ├── local_path.py ├── optimization │ ├── __init__.py │ ├── constraints.py │ ├── solvers 
│ │ ├── __init__.py │ │ ├── carra_mixure.py │ │ ├── max_diversification.py │ │ ├── max_sharpe.py │ │ ├── quadratic.py │ │ ├── risk_budgeting.py │ │ ├── target_return.py │ │ └── tracking_error.py │ └── wrapper_rolling_portfolios.py ├── reports │ ├── __init__.py │ ├── backtest_alphas.py │ ├── config.py │ └── marginal_backtest.py ├── settings.yaml ├── test_data.py └── utils │ ├── __init__.py │ ├── factor_alphas.py │ ├── filter_nans.py │ ├── gaussian_mixture.py │ ├── manager_alphas.py │ ├── portfolio_funcs.py │ └── returns_unsmoother.py ├── pyproject.toml ├── pyrb ├── README.md ├── __init__.py ├── allocation.py ├── settings.py ├── solvers.py ├── tools.py └── validation.py └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # created by virtualenv automatically 2 | /quant_strats/ 3 | /mac_portfolio_optimizer/ 4 | 5 | .idea/ 6 | Lib/ 7 | Scripts/ 8 | dist/ 9 | 10 | __pycache__/ 11 | 12 | # YAML 13 | *.yaml 14 | 15 | # Byte-compiled / optimized / DLL files 16 | docs/figures/ 17 | 18 | *.py[cod] 19 | *$py.class 20 | 21 | # C extensions 22 | *.so 23 | 24 | # Distribution / packaging 25 | .Python 26 | build/ 27 | develop-eggs/ 28 | downloads/ 29 | eggs/ 30 | .eggs/ 31 | lib/ 32 | lib64/ 33 | parts/ 34 | sdist/ 35 | var/ 36 | wheels/ 37 | pip-wheel-metadata/ 38 | share/python-wheels/ 39 | *.egg-info/ 40 | .installed.cfg 41 | *.egg 42 | MANIFEST 43 | .*xml 44 | .*iml 45 | 46 | # PyInstaller 47 | # Usually these files are written by a python script from a template 48 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
49 | *.manifest 50 | *.spec 51 | 52 | # Installer logs 53 | pip-log.txt 54 | pip-delete-this-directory.txt 55 | 56 | # Unit test / coverage reports 57 | htmlcov/ 58 | .tox/ 59 | .nox/ 60 | .coverage 61 | .coverage.* 62 | .cache 63 | nosetests.xml 64 | coverage.xml 65 | *.cover 66 | *.py,cover 67 | .hypothesis/ 68 | .pytest_cache/ 69 | 70 | # Translations 71 | *.mo 72 | *.pot 73 | 74 | # Django stuff: 75 | *.log 76 | local_settings.py 77 | db.sqlite3 78 | db.sqlite3-journal 79 | 80 | # Flask stuff: 81 | instance/ 82 | .webassets-cache 83 | 84 | # Scrapy stuff: 85 | .scrapy 86 | 87 | # Sphinx documentation 88 | docs/_build/ 89 | 90 | # PyBuilder 91 | target/ 92 | 93 | # Jupyter Notebook 94 | .ipynb_checkpoints 95 | 96 | # IPython 97 | profile_default/ 98 | ipython_config.py 99 | 100 | # pyenv 101 | .python-version 102 | 103 | # pipenv 104 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 105 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 106 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 107 | # install all needed dependencies. 108 | #Pipfile.lock 109 | 110 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 111 | __pypackages__/ 112 | 113 | # Celery stuff 114 | celerybeat-schedule 115 | celerybeat.pid 116 | 117 | # SageMath parsed files 118 | *.sage.py 119 | 120 | # Environments 121 | .env 122 | .venv 123 | env/ 124 | venv/ 125 | ENV/ 126 | env.bak/ 127 | venv.bak/ 128 | 129 | # Spyder project settings 130 | .spyderproject 131 | .spyproject 132 | 133 | # Rope project settings 134 | .ropeproject 135 | 136 | # mkdocs documentation 137 | /site 138 | 139 | # mypy 140 | .mypy_cache/ 141 | .dmypy.json 142 | dmypy.json 143 | 144 | # Pyre type checker 145 | .pyre/ 146 | 147 | /dist/ 148 | *.xml 149 | *.pyc 150 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # MANIFEST.in 2 | # Controls what files are included in the source distribution (sdist) 3 | 4 | # Include essential package files 5 | include README.md 6 | include LICENSE.txt 7 | include pyproject.toml 8 | include requirements*.txt 9 | 10 | # Include all Python source files 11 | recursive-include optimalportfolios *.py 12 | 13 | # Include specific data files that the package needs 14 | recursive-include optimalportfolios/data *.csv 15 | recursive-include optimalportfolios/templates *.html 16 | recursive-include optimalportfolios/static *.css *.js 17 | recursive-include optimalportfolios *.yml *.yaml *.json *.txt *.md 18 | 19 | # Explicitly exclude figures, notebooks, resources, and image files 20 | recursive-exclude optimalportfolios/examples/figures * 21 | recursive-exclude optimalportfolios/examples/resources * 22 | recursive-exclude optimalportfolios/notebooks * 23 | recursive-exclude optimalportfolios *figures* 24 | global-exclude *.png 25 | global-exclude *.jpg 26 | global-exclude *.jpeg 27 | global-exclude *.gif 28 | global-exclude *.svg 29 | global-exclude *.pdf 30 | global-exclude *.eps 31 | global-exclude *.ipynb 32 | 33 | # Exclude development and 
build artifacts 34 | recursive-exclude * __pycache__ 35 | recursive-exclude * *.py[co] 36 | recursive-exclude * .DS_Store 37 | recursive-exclude * .git* 38 | recursive-exclude * *.egg-info 39 | global-exclude *.pyc 40 | global-exclude *.pyo -------------------------------------------------------------------------------- /optimalportfolios/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | import optimalportfolios.local_path 3 | 4 | from optimalportfolios.config import PortfolioObjective 5 | 6 | from optimalportfolios.utils.__init__ import * 7 | 8 | from optimalportfolios.lasso.__init__ import * 9 | 10 | from optimalportfolios.covar_estimation.__init__ import * 11 | 12 | from optimalportfolios.optimization.__init__ import * 13 | -------------------------------------------------------------------------------- /optimalportfolios/config.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class PortfolioObjective(Enum): 5 | """ 6 | implemented portfolios in rolling_engine 7 | """ 8 | # risk-based: 9 | MAX_DIVERSIFICATION = 'MaxDiversification' # maximum diversification measure 10 | EQUAL_RISK_CONTRIBUTION = 'EqualRisk' # implementation in risk_parity 11 | MIN_VARIANCE = 'MinVariance' # min w^t @ covar @ w 12 | # return-risk based 13 | QUADRATIC_UTILITY = 'QuadraticUtil' # max means^t*w- 0.5*gamma*w^t*covar*w 14 | MAXIMUM_SHARPE_RATIO = 'MaximumSharpe' # max means^t*w / sqrt(*w^t*covar*w) 15 | # return-skeweness based 16 | MAX_CARA_MIXTURE = 'MaxCarraMixture' # carra for mixture distributions 17 | 18 | 19 | -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from optimalportfolios.covar_estimation.covar_estimator import CovarEstimator 3 | 4 | from 
optimalportfolios.covar_estimation.config import CovarEstimatorType 5 | 6 | from optimalportfolios.covar_estimation.rolling_covar import (EstimatedRollingCovarData, 7 | wrapper_estimate_rolling_covar, 8 | estimate_rolling_ewma_covar, 9 | wrapper_estimate_rolling_lasso_covar, 10 | estimate_rolling_lasso_covar, 11 | estimate_rolling_lasso_covar_different_freq) 12 | 13 | from optimalportfolios.covar_estimation.current_covar import (EstimatedCurrentCovarData, 14 | wrapper_estimate_current_covar, 15 | estimate_current_ewma_covar, 16 | wrapper_estimate_current_lasso_covar, 17 | estimate_lasso_covar, 18 | estimate_lasso_covar_different_freq) -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/annualisation.py: -------------------------------------------------------------------------------- 1 | """ 2 | implement annualisation 3 | """ 4 | 5 | import pandas as pd 6 | from typing import Union 7 | 8 | # Standard pandas frequency mappings to annualization factors 9 | FREQUENCY_MAPPINGS = { 10 | # Daily frequencies 11 | 'D': 365, # Calendar daily 12 | 'B': 260, # Business daily 13 | 14 | # Weekly frequencies 15 | 'WE': 52, # Weekly ending (replaces 'W') 16 | 'W-MON': 52, # Weekly ending Monday 17 | 'W-TUE': 52, # Weekly ending Tuesday 18 | 'W-WED': 52, # Weekly ending Wednesday 19 | 'W-THU': 52, # Weekly ending Thursday 20 | 'W-FRI': 52, # Weekly ending Friday 21 | 'W-SAT': 52, # Weekly ending Saturday 22 | 'W-SUN': 52, # Weekly ending Sunday 23 | 24 | # Monthly frequencies 25 | 'ME': 12, # Month end (replaces 'M') 26 | 'MS': 12, # Month start 27 | 'BME': 12, # Business month end (replaces 'BM') 28 | 'BMS': 12, # Business month start 29 | 30 | # Quarterly frequencies 31 | 'QE': 4, # Quarter end (replaces 'Q') 32 | 'QS': 4, # Quarter start 33 | 'BQE': 4, # Business quarter end (replaces 'BQ') 34 | 'BQS': 4, # Business quarter start 35 | 'QE-DEC': 4, # Quarter ending December (replaces 'Q-DEC') 36 | 'QE-JAN': 4, # 
Quarter ending January 37 | 'QE-FEB': 4, # Quarter ending February 38 | 'QE-MAR': 4, # Quarter ending March 39 | 40 | # Annual frequencies 41 | 'YE': 1, # Year end (replaces 'A') 42 | 'YS': 1, # Year start (replaces 'AS') 43 | 'BYE': 1, # Business year end (replaces 'BA') 44 | 'BYS': 1, # Business year start (replaces 'BAS') 45 | 'YE-DEC': 1, # Year ending December (replaces 'A-DEC') 46 | 'YE-JAN': 1, # Year ending January 47 | 'YE-FEB': 1, # Year ending February 48 | 'YE-MAR': 1, # Year ending March 49 | 'YE-APR': 1, # Year ending April 50 | 'YE-MAY': 1, # Year ending May 51 | 'YE-JUN': 1, # Year ending June 52 | 'YE-JUL': 1, # Year ending July 53 | 'YE-AUG': 1, # Year ending August 54 | 'YE-SEP': 1, # Year ending September 55 | 'YE-OCT': 1, # Year ending October 56 | 'YE-NOV': 1, # Year ending November 57 | 58 | # Legacy frequency support (deprecated but still supported) 59 | 'W': 52, # Legacy weekly (use 'WE') 60 | 'M': 12, # Legacy monthly (use 'ME') 61 | 'Q': 4, # Legacy quarterly (use 'QE') 62 | 'A': 1, # Legacy annual (use 'YE') 63 | 'BM': 12, # Legacy business month (use 'BME') 64 | 'BQ': 4, # Legacy business quarter (use 'BQE') 65 | 'BA': 1, # Legacy business annual (use 'BYE') 66 | } 67 | 68 | 69 | def get_annualization_factor(freq: Union[str, pd.Timestamp]) -> float: 70 | """ 71 | Get factor to annualize from given pandas frequency. 72 | 73 | Args: 74 | freq: Pandas frequency string (e.g., 'ME', 'QE', 'B') or frequency object 75 | 76 | Returns: 77 | Number of periods per year for the given frequency 78 | 79 | Examples: 80 | >>> get_annualization_factor('ME') 81 | 12 82 | >>> get_annualization_factor('QE') 83 | 4 84 | >>> get_annualization_factor('B') 85 | 260 86 | """ 87 | if isinstance(freq, pd.Timestamp): 88 | freq = freq.freq 89 | 90 | freq_str = str(freq).upper() 91 | 92 | if freq_str in FREQUENCY_MAPPINGS: 93 | return FREQUENCY_MAPPINGS[freq_str] 94 | else: 95 | raise ValueError(f"Unsupported frequency: {freq_str}. 
" 96 | f"Supported frequencies: {list(FREQUENCY_MAPPINGS.keys())}") 97 | 98 | 99 | def get_conversion_factor(from_freq: Union[str, pd.Timestamp], 100 | to_freq: Union[str, pd.Timestamp]) -> float: 101 | """ 102 | Get factor to convert between pandas frequencies. 103 | 104 | Args: 105 | from_freq: Source frequency 106 | to_freq: Target frequency 107 | 108 | Returns: 109 | Conversion factor (multiply source data by this factor) 110 | 111 | Examples: 112 | >>> get_conversion_factor('QE', 'ME') # Quarterly to Monthly 113 | 0.3333333333333333 114 | >>> get_conversion_factor('ME', 'QE') # Monthly to Quarterly 115 | 3.0 116 | >>> get_conversion_factor('B', 'ME') # Business Daily to Monthly 117 | 21.666666666666668 118 | """ 119 | from_periods = get_annualization_factor(from_freq) 120 | to_periods = get_annualization_factor(to_freq) 121 | 122 | return from_periods / to_periods 123 | -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/config.py: -------------------------------------------------------------------------------- 1 | 2 | from enum import Enum 3 | 4 | 5 | class CovarEstimatorType(Enum): 6 | EWMA = 1 7 | LASSO = 2 8 | -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/covar_estimator.py: -------------------------------------------------------------------------------- 1 | """ 2 | some utilities for estimation of covariance matrices 3 | """ 4 | from __future__ import annotations 5 | import pandas as pd 6 | import qis as qis 7 | from typing import Union, Optional, Dict, Any 8 | from dataclasses import dataclass, asdict 9 | 10 | # project 11 | from optimalportfolios.covar_estimation.config import CovarEstimatorType 12 | from optimalportfolios.lasso.lasso_model_estimator import LassoModel 13 | from optimalportfolios.covar_estimation.rolling_covar import EstimatedRollingCovarData, wrapper_estimate_rolling_covar 14 | from 
optimalportfolios.covar_estimation.current_covar import EstimatedCurrentCovarData, wrapper_estimate_current_covar 15 | 16 | 17 | @dataclass 18 | class CovarEstimator: 19 | """ 20 | specifies estimator specific parameters 21 | CovarEstimator supports: 22 | fit_rolling_covars() 23 | fit_covars() 24 | """ 25 | covar_estimator_type: CovarEstimatorType = CovarEstimatorType.EWMA 26 | lasso_model: LassoModel = None # for mandatory lasso estimator 27 | factor_returns_freq: str = 'W-WED' # for lasso estimator 28 | rebalancing_freq: str = 'QE' # sampling frequency for computing covariance matrix at rebalancing dates 29 | returns_freqs: Union[str, pd.Series] = 'ME' # frequency of returns for beta estimation 30 | span: int = 52 # span for ewma estimate 31 | is_apply_vol_normalised_returns: bool = False # for ewma 32 | demean: bool = True # adjust for mean 33 | squeeze_factor: Optional[float] = None # squeezing factor for ewma covars 34 | residual_var_weight: float = 1.0 # for lasso covars 35 | span_freq_dict: Optional[Dict[str, int]] = None # spans for different freqs 36 | num_lags_newey_west_dict: Optional[Dict[str, int]] = None 37 | 38 | def to_dict(self) -> Dict[str, Any]: 39 | this = asdict(self) 40 | if self.lasso_model is not None: # need to make it dataclass 41 | this['lasso_model'] = LassoModel(**this['lasso_model']) 42 | return this 43 | 44 | def fit_rolling_covars(self, 45 | prices: pd.DataFrame, 46 | time_period: qis.TimePeriod, 47 | risk_factor_prices: pd.DataFrame = None, 48 | ) -> EstimatedRollingCovarData: 49 | """ 50 | fit rolling covars at rebalancing_freq 51 | time_period is for what period we need 52 | """ 53 | rolling_covar_data = wrapper_estimate_rolling_covar(prices=prices, 54 | risk_factor_prices=risk_factor_prices, 55 | time_period=time_period, 56 | returns_freq=self.factor_returns_freq, 57 | **self.to_dict()) 58 | return rolling_covar_data 59 | 60 | def fit_current_covars(self, 61 | prices: pd.DataFrame, 62 | risk_factor_prices: pd.DataFrame = None, 63 
| ) -> EstimatedCurrentCovarData: 64 | """ 65 | fit rolling covars at rebalancing_freq 66 | time_period is for what period we need 67 | """ 68 | rolling_covar_data = wrapper_estimate_current_covar(prices=prices, 69 | risk_factor_prices=risk_factor_prices, 70 | **self.to_dict()) 71 | return rolling_covar_data 72 | -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/examples/demo_covar_different_estimation_freqs.py: -------------------------------------------------------------------------------- 1 | """ 2 | illustrate estimation of covar at different frequencies 3 | """ 4 | 5 | # packages 6 | import pandas as pd 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | import qis as qis 10 | 11 | from optimalportfolios import LassoModel, LassoModelType, wrapper_estimate_current_lasso_covar 12 | from optimalportfolios.covar_estimation.examples.simulate_factor_returns import simulate_factor_model_returns 13 | 14 | 15 | simulation_results = simulate_factor_model_returns(n_assets=9, n_periods=20*260, seed=40) 16 | 17 | asset_prices = qis.returns_to_nav(returns=simulation_results['asset_returns']) 18 | risk_factor_prices = qis.returns_to_nav(returns=simulation_results['factor_returns']) 19 | 20 | 21 | # select multi asset ETFs 22 | instrument_data = dict(Asset_1='Equity', 23 | Asset_2='Equity', 24 | Asset_3='Equity', 25 | Asset_4='Bonds', 26 | Asset_5='Bonds', 27 | Asset_6='Bonds', 28 | Asset_7='Mixed', 29 | Asset_8='Mixed', 30 | Asset_9='Mixed') 31 | 32 | group_data = pd.Series(instrument_data) 33 | ac_group_order = ['Equity', 'Bonds', 'Bonds'] 34 | 35 | # set lasso model 36 | lasso_params = dict(reg_lambda=1e-5, span=120, demean=False, solver='ECOS_BB', warmup_period=50) 37 | lasso_model = LassoModel(model_type=LassoModelType.GROUP_LASSO_CLUSTERS, **lasso_params) 38 | 39 | 40 | covar_data_all_daily = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 41 | prices=asset_prices, 
42 | lasso_model=lasso_model, 43 | returns_freqs='B', 44 | factor_returns_freq='B') 45 | 46 | covar_data_factor_daily_asset_mixed = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 47 | prices=asset_prices, 48 | lasso_model=lasso_model, 49 | returns_freqs=group_data.map({'Equity': 'B', 'Bonds': 'W-WED', 'Mixed': 'ME'}), 50 | factor_returns_freq='B') 51 | covar_data_factor_weekly_all = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 52 | prices=asset_prices, 53 | lasso_model=lasso_model, 54 | returns_freqs='W-WED', 55 | factor_returns_freq='W-WED') 56 | covar_data_factor_weekly_asset_mixed = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 57 | prices=asset_prices, 58 | lasso_model=lasso_model, 59 | returns_freqs=group_data.map({'Equity': 'ME', 'Bonds': 'ME', 'Mixed': 'QE'}), 60 | factor_returns_freq='W-WED') 61 | 62 | covar_data_factor_monthy_all = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 63 | prices=asset_prices, 64 | lasso_model=lasso_model, 65 | returns_freqs='ME', 66 | factor_returns_freq='ME') 67 | 68 | covar_data_factor_monthy_asset_mixed = wrapper_estimate_current_lasso_covar(risk_factors_prices=risk_factor_prices, 69 | prices=asset_prices, 70 | lasso_model=lasso_model, 71 | returns_freqs=group_data.map({'Equity': 'ME', 'Bonds': 'QE', 'Mixed': 'QE'}), 72 | factor_returns_freq='ME') 73 | 74 | fig, axs = plt.subplots(2, 4, figsize=(14, 12), constrained_layout=True) 75 | theoretical_asset_covar = pd.DataFrame(260*simulation_results['theoretical_asset_covar'], 76 | index=asset_prices.columns, 77 | columns=asset_prices.columns) 78 | sample_covar = pd.DataFrame(260*np.cov(simulation_results['asset_returns'], rowvar=False, bias=True), 79 | index=asset_prices.columns, 80 | columns=asset_prices.columns) 81 | 82 | qis.plot_heatmap(df=theoretical_asset_covar, title='theoretical_covar', ax=axs[0, 0]) 83 | qis.plot_heatmap(df=sample_covar, 
title='sample_covar', ax=axs[1, 0]) 84 | qis.plot_heatmap(df=covar_data_all_daily.y_covar, title='all_daily', ax=axs[0, 1]) 85 | qis.plot_heatmap(df=covar_data_factor_daily_asset_mixed.y_covar, title='factor_daily_asset_mixed', ax=axs[1, 1]) 86 | qis.plot_heatmap(df=covar_data_factor_weekly_all.y_covar, title='weekly_all', ax=axs[0, 2]) 87 | qis.plot_heatmap(df=covar_data_factor_weekly_asset_mixed.y_covar, title='factor_weekly_asset_mixed', ax=axs[1, 2]) 88 | qis.plot_heatmap(df=covar_data_factor_monthy_all.y_covar, title='monthy_all', ax=axs[0, 3]) 89 | qis.plot_heatmap(df=covar_data_factor_monthy_asset_mixed.y_covar, title='factor_monthy_asset_mixed', ax=axs[1, 3]) 90 | 91 | plt.show() 92 | -------------------------------------------------------------------------------- /optimalportfolios/covar_estimation/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Union, Optional 4 | 5 | import numpy as np 6 | import pandas as pd 7 | import qis as qis 8 | 9 | 10 | def squeeze_covariance_matrix(covar: Union[np.ndarray, pd.DataFrame], 11 | squeeze_factor: Optional[float] = 0.05, 12 | is_preserve_variance: bool = True 13 | ) -> Union[np.ndarray, pd.DataFrame]: 14 | """ 15 | Adjusts the covariance matrix by applying a squeezing factor to eigenvalues. 16 | Smaller eigenvalues are reduced to mitigate noise. 
17 | for the methodology see SSRN paper 18 | Squeezing Financial Noise: A Novel Approach to Covariance Matrix Estimation 19 | https://papers.ssrn.com/sol3/papers.cfm?abstract_id=4986939 20 | """ 21 | if squeeze_factor is None or np.isclose(squeeze_factor, 0.0): 22 | return covar 23 | 24 | # need to create pd.Dataframe for keeping track of good indices 25 | if isinstance(covar, pd.DataFrame): 26 | cov_matrix_pd = covar.copy() 27 | else: 28 | cov_matrix_pd = pd.DataFrame(covar) 29 | 30 | # filter out nans and zero variances 31 | variances = np.diag(cov_matrix_pd.to_numpy()) 32 | is_good_asset = np.where(np.logical_and(np.greater(variances, 0.0), np.isnan(variances) == False)) 33 | good_tickers = cov_matrix_pd.columns[is_good_asset] 34 | clean_covar_pd = cov_matrix_pd.loc[good_tickers, good_tickers] 35 | clean_covar_np = clean_covar_pd.to_numpy() 36 | 37 | # Eigen decomposition 38 | eigenvalues, eigenvectors = np.linalg.eigh(clean_covar_np) 39 | 40 | # Squeeze smaller eigenvalues (simple threshold-based squeezing) 41 | squeezed_eigenvalues = np.array([np.maximum(eigenvalue, squeeze_factor * np.max(eigenvalues)) 42 | for eigenvalue in eigenvalues]) 43 | 44 | # Reconstruct squeezed covariance matrix 45 | squeezed_cov_matrix = eigenvectors @ np.diag(squeezed_eigenvalues) @ eigenvectors.T 46 | 47 | if is_preserve_variance: 48 | # adjustment should be applied to off-dioagonal elements too otherwise we may end up with noncosistent matrix 49 | original_variance = np.diag(clean_covar_np) 50 | squeezed_variance = np.diag(squeezed_cov_matrix) 51 | adjustment_ratio = np.sqrt(original_variance / squeezed_variance) 52 | norm = np.outer(adjustment_ratio, adjustment_ratio) 53 | squeezed_cov_matrix = norm*squeezed_cov_matrix 54 | 55 | # now extend back 56 | squeezed_cov_matrix_pd = pd.DataFrame(squeezed_cov_matrix, index=good_tickers, columns=good_tickers) 57 | # reindex for all tickers and fill nans with zeros 58 | all_tickers = cov_matrix_pd.columns 59 | squeezed_cov_matrix = 
squeezed_cov_matrix_pd.reindex(index=all_tickers).reindex(columns=all_tickers).fillna(0.0) 60 | 61 | if isinstance(covar, np.ndarray): # match return to original type 62 | squeezed_cov_matrix = squeezed_cov_matrix.to_numpy() 63 | return squeezed_cov_matrix 64 | 65 | 66 | def compute_returns_from_prices(prices: pd.DataFrame, 67 | returns_freq: Optional[str] = 'ME', 68 | demean: bool = True, 69 | drop_first: bool = True, 70 | is_first_zero: bool = False, 71 | span: Optional[int] = 52 72 | ) -> pd.DataFrame: 73 | """ 74 | compute returns for covar matrix estimation 75 | """ 76 | returns = qis.to_returns(prices=prices, is_log_returns=True, is_first_zero=is_first_zero, drop_first=drop_first, freq=returns_freq) 77 | if demean: 78 | returns = returns - qis.compute_ewm(returns, span=span) 79 | # returns.iloc[0, :] will be zero so shift the period 80 | if drop_first: 81 | returns = returns.iloc[1:, :] 82 | return returns 83 | -------------------------------------------------------------------------------- /optimalportfolios/examples/computation_of_tracking_error.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of minimization of tracking error 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | import qis as qis 8 | import yfinance as yf 9 | from enum import Enum 10 | from qis.portfolio.ewm_portfolio_risk import compute_benchamark_portfolio_risk_contributions 11 | 12 | 13 | from optimalportfolios import (Constraints, GroupLowerUpperConstraints, CovarEstimator, 14 | compute_tre_turnover_stats, 15 | wrapper_quadratic_optimisation, 16 | wrapper_risk_budgeting, 17 | wrapper_maximise_diversification, 18 | estimate_current_ewma_covar, 19 | PortfolioObjective, 20 | compute_portfolio_vol, 21 | local_path) 22 | 23 | 24 | def create_stocks_data(): 25 | dow_30_tickers = ['NVDA', 'MSFT', 'AAPL', 'AMZN', 'JPM', 'WMT', 'V', 'JNJ', 'PG', 'HD', 'KO', 'CSCO', 'IBM', 26 | 'CVX', 'UNH', 'CRM', 
'DIS', 'AXP', 'MCD', 'GS', 'MRK', 'CAT', 'VZ', 'BA', 'AMGN', 'HON', 'NKE', 27 | 'SHW', 'MMM', 'TRV'] 28 | prices = yf.download(tickers=dow_30_tickers, start="2003-12-31", end="2025-07-18", ignore_tz=True, auto_adjust=True)['Close'][dow_30_tickers] 29 | qis.save_df_to_csv(df=prices, file_name='dow30_prices', local_path=local_path.get_resource_path()) 30 | 31 | # create_stocks_data() 32 | prices = qis.load_df_from_csv(file_name='dow30_prices', local_path=local_path.get_resource_path()) 33 | print(prices) 34 | # create bench 35 | benchmark_weights = qis.df_to_weight_allocation_sum1(df=prices.iloc[-1, :]) 36 | 37 | # prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 38 | time_period = qis.TimePeriod(start='31Dec2009', end=prices.index[-1]) 39 | perf_time_period = qis.TimePeriod(start='31Dec2004', end=prices.index[-1]) # backtest reporting 40 | 41 | covar_matrix = estimate_current_ewma_covar(prices=prices, span=3*52) 42 | print(covar_matrix) 43 | qis.plot_corr_matrix_from_covar(covar=covar_matrix) 44 | 45 | 46 | # portfolio_weights = wrapper_risk_budgeting(pd_covar=covar_matrix, constraints0=Constraints(is_long_only=True)) 47 | # portfolio_weights = wrapper_quadratic_optimisation(pd_covar=covar_matrix, constraints0=Constraints(is_long_only=True)) 48 | portfolio_weights = wrapper_maximise_diversification(pd_covar=covar_matrix, constraints0=Constraints(is_long_only=True)) 49 | 50 | print(f"benchmark_vol={compute_portfolio_vol(covar_matrix, benchmark_weights):.2%}, " 51 | f"portfolio_vol={compute_portfolio_vol(covar_matrix, portfolio_weights):.2%}," 52 | f"tracking_error={compute_portfolio_vol(covar_matrix, benchmark_weights-portfolio_weights):.2%}," 53 | f"tracking_error1={np.nansum(compute_benchamark_portfolio_risk_contributions(w_portfolio=portfolio_weights, w_benchmark=benchmark_weights, covar=covar_matrix)):.2%}, " 54 | f"tracking_error 
ind={np.nansum(compute_benchamark_portfolio_risk_contributions(w_portfolio=portfolio_weights, w_benchmark=benchmark_weights, covar=covar_matrix, is_independent_risk=True)):.2%}") 55 | 56 | 57 | risk_contributions = qis.compute_portfolio_risk_contributions(w=portfolio_weights, covar=covar_matrix) 58 | risk_contributions_rel = risk_contributions / np.nansum(risk_contributions) 59 | 60 | tre_contributions = qis.compute_portfolio_risk_contributions(w=(portfolio_weights-benchmark_weights), covar=covar_matrix) 61 | tre_contributions_rel = tre_contributions / np.nansum(tre_contributions) 62 | 63 | tre_contributions1 = compute_benchamark_portfolio_risk_contributions(w_portfolio=portfolio_weights, w_benchmark=benchmark_weights, covar=covar_matrix) 64 | tre_contributions_rel1 = tre_contributions1 / np.nansum(tre_contributions1) 65 | 66 | tre_contributions_ind = compute_benchamark_portfolio_risk_contributions(w_portfolio=portfolio_weights, w_benchmark=benchmark_weights, covar=covar_matrix, is_independent_risk=True) 67 | tre_contributions_ind_rel = tre_contributions_ind / np.nansum(tre_contributions_ind) 68 | 69 | 70 | df = pd.concat([benchmark_weights.rename('benchmark'), 71 | portfolio_weights.rename('portfolio'), 72 | risk_contributions.rename('risk-contribs bp'), 73 | risk_contributions_rel.rename('risk-contribs %'), 74 | tre_contributions.rename('tre contribs bp'), 75 | tre_contributions_rel.rename('tre contribs %'), 76 | tre_contributions1.rename('tre contribs1 bp'), 77 | tre_contributions_rel1.rename('tre contribs1 %'), 78 | tre_contributions_ind.rename('tre contribs ind bp'), 79 | tre_contributions_ind_rel.rename('tre contribs ind %'), 80 | ], axis=1).sort_values(by='portfolio', ascending=False) 81 | df.loc['total', :] = df.sum(axis=0) 82 | qis.plot_df_table(df=df, var_format='{:.2%}') 83 | 84 | plt.show() 85 | 86 | 87 | -------------------------------------------------------------------------------- /optimalportfolios/examples/crypto_allocation/README.md: 
-------------------------------------------------------------------------------- 1 | Implementation of simulations for paper: 2 | 3 | Sepp A. (2023) Optimal Allocation to Cryptocurrencies in Diversified Portfolios, 4 | Risk (October 2023, 1-6), Available at SSRN: https://ssrn.com/abstract=4217841 5 | 6 | The analysis presented in the paper can be replicated or extended using this module 7 | 8 | Implementation steps: 9 | 1) Populate the time series of asset prices in the investable universe using 10 | ```python 11 | optimaportfolios/examples/crypto_allocation/load_prices.py 12 | ``` 13 | 14 | Price data for some assets can be fetched from local csv files, some can be generated on the fly 15 | 16 | Run 17 | ```python 18 | update_prices() 19 | ``` 20 | 21 | 2) Generate article figures using unit tests in 22 | ```python 23 | optimaportfolios/examples/crypto_allocation/article_figures.py 24 | ``` 25 | 26 | 3) Generate reports of simulated investment portfolios as reported in the article 27 | ```python 28 | optimaportfolios/examples/crypto_allocation/backtest_portfolios_for_article.py 29 | ``` 30 | 31 | -------------------------------------------------------------------------------- /optimalportfolios/examples/crypto_allocation/data/CTA_Historical.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/crypto_allocation/data/CTA_Historical.xlsx -------------------------------------------------------------------------------- /optimalportfolios/examples/crypto_allocation/data/Macro_Trading_Index_Historical.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/crypto_allocation/data/Macro_Trading_Index_Historical.xlsx 
-------------------------------------------------------------------------------- /optimalportfolios/examples/figures/MinVariance_multi_covar_estimator_backtest.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/MinVariance_multi_covar_estimator_backtest.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/figures/example_customised_report.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/example_customised_report.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/figures/example_portfolio_factsheet1.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/example_portfolio_factsheet1.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/figures/example_portfolio_factsheet2.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/example_portfolio_factsheet2.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/figures/max_diversification_span.PNG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/max_diversification_span.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/figures/multi_optimisers_backtest.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/examples/figures/multi_optimisers_backtest.PNG -------------------------------------------------------------------------------- /optimalportfolios/examples/lasso_covar_estimation.py: -------------------------------------------------------------------------------- 1 | # packages 2 | import pandas as pd 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from sklearn.linear_model import MultiTaskLasso 6 | from enum import Enum 7 | 8 | import yfinance as yf 9 | import qis as qis 10 | 11 | from optimalportfolios import (LassoModel, LassoModelType, 12 | estimate_lasso_covar, 13 | estimate_rolling_lasso_covar_different_freq) 14 | 15 | # select multi asset ETFs 16 | instrument_data = dict(IEFA='Equity', 17 | IEMG='Equity', 18 | ITOT='Equity', 19 | DVY='Equity', 20 | AGG='Bonds', 21 | IUSB='Bonds', 22 | GVI='Bonds', 23 | GBF='Bonds', 24 | AOR='Mixed', # growth 25 | AOA='Mixed', # aggressive 26 | AOM='Mixed', # moderate 27 | AOK='Mixed') 28 | group_data = pd.Series(instrument_data) 29 | sampling_freqs = group_data.map({'Equity': 'ME', 'Bonds': 'ME', 'Mixed': 'QE'}) 30 | 31 | asset_tickers = group_data.index.to_list() 32 | benchmark_tickers = ['SPY', 'IEF', 'LQD', 'USO', 'GLD', 'UUP'] 33 | asset_group_loadings = qis.set_group_loadings(group_data=group_data) 34 | # print(asset_group_loadings) 35 | 36 | asset_prices = yf.download(asset_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][asset_tickers].asfreq('B', method='ffill') 
benchmark_prices = yf.download(benchmark_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][benchmark_tickers].reindex(
    index=asset_prices.index, method='ffill')


class LocalTests(Enum):
    """Enumeration of runnable local examples."""
    LASSO_BETAS = 1
    LASSO_COVAR_DIFFERENT_FREQUENCIES = 3


def run_local_test(local_test: LocalTests) -> None:
    """Run local tests for development and debugging purposes.

    These are integration tests that download real data and generate reports.
    Use for quick verification during development.

    Args:
        local_test: which example from ``LocalTests`` to run.
    """

    pd.set_option('display.max_rows', 500)
    pd.set_option('display.max_columns', 500)
    pd.set_option('display.width', 1000)

    # set lasso model params; demean=False because x and y are demeaned manually below
    lasso_params = dict(group_data=group_data, reg_lambda=1e-4, span=120, demean=False, solver='ECOS_BB')

    # set x (factor returns) and y (asset returns) at monthly frequency
    y = qis.to_returns(asset_prices, freq='ME', drop_first=True)
    x = qis.to_returns(benchmark_prices, freq='ME', drop_first=True)
    # demean in-sample
    y = y - np.nanmean(y, axis=0)
    x = x - np.nanmean(x, axis=0)

    if local_test == LocalTests.LASSO_BETAS:
        # full regression: lasso with zero penalty reduces to multivariate least squares
        lasso_model_full = LassoModel(model_type=LassoModelType.LASSO, **qis.update_kwargs(lasso_params, dict(reg_lambda=0.0)))
        betas0, total_vars, residual_vars, r2_t = lasso_model_full.fit(x=x, y=y).compute_residual_alpha_r2()
        betas0 = betas0.where(np.abs(betas0) > 1e-5, other=np.nan)  # mask near-zero betas for display

        # independent Lasso
        lasso_model = LassoModel(model_type=LassoModelType.LASSO, **lasso_params)
        betas_lasso, total_vars, residual_vars, r2_t = lasso_model.fit(x=x, y=y).compute_residual_alpha_r2()
        betas_lasso = betas_lasso.where(np.abs(betas_lasso) > 1e-5, other=np.nan)

        # group Lasso
        group_lasso_model = LassoModel(model_type=LassoModelType.GROUP_LASSO, **lasso_params)
        betas_group_lasso, total_vars, residual_vars, r2_t = group_lasso_model.fit(x=x,
                                                                                   y=y).compute_residual_alpha_r2()
        betas_group_lasso = betas_group_lasso.where(np.abs(betas_group_lasso) > 1e-5, other=np.nan)

        fig, axs = plt.subplots(3, 1, figsize=(12, 10), tight_layout=True)
        # fix: panels were labelled (A), (A), (B); relabel sequentially (A), (B), (C)
        qis.plot_heatmap(df=betas0, title='(A) Multivariate Regression Betas', var_format='{:.2f}', ax=axs[0])
        qis.plot_heatmap(df=betas_lasso, title='(B) Independent Lasso Betas', var_format='{:.2f}', ax=axs[1])
        qis.plot_heatmap(df=betas_group_lasso, title='(C) Group Lasso Betas', var_format='{:.2f}', ax=axs[2])

    elif local_test == LocalTests.LASSO_COVAR_DIFFERENT_FREQUENCIES:
        # estimate rolling covariances where assets are sampled at different return frequencies
        lasso_model = LassoModel(model_type=LassoModelType.GROUP_LASSO_CLUSTERS, **lasso_params)
        covar_data = estimate_rolling_lasso_covar_different_freq(risk_factor_prices=benchmark_prices,
                                                                 prices=asset_prices,
                                                                 returns_freqs=sampling_freqs,
                                                                 time_period=qis.TimePeriod('31Dec2019', '13Dec2024'),
                                                                 rebalancing_freq='YE',
                                                                 lasso_model=lasso_model,
                                                                 is_apply_vol_normalised_returns=False
                                                                 )
        for date, covar in covar_data.y_covars.items():
            print(date)
            print(covar)

        for date, beta in covar_data.asset_last_betas_t.items():
            print(date)
            print(beta)

    plt.show()


if __name__ == '__main__':

    run_local_test(local_test=LocalTests.LASSO_COVAR_DIFFERENT_FREQUENCIES)
IEFA='Equity', 15 | IEMG='Equity', 16 | ITOT='Equity', 17 | DVY='Equity', 18 | AGG='Bonds', 19 | IUSB='Bonds', 20 | GVI='Bonds', 21 | GBF='Bonds', 22 | AOR='Mixed', # growth 23 | AOA='Mixed', # aggressive 24 | AOM='Mixed', # moderate 25 | AOK='Mixed', # conservatives 26 | GSG='Commodts', 27 | COMT='Commodts', 28 | PDBC='Commodts', 29 | FTGC='Commodts') 30 | instrument_data = pd.Series(instrument_data) 31 | asset_tickers = instrument_data.index.to_list() 32 | benchmark_tickers = ['SPY', 'IEF', 'LQD', 'USO', 'GLD', 'UUP'] 33 | asset_group_loadings = qis.set_group_loadings(group_data=instrument_data) 34 | print(asset_group_loadings) 35 | 36 | asset_prices = yf.download(asset_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][asset_tickers].asfreq('B', method='ffill') 37 | benchmark_prices = yf.download(benchmark_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][benchmark_tickers].reindex(index=asset_prices.index, method='ffill') 38 | 39 | y = qis.to_returns(asset_prices, freq='ME', drop_first=True) 40 | x = qis.to_returns(benchmark_prices, freq='ME', drop_first=True) 41 | y = y.to_numpy() - np.nanmean(y, axis=0) 42 | x = x.to_numpy() - np.nanmean(x, axis=0) 43 | 44 | params = dict(reg_lambda=1e-5, span=24, nonneg=False) 45 | 46 | 47 | #beta2 = solve_lasso_problem_2d(x=x, y=y, **params, apply_independent_nan_filter=True) 48 | #beta2 = pd.DataFrame(beta2, index=benchmark_tickers, columns=asset_tickers) 49 | #beta2 = beta2.where(np.abs(beta2) > 1e-4, other=np.nan) 50 | 51 | beta3, _, _ = solve_lasso_cvx_problem(x=x, y=y, **params, apply_independent_nan_filter=False) 52 | beta3 = pd.DataFrame(beta3, index=benchmark_tickers, columns=asset_tickers) 53 | beta3 = beta3.where(np.abs(beta3) > 1e-4, other=np.nan) 54 | print(beta3) 55 | 56 | beta4 = solve_group_lasso_cvx_problem(x=x, y=y, group_loadings=asset_group_loadings.to_numpy(), **params) 57 | beta4 = pd.DataFrame(beta4, index=benchmark_tickers, 
columns=asset_tickers) 58 | beta4 = beta4.where(np.abs(beta4) > 1e-4, other=np.nan) 59 | print(beta4) 60 | 61 | model = MultiTaskLasso(alpha=1e-3, fit_intercept=False) 62 | 63 | x, y = qis.select_non_nan_x_y(x=x, y=y) 64 | model.fit(X=x, y=y) 65 | params = pd.DataFrame(model.coef_.T, index=benchmark_tickers, columns=asset_tickers) 66 | params = params.where(np.abs(params) > 1e-4, other=np.nan) 67 | print(params) 68 | 69 | fig, axs = plt.subplots(3, 1, figsize=(12, 8), tight_layout=True) 70 | qis.plot_heatmap(df=beta3, title='independent betas same nonnan basis', var_format='{:.2f}', ax=axs[0]) 71 | qis.plot_heatmap(df=beta4, title='group betas same nonnan basis', var_format='{:.2f}', ax=axs[1]) 72 | qis.plot_heatmap(df=params, title='multi Lasso', var_format='{:.2f}', ax=axs[2]) 73 | 74 | 75 | plt.show() 76 | 77 | 78 | 79 | 80 | -------------------------------------------------------------------------------- /optimalportfolios/examples/lasso_risk_model.py: -------------------------------------------------------------------------------- 1 | # packages 2 | import pandas as pd 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | import seaborn as sns 6 | import yfinance as yf 7 | import qis as qis 8 | 9 | import optimalportfolios.local_path 10 | from optimalportfolios import (LassoModel, LassoModelType, 11 | Constraints, 12 | rolling_risk_budgeting, 13 | estimate_rolling_lasso_covar_different_freq) 14 | 15 | # select multi asset ETFs 16 | instrument_data = dict(IEFA='Equity', 17 | IEMG='Equity', 18 | DVY='Equity', 19 | AGG='Bonds', 20 | IUSB='Bonds', 21 | GVI='Bonds', 22 | AOR='Mixed', # growth 23 | AOA='Mixed', # aggressive 24 | AOM='Mixed') # moderate 25 | 26 | group_data = pd.Series(instrument_data) 27 | ac_group_order = ['Equity', 'Bonds', 'Bonds'] 28 | # select different observation periods 29 | sampling_freqs = group_data.map({'Equity': 'ME', 'Bonds': 'ME', 'Mixed': 'QE'}) 30 | 31 | # set 32 | asset_tickers = group_data.index.to_list() 33 | 
risk_factor_tickers = ['SPY', 'IEF', 'LQD', 'USO', 'GLD', 'UUP']

# fetch daily close prices; forward-fill to business-day frequency
prices = yf.download(asset_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][asset_tickers].asfreq('B', method='ffill')
# align risk factor prices to the asset price index
risk_factor_prices = yf.download(risk_factor_tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][risk_factor_tickers].reindex(
    index=prices.index, method='ffill')


# estimate asset betas and covar matrix
# note: mixed sampling frequencies (ME for Equity/Bonds, QE for Mixed) are passed via sampling_freqs
time_period=qis.TimePeriod('31Dec2015', '13Dec2024')
lasso_params = dict(reg_lambda=1e-5, span=120, demean=False, solver='ECOS_BB')
lasso_model = LassoModel(model_type=LassoModelType.GROUP_LASSO_CLUSTERS, **lasso_params)
covar_data = estimate_rolling_lasso_covar_different_freq(risk_factor_prices=risk_factor_prices,
                                                         prices=prices,
                                                         returns_freqs=sampling_freqs,
                                                         time_period=time_period,
                                                         factor_returns_freq='W-WED',
                                                         rebalancing_freq='QE',
                                                         lasso_model=lasso_model
                                                         )
# print betas
for date, beta in covar_data.asset_last_betas_t.items():
    print(date)
    print(beta)

# build equal risk budget portfolio
risk_budget = pd.Series(1.0 / len(prices.columns), index=prices.columns)
risk_budget_weights = rolling_risk_budgeting(prices=prices,
                                             constraints0=Constraints(),
                                             time_period=time_period,
                                             covar_dict=covar_data.y_covars,
                                             risk_budget=risk_budget)
erb_portfolio_data = qis.backtest_model_portfolio(prices=prices,
                                                  weights=risk_budget_weights,
                                                  weight_implementation_lag=1,
                                                  ticker='EqualRisk')

# build simple equal weight portfolio
# note: risk_budget doubles as the equal-weight vector (1/n per asset)
ew_portfolio_data = qis.backtest_model_portfolio(prices=prices,
                                                 weights=risk_budget,
                                                 weight_implementation_lag=1,
                                                 ticker='EqualWeight')

multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas=[erb_portfolio_data, ew_portfolio_data],
                                              covar_dict=covar_data.y_covars,
                                              benchmark_prices=risk_factor_prices['SPY'].to_frame())


# get linear model
risk_model = covar_data.get_linear_factor_model(x_factors=risk_factor_prices, y_assets=prices)

# aggregate factor exposures of the equal-risk portfolio through time
portfolio_factor_betas = risk_model.compute_agg_factor_exposures(weights=erb_portfolio_data.get_weights())

# portfolio returns at portfolio_factor_betas.index
portfolio_returns = qis.to_returns(prices=erb_portfolio_data.get_portfolio_nav().reindex(index=portfolio_factor_betas.index).ffill(), is_first_zero=True)
attributions = qis.compute_benchmarks_beta_attribution_from_returns(portfolio_returns=portfolio_returns,
                                                                    benchmark_returns=risk_model.x,
                                                                    portfolio_benchmark_betas=portfolio_factor_betas,
                                                                    total_name='Total')

# factor risk
factor_rcs_ratios, factor_risk_contrib_idio, factor_risk_contrib, portfolio_var \
    = risk_model.compute_factor_risk_contribution(weights=erb_portfolio_data.get_weights())

with sns.axes_style("darkgrid"):
    fig, axs = plt.subplots(4, 1, figsize=(12, 12), tight_layout=True)
    qis.plot_time_series(df=portfolio_factor_betas,
                         title=f"Portfolio Factor Beta Exposures",
                         var_format='{:,.2f}',
                         ax=axs[0])
    qis.plot_time_series(df=attributions.cumsum(axis=0),
                         title=f"Portfolio Factor Attribution",
                         var_format='{:,.2%}',
                         ax=axs[1])
    qis.plot_time_series(df=factor_risk_contrib,
                         title=f"Portfolio Factor Risk Attribution",
                         var_format='{:,.2%}',
                         ax=axs[2])

    # convert variances to vol units for display; 'total' column sums variances across factors
    portfolio_var['total'] = np.sum(portfolio_var, axis=1)
    portfolio_var = np.sqrt(portfolio_var)
    print(portfolio_var)

    qis.plot_time_series(df=portfolio_var,
                         title=f"Portfolio sqrt(Vars)",
                         var_format='{:,.2%}',
                         ax=axs[3])

# stacked bar chart of relative factor risk-contribution ratios
fig, ax = plt.subplots(1, 1, figsize=(12, 12), tight_layout=True)
qis.plot_stack(df=factor_rcs_ratios,
               use_bar_plot=True,
               title=f"Relative",
               var_format='{:,.2%}',
               ax=ax)
"""
example of backtester with long-short weights
"""
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import yfinance as yf
from typing import Tuple
import qis as qis

# package
from optimalportfolios import compute_rolling_optimal_weights, PortfolioObjective, Constraints
import optimalportfolios.local_path as local_path


# 1. we define the investment universe and allocation by asset classes
def fetch_universe_data() -> Tuple[pd.DataFrame, pd.DataFrame, pd.Series]:
    """
    fetch universe data for the portfolio construction:
    1. dividend and split adjusted end of day prices: price data may start / end at different dates
    2. benchmark prices which is used for portfolio reporting and benchmarking
    3. universe group data for portfolio reporting and risk attribution for large universes
    this function is using yfinance to fetch the price data
    """
    universe_data = dict(SPY='Equities',
                         QQQ='Equities',
                         EEM='Equities',
                         TLT='Bonds',
                         IEF='Bonds',
                         LQD='Credit',
                         HYG='HighYield',
                         GLD='Gold')
    tickers = list(universe_data.keys())
    group_data = pd.Series(universe_data)
    prices = yf.download(tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close']
    prices = prices[tickers]  # arrange as given
    prices = prices.asfreq('B', method='ffill')  # refill at B frequency
    benchmark_prices = prices[['SPY', 'TLT']]
    return prices, benchmark_prices, group_data


# 2. get universe data
prices, benchmark_prices, group_data = fetch_universe_data()
time_period = qis.TimePeriod('31Dec2004', '17Apr2025')  # period for computing weights backtest

# 3.a. define optimisation setup
# fix: MAX_DIVERSIFICATION and MAXIMUM_SHARPE_RATIO currently cannot work with long-short
# weights, so use MIN_VARIANCE as the long-short portfolio objective
portfolio_objective = PortfolioObjective.MIN_VARIANCE  # define portfolio objective
returns_freq = 'W-WED'  # use weekly returns
rebalancing_freq = 'QE'  # weights rebalancing frequency: rebalancing is quarterly on WED
span = 52  # span of number of returns_freq-returns for covariance estimation = 1y of weekly returns

# net exposure = sum(weights) is constrained to [min_exposure, max_exposure] = [-1, 1]
constraints0 = Constraints(is_long_only=False,  # negative weights are allowed
                           max_exposure=1.0,  # defines maximum net exposure = sum(weights)
                           min_exposure=-1.0,  # defines minimum net exposure = sum(weights)
                           min_weights=pd.Series(-0.25, index=prices.columns),  # can go negative
                           max_weights=pd.Series(0.25, index=prices.columns)
                           )

# 3.b. compute solvers portfolio weights rebalanced every quarter
weights = compute_rolling_optimal_weights(prices=prices,
                                          portfolio_objective=portfolio_objective,
                                          constraints0=constraints0,
                                          time_period=time_period,
                                          rebalancing_freq=rebalancing_freq,
                                          span=span)

# 4. given portfolio weights, construct the performance of the portfolio
funding_rate = None  # on positive / negative cash balances
rebalancing_costs = 0.0010  # rebalancing costs per volume = 10bp
weight_implementation_lag = 1  # portfolio is implemented next day after weights are computed
portfolio_data = qis.backtest_model_portfolio(prices=prices.loc[weights.index[0]:, :],
                                              weights=weights,
                                              ticker='LongShortPortfolio',
                                              funding_rate=funding_rate,
                                              weight_implementation_lag=weight_implementation_lag,
                                              rebalancing_costs=rebalancing_costs)

# 5. using portfolio_data run the reporting with strategy factsheet
# for group-based reporting set_group_data
portfolio_data.set_group_data(group_data=group_data, group_order=list(group_data.unique()))
# set time period for portfolio reporting
figs = qis.generate_strategy_factsheet(portfolio_data=portfolio_data,
                                       benchmark_prices=benchmark_prices,
                                       add_weights_turnover_sheet=True,
                                       time_period=time_period,
                                       **qis.fetch_default_report_kwargs(time_period=time_period))
# save report to pdf and png
# fix: use the package output path instead of a hard-coded personal directory,
# consistent with the other example scripts
qis.save_figs_to_pdf(figs=figs,
                     file_name=f"{portfolio_data.nav.name}_portfolio_factsheet",
                     orientation='landscape',
                     local_path=local_path.get_output_path())

plt.show()
import pandas as pd
import matplotlib.pyplot as plt
from typing import List, Optional
from enum import Enum
import qis as qis

# package
from optimalportfolios import (Constraints, PortfolioObjective,
                               backtest_rolling_optimal_portfolio,
                               estimate_rolling_ewma_covar,
                               LassoModelType, LassoModel,
                               estimate_rolling_lasso_covar)
from optimalportfolios.examples.universe import fetch_benchmark_universe_data

# portfolio objectives supported by this backtest
SUPPORTED_SOLVERS = [PortfolioObjective.EQUAL_RISK_CONTRIBUTION,
                     PortfolioObjective.MIN_VARIANCE,
                     PortfolioObjective.MAX_DIVERSIFICATION]


def run_multi_covar_estimators_backtest(prices: pd.DataFrame,
                                        benchmark_prices: pd.DataFrame,
                                        ac_benchmark_prices: pd.DataFrame,
                                        group_data: pd.Series,
                                        time_period: qis.TimePeriod,  # for weights
                                        perf_time_period: qis.TimePeriod,  # for reporting
                                        returns_freq: str = 'W-WED',  # covar matrix estimation on weekly returns
                                        rebalancing_freq: str = 'QE',  # portfolio rebalancing
                                        span: int = 52,  # ewma span for covariance matrix estimation: span = 1y of weekly returns
                                        portfolio_objective: PortfolioObjective = PortfolioObjective.MAX_DIVERSIFICATION,
                                        squeeze_factor: Optional[float] = None
                                        ) -> List[plt.Figure]:
    """
    backtest the same optimiser across multiple covariance estimators
    (EWMA, Lasso, Group Lasso; each with and without vol-normalised returns)
    portfolios are rebalanced at rebalancing_freq

    Raises:
        NotImplementedError: if portfolio_objective is not in SUPPORTED_SOLVERS
    """
    if portfolio_objective not in SUPPORTED_SOLVERS:
        raise NotImplementedError(f"not supported {portfolio_objective}")

    # 1. EWMA covar
    ewma_covars = estimate_rolling_ewma_covar(prices=prices, time_period=time_period,
                                              rebalancing_freq=rebalancing_freq,
                                              returns_freq=returns_freq,
                                              span=span,
                                              is_apply_vol_normalised_returns=False,
                                              squeeze_factor=squeeze_factor)
    # 2. ewma covar with vol norm returns
    ewma_covars_vol_norm = estimate_rolling_ewma_covar(prices=prices, time_period=time_period,
                                                       rebalancing_freq=rebalancing_freq,
                                                       returns_freq=returns_freq,
                                                       span=span,
                                                       is_apply_vol_normalised_returns=True,
                                                       squeeze_factor=squeeze_factor)
    # shared lasso params
    lasso_kwargs = dict(risk_factor_prices=ac_benchmark_prices,
                        prices=prices,
                        time_period=time_period,
                        returns_freq=returns_freq,
                        rebalancing_freq=rebalancing_freq,
                        span=span,
                        squeeze_factor=squeeze_factor,
                        residual_var_weight=1.0)
    # 3. Lasso model using ac_benchmarks from universe
    # (fix: the previous comment mislabelled this section as "Group Lasso")
    lasso_model = LassoModel(model_type=LassoModelType.LASSO,
                             group_data=group_data, reg_lambda=1e-6, span=span,
                             warmup_period=span, solver='ECOS_BB')
    lasso_covar_data = estimate_rolling_lasso_covar(lasso_model=lasso_model,
                                                    is_apply_vol_normalised_returns=False,
                                                    **lasso_kwargs)
    lasso_covars = lasso_covar_data.y_covars
    lasso_covar_data_norm = estimate_rolling_lasso_covar(lasso_model=lasso_model,
                                                         is_apply_vol_normalised_returns=True,
                                                         **lasso_kwargs)
    lasso_covars_norm = lasso_covar_data_norm.y_covars

    # 4. Group Lasso model using ac_benchmarks from universe
    group_lasso_model = LassoModel(model_type=LassoModelType.GROUP_LASSO,
                                   group_data=group_data, reg_lambda=1e-6, span=span, solver='ECOS_BB')
    # fix: keep the covar-data objects in separate variables instead of
    # reusing/overwriting the same name for the object and its .y_covars
    group_lasso_covar_data = estimate_rolling_lasso_covar(lasso_model=group_lasso_model,
                                                          is_apply_vol_normalised_returns=False,
                                                          **lasso_kwargs)
    group_lasso_covars = group_lasso_covar_data.y_covars
    group_lasso_covar_data_norm = estimate_rolling_lasso_covar(lasso_model=group_lasso_model,
                                                               is_apply_vol_normalised_returns=True,
                                                               **lasso_kwargs)
    group_lasso_covars_norm = group_lasso_covar_data_norm.y_covars
    # create dict of estimated covars; keys become the portfolio tickers in the report
    # (fix: corrected label typo 'VolNorn' -> 'VolNorm')
    covars_dict = {'EWMA': ewma_covars, 'EWMA vol norm': ewma_covars_vol_norm,
                   'Lasso': lasso_covars, 'Lasso VolNorm': lasso_covars_norm,
                   'Group Lasso': group_lasso_covars, 'Group Lasso VolNorm': group_lasso_covars_norm}

    # set global constraints for portfolios
    constraints0 = Constraints(is_long_only=True,
                               min_weights=pd.Series(0.0, index=prices.columns),
                               max_weights=pd.Series(0.5, index=prices.columns))

    # now create a list of portfolios, one backtest per covariance estimator
    portfolio_datas = []
    for key, covar_dict in covars_dict.items():
        portfolio_data = backtest_rolling_optimal_portfolio(prices=prices,
                                                            portfolio_objective=portfolio_objective,
                                                            constraints0=constraints0,
                                                            time_period=time_period,
                                                            perf_time_period=perf_time_period,
                                                            covar_dict=covar_dict,
                                                            rebalancing_costs=0.0010,  # 10bp for rebalancing
                                                            weight_implementation_lag=1,  # weights are implemented next day after computing
                                                            ticker=f"{key}"  # portfolio id
                                                            )
        portfolio_data.set_group_data(group_data=group_data)
        portfolio_datas.append(portfolio_data)

    # run cross portfolio report
    multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas=portfolio_datas, benchmark_prices=benchmark_prices)
    figs = qis.generate_multi_portfolio_factsheet(multi_portfolio_data=multi_portfolio_data,
                                                  time_period=time_period,
                                                  add_strategy_factsheets=False,
                                                  **qis.fetch_default_report_kwargs(time_period=time_period))
    return figs
qis.save_figs_to_pdf(figs=figs, 163 | file_name=f"{portfolio_objective.value} multi_covar_estimator_backtest", 164 | orientation='landscape', 165 | local_path=local_path.get_output_path()) 166 | plt.show() 167 | 168 | 169 | if __name__ == '__main__': 170 | 171 | run_local_test(local_test=LocalTests.MULTI_COVAR_ESTIMATORS_BACKTEST) 172 | -------------------------------------------------------------------------------- /optimalportfolios/examples/multi_optimisers_backtest.py: -------------------------------------------------------------------------------- 1 | """ 2 | backtest several optimisers 3 | """ 4 | # imports 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | from typing import List 8 | from enum import Enum 9 | import qis as qis 10 | 11 | # package 12 | from optimalportfolios import Constraints, backtest_rolling_optimal_portfolio, PortfolioObjective 13 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 14 | 15 | 16 | def run_multi_optimisers_backtest(prices: pd.DataFrame, 17 | benchmark_prices: pd.DataFrame, 18 | group_data: pd.Series, 19 | time_period: qis.TimePeriod, # for weights 20 | perf_time_period: qis.TimePeriod # for reporting 21 | ) -> List[plt.Figure]: 22 | """ 23 | backtest multi optimisers 24 | test maximum diversification optimiser to span parameter 25 | span is number period for ewm filter 26 | span = 20 for daily data implies last 20 (trading) days contribute 50% of weight for covariance estimation 27 | we test sensitivity from fast (small span) to slow (large span) 28 | """ 29 | portfolio_objectives = {'EqualRisk': PortfolioObjective.EQUAL_RISK_CONTRIBUTION, 30 | 'MinVariance': PortfolioObjective.MIN_VARIANCE, 31 | 'MaxDiversification': PortfolioObjective.MAX_DIVERSIFICATION, 32 | 'MaxSharpe': PortfolioObjective.MAXIMUM_SHARPE_RATIO, 33 | 'MaxCarraMixture': PortfolioObjective.MAX_CARA_MIXTURE} 34 | 35 | # set global constaints for portfolios 36 | constraints0 = Constraints(is_long_only=True, 37 | 
min_weights=pd.Series(0.0, index=prices.columns), 38 | max_weights=pd.Series(0.5, index=prices.columns)) 39 | 40 | # now create a list of portfolios 41 | portfolio_datas = [] 42 | for ticker, portfolio_objective in portfolio_objectives.items(): 43 | print(ticker) 44 | portfolio_data = backtest_rolling_optimal_portfolio(prices=prices, 45 | portfolio_objective=portfolio_objective, 46 | constraints0=constraints0, 47 | time_period=time_period, 48 | perf_time_period=perf_time_period, 49 | returns_freq='W-WED', # covar matrix estimation on weekly returns 50 | rebalancing_freq='QE', # portfolio rebalancing 51 | span=52, # ewma span for covariance matrix estimation: span = 1y of weekly returns 52 | roll_window=5*52, # linked to returns at rebalancing_freq: 5y of data for max sharpe and mixture carra 53 | carra=0.5, # carra parameter 54 | n_mixures=3, # for mixture carra utility 55 | rebalancing_costs=0.0010, # 10bp for rebalancin 56 | weight_implementation_lag=1, # weights are implemnted next day after comuting 57 | ticker=f"{ticker}" # portfolio id 58 | ) 59 | portfolio_data.set_group_data(group_data=group_data) 60 | portfolio_datas.append(portfolio_data) 61 | 62 | # run cross portfolio report 63 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas=portfolio_datas, benchmark_prices=benchmark_prices) 64 | figs = qis.generate_multi_portfolio_factsheet(multi_portfolio_data=multi_portfolio_data, 65 | time_period=time_period, 66 | add_strategy_factsheets=False, 67 | **qis.fetch_default_report_kwargs(time_period=time_period)) 68 | return figs 69 | 70 | 71 | class LocalTests(Enum): 72 | MULTI_OPTIMISERS_BACKTEST = 1 73 | 74 | 75 | def run_local_test(local_test: LocalTests): 76 | """Run local tests for development and debugging purposes. 77 | 78 | These are integration tests that download real data and generate reports. 79 | Use for quick verification during development. 
def fetch_universe_data() -> Tuple[pd.DataFrame, pd.DataFrame, pd.Series]:
    """Fetch inputs for the portfolio construction example via yfinance.

    Returns a tuple of:
      1. dividend/split-adjusted end-of-day prices (instruments may start/end at different dates)
      2. benchmark prices used for portfolio reporting and benchmarking
      3. instrument -> asset-class series for grouped reporting and risk attribution
    """
    groups_by_ticker = {'SPY': 'Equities',
                        'QQQ': 'Equities',
                        'EEM': 'Equities',
                        'TLT': 'Bonds',
                        'IEF': 'Bonds',
                        'LQD': 'Credit',
                        'HYG': 'HighYield',
                        'GLD': 'Gold'}
    group_data = pd.Series(groups_by_ticker)
    tickers = list(groups_by_ticker.keys())
    raw_close = yf.download(tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close']
    # keep the original ticker ordering and refill prices at business-day frequency
    prices = raw_close[tickers].asfreq('B', method='ffill')
    benchmark_prices = prices[['SPY', 'TLT']]
    return prices, benchmark_prices, group_data
given portfolio weights, construct the performance of the portfolio 64 | funding_rate = None # on positive / negative cash balances 65 | rebalancing_costs = 0.0010 # rebalancing costs per volume = 10bp 66 | weight_implementation_lag = 1 # portfolio is implemented next day after weights are computed 67 | portfolio_data = qis.backtest_model_portfolio(prices=prices.loc[weights.index[0]:, :], 68 | weights=weights, 69 | ticker='MaxDiversification', 70 | funding_rate=funding_rate, 71 | weight_implementation_lag=weight_implementation_lag, 72 | rebalancing_costs=rebalancing_costs) 73 | 74 | # 5. using portfolio_data run the reporting with strategy factsheet 75 | # for group-based reporting set_group_data 76 | portfolio_data.set_group_data(group_data=group_data, group_order=list(group_data.unique())) 77 | # set time period for portfolio reporting 78 | figs = qis.generate_strategy_factsheet(portfolio_data=portfolio_data, 79 | benchmark_prices=benchmark_prices, 80 | add_current_position_var_risk_sheet=True, 81 | time_period=time_period, 82 | **qis.fetch_default_report_kwargs(time_period=time_period)) 83 | # save report to pdf and png 84 | qis.save_figs_to_pdf(figs=figs, 85 | file_name=f"{portfolio_data.nav.name}_portfolio_factsheet", 86 | orientation='landscape', 87 | local_path="C://Users//Artur//OneDrive//analytics//outputs") 88 | qis.save_fig(fig=figs[0], file_name=f"example_portfolio_factsheet1", local_path=f"figures/") 89 | qis.save_fig(fig=figs[1], file_name=f"example_portfolio_factsheet2", local_path=f"figures/") 90 | 91 | 92 | # 6. 
def run_customised_reporting(portfolio_data) -> plt.Figure:
    """Build a custom three-panel report (nav, weights, benchmark scatter) for a portfolio."""
    with sns.axes_style("darkgrid"):
        fig, (nav_ax, weights_ax, scatter_ax) = plt.subplots(3, 1, figsize=(12, 12), tight_layout=True)
        shared_kwargs = {'x_date_freq': 'YE',
                         'framealpha': 0.8,
                         'perf_params': qis.PerfParams(freq='W-WED', freq_reg='ME')}
        # panel 1: cumulative portfolio nav
        portfolio_data.plot_nav(ax=nav_ax, **shared_kwargs)
        # panel 2: portfolio weights sampled quarterly
        portfolio_data.plot_weights(ncol=len(prices.columns)//3,
                                    legend_stats=qis.LegendStats.AVG_LAST,
                                    title='Portfolio weights',
                                    freq='QE',
                                    ax=weights_ax,
                                    **shared_kwargs)
        # panel 3: returns scatter against the first benchmark column
        portfolio_data.plot_returns_scatter(benchmark_price=benchmark_prices.iloc[:, 0],
                                            ax=scatter_ax,
                                            **shared_kwargs)
    return fig
def run_local_test(local_test: LocalTests):
    """Run local tests for development and debugging purposes.

    These are integration tests that download real data and generate reports.
    Use for quick verification during development.
    """

    import optimalportfolios.local_path as local_path

    prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data()

    # group constraints on asset-class allocations: 0% <= sum of ac weights <= 30%
    # (benchmark allocates 20% to each asset class)
    group_min_allocation = pd.Series(0.0, index=ac_loadings.columns)
    group_max_allocation = pd.Series(0.3, index=ac_loadings.columns)
    group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings,
                                                              group_min_allocation=group_min_allocation,
                                                              group_max_allocation=group_max_allocation)
    # long-only, per-instrument weights capped at 20%, plus the group constraints above
    constraints0 = Constraints(is_long_only=True,
                               min_weights=pd.Series(0.0, index=prices.columns),
                               max_weights=pd.Series(0.2, index=prices.columns),
                               group_lower_upper_constraints=group_lower_upper_constraints)

    if local_test == LocalTests.MAX_DIVERSIFICATION_SPAN:

        time_period = qis.TimePeriod(start='31Dec1998', end=prices.index[-1])  # backtest start for weights computation
        perf_time_period = qis.TimePeriod(start='31Dec2004', end=prices.index[-1])  # backtest reporting
        figs = run_max_diversification_sensitivity_to_span(prices=prices,
                                                           benchmark_prices=benchmark_prices,
                                                           constraints0=constraints0,
                                                           group_data=group_data,
                                                           time_period=time_period,
                                                           perf_time_period=perf_time_period)

        # save the first figure as png and the full report as pdf
        qis.save_fig(fig=figs[0], file_name=f"max_diversification_span", local_path=f"figures/")
        qis.save_figs_to_pdf(figs=figs,
                             file_name=f"max_diversification_span",
                             orientation='landscape',
                             local_path=local_path.get_output_path())
        plt.show()
-------------------------------------------------------------------------------- 1 | Example implementation of computing the covariance matrix using Hierarchical Clustering Group Lasso (HCGL) method 2 | for rolling estimation of covariance matrix and for solving Strategic Asset Allocation 3 | using risk-budgeted optimisation as introduced paper 4 | 5 | Sepp A., Ossa I., and Kastenholz M. (2025), "Robust Optimization of Strategic and Tactical Asset Allocation for Multi-Asset Portfolios" 6 | 7 | Available at https://papers.ssrn.com/sol3/papers.cfm?abstract_id=5250221 8 | 9 | Use a universe of ETFs for computing and backtesting of rolling SAA portfolio 10 | 11 | -------------------------------------------------------------------------------- /optimalportfolios/examples/robust_optimisation_saa_taa/hcgl_covar_for_rolling_backtest.py: -------------------------------------------------------------------------------- 1 | """ 2 | minimal example of using Hierarchical Clustering Group Lasso (HCGL) method 3 | for rolling estimation of covariance matrix and for solving Strategic Asset Allocation 4 | using risk-budgeted optimisation as introduced in paper 5 | Sepp A., Ossa I., and Kastenholz M. (2025), 6 | "Robust Optimization of Strategic and Tactical Asset Allocation for Multi-Asset Portfolios" 7 | Available at https://papers.ssrn.com/sol3/papers.cfm?abstract_id=5250221 8 | 9 | Use a universe of ETFs for computing and backtesting of rolling SAA portfolio 10 | """ 11 | import pandas as pd 12 | import matplotlib.pyplot as plt 13 | import yfinance as yf 14 | from typing import Tuple 15 | import qis as qis 16 | 17 | # package 18 | from optimalportfolios import (Constraints, LassoModelType, 19 | LassoModel, CovarEstimator, CovarEstimatorType, 20 | rolling_risk_budgeting) 21 | 22 | 23 | # 1. 
def fetch_universe_data() -> Tuple[pd.DataFrame, pd.DataFrame, pd.Series]:
    """Fetch the ETF universe for the SAA example via yfinance.

    Returns a tuple of:
      1. dividend/split-adjusted end-of-day prices (instruments may start/end at different dates)
      2. benchmark prices used for portfolio reporting and benchmarking
      3. instrument -> asset-class series for grouped reporting and risk attribution
    """
    groups_by_ticker = {'SPY': 'Equities',
                        'EZU': 'Equities',
                        'EEM': 'Equities',
                        'TLT': 'Bonds',
                        'HYG': 'HighYield',
                        'GLD': 'Gold'}
    group_data = pd.Series(groups_by_ticker)
    tickers = list(groups_by_ticker.keys())
    raw_close = yf.download(tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close']
    # keep the given ticker order, restrict to 2000 onwards, refill at business-day frequency
    prices = raw_close[tickers].loc['2000':, :].asfreq('B', method='ffill')
    benchmark_prices = prices[['SPY', 'TLT']]
    return prices, benchmark_prices, group_data
set time period for backtest and fit rolling covars 62 | time_period = qis.TimePeriod('31Dec2004', '30Jun2025') # period for computing weights backtest 63 | rolling_covar_data = covar_estimator.fit_rolling_covars(prices=universe_prices, 64 | risk_factor_prices=risk_factor_prices, 65 | time_period=time_period) 66 | # 5. set equal risk-budgets and compute rolling weights of risk budgets 67 | risk_budget = {asset: 1.0 / len(universe_prices.columns) for asset in universe_prices.columns} 68 | saa_rolling_weights = rolling_risk_budgeting(prices=universe_prices, 69 | time_period=time_period, 70 | covar_dict=rolling_covar_data.y_covars, 71 | risk_budget=risk_budget, 72 | constraints0=Constraints(is_long_only=True)) # trivial constraints 73 | # 6. run backtest using portfolio weights using qis package 74 | saa_portfolio_data = qis.backtest_model_portfolio(prices=universe_prices, 75 | weights=saa_rolling_weights, 76 | ticker='Risk Budget SAA', 77 | weight_implementation_lag=1, # next day after weights computation 78 | rebalancing_costs=0.0010) # rebalancing costs per volume = 10bp 79 | # 7. compute equal weight portfolio as benchmark 80 | benchmark_portfolio_data = qis.backtest_model_portfolio(prices=universe_prices, 81 | weights=risk_budget, 82 | ticker='Equal Weights SAA', 83 | weight_implementation_lag=1, # next day after weights computation 84 | rebalancing_costs=0.0010) # rebalancing costs per volume = 10bp 85 | # 8. 
def plot_static_risk_budgets_vs_weights(prices: pd.DataFrame,
                                        risk_budgets_weights: pd.DataFrame,
                                        given_static_weights: pd.Series,
                                        covar_dict: Dict[pd.Timestamp, pd.DataFrame],
                                        time_period: qis.TimePeriod = None,
                                        figsize=(16, 10),
                                        add_titles: bool = True,
                                        var_format: str = '{:.1%}',
                                        strategy_ticker: str = 'Risk-budgeted portfolio',
                                        benchmark_ticker: str = 'Static portfolio'
                                        ) -> List[plt.Figure]:
    """Compare a risk-budgeted portfolio against a static-weight portfolio.

    Backtests both weight paths with qis, generates the strategy-vs-benchmark
    factsheet and appends three comparison figures: stacked weights, risk
    contributions implied by the covariances, and portfolio volatilities.

    Args:
        prices: instrument prices; the first column is used as the reporting benchmark.
        risk_budgets_weights: rolling weights of the risk-budgeted portfolio.
        given_static_weights: static weights to compare against, by instrument.
        covar_dict: rebalancing date -> covariance matrix used for risk attribution.
        time_period: optional reporting period for the factsheet.
        figsize: size of the appended comparison figures.
        add_titles: whether to add suptitles to the comparison figures.
        var_format: number format for weight/risk-contribution plots.
        strategy_ticker: display name of the risk-budgeted portfolio.
        benchmark_ticker: display name of the static portfolio.

    Returns:
        list of figures (factsheet figures plus the three comparison figures).
    """
    # broadcast the static weights onto the rebalancing dates of the risk-budget portfolio
    static_weights = pd.DataFrame.from_dict({date: given_static_weights for date in risk_budgets_weights.index}, orient='index')

    static_portfolio = qis.backtest_model_portfolio(prices=prices, weights=static_weights,
                                                    ticker=benchmark_ticker)

    risk_budget_portfolio = qis.backtest_model_portfolio(prices=prices, weights=risk_budgets_weights,
                                                         ticker=strategy_ticker)

    multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas=[risk_budget_portfolio, static_portfolio],
                                                  benchmark_prices=prices.iloc[:, 0],
                                                  covar_dict=covar_dict)

    report_kwargs = qis.fetch_default_report_kwargs(reporting_frequency=qis.ReportingFrequency.MONTHLY,
                                                    add_rates_data=False)

    # strategy (idx 0 = risk budget) vs benchmark (idx 1 = static) factsheet
    figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data,
                                                         time_period=time_period,
                                                         strategy_idx=0,
                                                         benchmark_idx=1,
                                                         add_benchmarks_to_navs=False,
                                                         add_exposures_comp=False,
                                                         add_strategy_factsheet=False,
                                                         **report_kwargs)

    # figure 1: time series of strategy vs benchmark weights, stacked by asset class
    kwargs = qis.update_kwargs(report_kwargs, dict(strategy_ticker=f"(B) {strategy_ticker}",
                                                   benchmark_ticker=f"(A) {benchmark_ticker}"))
    fig, axs = plt.subplots(1, 2, figsize=figsize, tight_layout=True)
    if add_titles:
        qis.set_suptitle(fig, title=f"Time series of weights by asset classes")
    figs.append(fig)
    qis.plot_exposures_strategy_vs_benchmark_stack(strategy_exposures=risk_budgets_weights,
                                                   benchmark_exposures=static_weights,
                                                   axs=axs,
                                                   var_format=var_format,
                                                   **kwargs)

    # figure 2: normalised risk contributions implied by the covariance matrices
    rc_kwargs = dict(covar_dict=multi_portfolio_data.covar_dict, normalise=True)
    strategy_risk_contributions_ac = risk_budget_portfolio.compute_risk_contributions_implied_by_covar(**rc_kwargs)
    benchmark_risk_contributions_ac = static_portfolio.compute_risk_contributions_implied_by_covar(**rc_kwargs)
    fig, axs = plt.subplots(1, 2, figsize=figsize, tight_layout=True)
    if add_titles:
        qis.set_suptitle(fig, title=f"Time Series of risk contributions by asset classes")
    figs.append(fig)
    qis.plot_exposures_strategy_vs_benchmark_stack(strategy_exposures=strategy_risk_contributions_ac,
                                                   benchmark_exposures=benchmark_risk_contributions_ac,
                                                   var_format=var_format,
                                                   axs=axs,
                                                   **kwargs)

    # figure 3: ex-ante portfolio volatilities implied by the covariance matrices
    # NOTE(review): "ex_anti" in the qis API presumably means ex-ante
    strategy_ex_anti_vol = risk_budget_portfolio.compute_ex_anti_portfolio_vol_implied_by_covar(covar_dict=covar_dict)
    benchmark_ex_anti_vol = static_portfolio.compute_ex_anti_portfolio_vol_implied_by_covar(covar_dict=covar_dict)
    ex_anti_vols = pd.concat([strategy_ex_anti_vol, benchmark_ex_anti_vol], axis=1)
    fig, ax = plt.subplots(1, 1, figsize=figsize, tight_layout=True)
    figs.append(fig)
    qis.plot_time_series(df=ex_anti_vols, var_format='{:.2%}',
                         title='Ex-anti volatilities',
                         ax=ax, **report_kwargs)
    return figs
104 | """ 105 | 106 | from optimalportfolios import local_path as lp 107 | 108 | is_60_40 = False 109 | 110 | if is_60_40: 111 | given_static_weights = {'SPY': 0.6, 'IEF': 0.4} 112 | else: 113 | given_static_weights = {'SPY': 0.55, 'IEF': 0.35, 'GLD': 0.1} 114 | given_static_weights = pd.Series(given_static_weights) 115 | 116 | prices = yf.download(tickers=given_static_weights.index.to_list(), start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'] 117 | prices = prices[given_static_weights.index].dropna() 118 | print(prices) 119 | 120 | time_period = qis.TimePeriod('31Dec2004', '27Jun2025') 121 | rebalancing_freq = 'QE' 122 | 123 | # compute covar matrix using 1y span 124 | covar_dict = estimate_rolling_ewma_covar(prices=prices, 125 | time_period=time_period, 126 | rebalancing_freq=rebalancing_freq, 127 | returns_freq='W-WED', 128 | span=52) 129 | 130 | if local_test == LocalTests.SOLVE_FOR_RISK_BUDGETS: 131 | risk_budgets = solve_for_risk_budgets_from_given_weights(prices=prices, 132 | given_weights=given_static_weights, 133 | time_period=time_period, 134 | covar_dict=covar_dict) 135 | print(risk_budgets) 136 | 137 | elif local_test == LocalTests.ILLUSTRATE_WEIGHTS: 138 | risk_budgets = solve_for_risk_budgets_from_given_weights(prices=prices, 139 | given_weights=given_static_weights, 140 | time_period=time_period, 141 | covar_dict=covar_dict) 142 | risk_budgets_weights = rolling_risk_budgeting(prices=prices, 143 | time_period=time_period, 144 | covar_dict=covar_dict, 145 | risk_budget=risk_budgets, 146 | constraints0=Constraints(is_long_only=True)) 147 | 148 | figs = plot_static_risk_budgets_vs_weights(prices=prices, 149 | risk_budgets_weights=risk_budgets_weights, 150 | given_static_weights=given_static_weights, 151 | covar_dict=covar_dict, 152 | time_period=time_period) 153 | qis.save_figs_to_pdf(figs, file_name='risk_budget_portfolio', local_path=lp.get_output_path()) 154 | 155 | 156 | if __name__ == '__main__': 157 | 158 | 
run_local_test(local_test=LocalTests.ILLUSTRATE_WEIGHTS) 159 | -------------------------------------------------------------------------------- /optimalportfolios/examples/solvers/carra_mixture.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of minimization of tracking error 3 | """ 4 | import pandas as pd 5 | import matplotlib.pyplot as plt 6 | import qis as qis 7 | from enum import Enum 8 | 9 | from optimalportfolios import (Constraints, GroupLowerUpperConstraints, 10 | compute_tre_turnover_stats, 11 | rolling_maximize_cara_mixture, 12 | wrapper_maximize_cara_mixture, 13 | fit_gaussian_mixture) 14 | 15 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 16 | 17 | 18 | class LocalTests(Enum): 19 | ONE_STEP_OPTIMISATION = 1 20 | ROLLING_OPTIMISATION = 2 21 | 22 | 23 | def run_local_test(local_test: LocalTests): 24 | """Run local tests for development and debugging purposes. 25 | 26 | These are integration tests that download real data and generate reports. 27 | Use for quick verification during development. 
28 | """ 29 | 30 | import optimalportfolios.local_path as lp 31 | 32 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 33 | 34 | # add costraints that each asset class is 10% <= sum ac weights <= 30% (benchamrk is 20% each) 35 | group_min_allocation = pd.Series(0.05, index=ac_loadings.columns) 36 | group_max_allocation = pd.Series(0.25, index=ac_loadings.columns) 37 | group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings, 38 | group_min_allocation=group_min_allocation, 39 | group_max_allocation=group_max_allocation) 40 | constraints0 = Constraints(is_long_only=True, 41 | group_lower_upper_constraints=group_lower_upper_constraints, 42 | min_weights=pd.Series(0.0, index=prices.columns), 43 | max_weights=pd.Series(0.5, index=prices.columns), 44 | weights_0=benchmark_weights) 45 | 46 | if local_test == LocalTests.ONE_STEP_OPTIMISATION: 47 | # optimise using last available data as inputs 48 | returns = qis.to_returns(prices, freq='ME', is_log_returns=True).dropna() 49 | params = fit_gaussian_mixture(x=returns.to_numpy(), n_components=3, scaler=52) 50 | 51 | weights = wrapper_maximize_cara_mixture(means=params.means, 52 | covars=params.covars, 53 | probs=params.probs, 54 | constraints0=constraints0, 55 | tickers=returns.columns.to_list(), 56 | carra=0.5) 57 | 58 | df_weight = pd.concat([benchmark_weights.rename('benchmark'), weights.rename('portfolio')], axis=1) 59 | print(f"weights=\n{df_weight}") 60 | qis.plot_bars(df=df_weight) 61 | 62 | pd_covar = pd.DataFrame(12.0 * qis.compute_masked_covar_corr(data=returns, is_covar=True), 63 | index=prices.columns, columns=prices.columns) 64 | te_vol, turnover, alpha, port_vol, benchmark_vol = compute_tre_turnover_stats(covar=pd_covar.to_numpy(), 65 | benchmark_weights=benchmark_weights, 66 | weights=weights, 67 | weights_0=benchmark_weights) 68 | print(f"port_vol={port_vol:0.4f}, benchmark_vol={benchmark_vol:0.4f}, 
te_vol={te_vol:0.4f}, " 69 | f"turnover={turnover:0.4f}, alpha={alpha:0.4f}") 70 | 71 | plt.show() 72 | 73 | elif local_test == LocalTests.ROLLING_OPTIMISATION: 74 | # optimise using last available data as inputs 75 | time_period = qis.TimePeriod('31Jan2007', '17Apr2025') 76 | rebalancing_costs = 0.0003 77 | 78 | weights = rolling_maximize_cara_mixture(prices=prices, 79 | constraints0=constraints0, 80 | roll_window=12*10, 81 | returns_freq='ME', 82 | time_period=time_period) 83 | print(weights) 84 | portfolio_dict = {'Optimal Portfolio': weights, 85 | 'EqualWeight Portfolio': qis.df_to_equal_weight_allocation(prices, index=weights.index)} 86 | portfolio_datas = [] 87 | for ticker, weights in portfolio_dict.items(): 88 | portfolio_data = qis.backtest_model_portfolio(prices=prices, 89 | weights=weights, 90 | rebalancing_costs=rebalancing_costs, 91 | weight_implementation_lag=1, 92 | ticker=ticker) 93 | portfolio_data.set_group_data(group_data=group_data) 94 | portfolio_datas.append(portfolio_data) 95 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas, benchmark_prices=benchmark_prices) 96 | kwargs = qis.fetch_default_report_kwargs(time_period=time_period, add_rates_data=True) 97 | figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data, 98 | time_period=time_period, 99 | add_strategy_factsheet=True, 100 | add_grouped_exposures=False, 101 | add_grouped_cum_pnl=False, 102 | **kwargs) 103 | qis.save_figs_to_pdf(figs=figs, 104 | file_name=f"carra utility portfolio", orientation='landscape', 105 | local_path=lp.get_output_path()) 106 | 107 | 108 | if __name__ == '__main__': 109 | 110 | run_local_test(local_test=LocalTests.ROLLING_OPTIMISATION) 111 | -------------------------------------------------------------------------------- /optimalportfolios/examples/solvers/max_diversification.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of minimization of tracking error 
3 | """ 4 | import pandas as pd 5 | import matplotlib.pyplot as plt 6 | import qis as qis 7 | from enum import Enum 8 | 9 | from optimalportfolios import (Constraints, GroupLowerUpperConstraints, CovarEstimator, 10 | compute_tre_turnover_stats, 11 | rolling_maximise_diversification, 12 | wrapper_maximise_diversification) 13 | 14 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 15 | 16 | 17 | class LocalTests(Enum): 18 | ONE_STEP_OPTIMISATION = 1 19 | TRACKING_ERROR_GRID = 2 20 | ROLLING_OPTIMISATION = 3 21 | 22 | 23 | def run_local_test(local_test: LocalTests): 24 | """Run local tests for development and debugging purposes. 25 | 26 | These are integration tests that download real data and generate reports. 27 | Use for quick verification during development. 28 | """ 29 | 30 | import optimalportfolios.local_path as lp 31 | 32 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 33 | 34 | # add costraints that each asset class is 10% <= sum ac weights <= 30% (benchamrk is 20% each) 35 | group_min_allocation = pd.Series(0.1, index=ac_loadings.columns) 36 | group_max_allocation = pd.Series(0.3, index=ac_loadings.columns) 37 | group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings, 38 | group_min_allocation=group_min_allocation, 39 | group_max_allocation=group_max_allocation) 40 | constraints0 = Constraints(is_long_only=True, 41 | min_weights=0.0 * benchmark_weights, 42 | max_weights=3.0 * benchmark_weights, 43 | weights_0=benchmark_weights, 44 | group_lower_upper_constraints=group_lower_upper_constraints) 45 | 46 | if local_test == LocalTests.ONE_STEP_OPTIMISATION: 47 | # optimise using last available data as inputs 48 | returns = qis.to_returns(prices, freq='W-WED', is_log_returns=True) 49 | pd_covar = pd.DataFrame(52.0 * qis.compute_masked_covar_corr(data=returns, is_covar=True), 50 | index=prices.columns, columns=prices.columns) 51 | 
print(f"pd_covar=\n{pd_covar}") 52 | 53 | weights = wrapper_maximise_diversification(pd_covar=pd_covar, 54 | constraints0=constraints0, 55 | weights_0=benchmark_weights) 56 | 57 | df_weight = pd.concat([benchmark_weights.rename('benchmark'), weights.rename('portfolio')], axis=1) 58 | print(f"weights=\n{df_weight}") 59 | qis.plot_bars(df=df_weight, stacked=False) 60 | 61 | te_vol, turnover, alpha, port_vol, benchmark_vol = compute_tre_turnover_stats(covar=pd_covar.to_numpy(), 62 | benchmark_weights=benchmark_weights, 63 | weights=weights, 64 | weights_0=benchmark_weights) 65 | print(f"port_vol={port_vol:0.4f}, benchmark_vol={benchmark_vol:0.4f}, te_vol={te_vol:0.4f}, " 66 | f"turnover={turnover:0.4f}, alpha={alpha:0.4f}") 67 | 68 | plt.show() 69 | 70 | elif local_test == LocalTests.ROLLING_OPTIMISATION: 71 | # optimise using last available data as inputs 72 | time_period = qis.TimePeriod('31Jan2007', '17Apr2025') 73 | rebalancing_costs = 0.0003 74 | covar_estimator = CovarEstimator() 75 | weights = rolling_maximise_diversification(prices=prices, 76 | constraints0=constraints0, 77 | time_period=time_period, 78 | covar_estimator=covar_estimator) 79 | print(weights) 80 | 81 | portfolio_dict = {'Optimal Portfolio': weights, 82 | 'EqualWeight Portfolio': qis.df_to_equal_weight_allocation(prices, index=weights.index)} 83 | portfolio_datas = [] 84 | for ticker, weights in portfolio_dict.items(): 85 | portfolio_data = qis.backtest_model_portfolio(prices=prices, 86 | weights=weights, 87 | rebalancing_costs=rebalancing_costs, 88 | weight_implementation_lag=1, 89 | ticker=ticker) 90 | portfolio_data.set_group_data(group_data=group_data) 91 | portfolio_datas.append(portfolio_data) 92 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas, benchmark_prices=benchmark_prices) 93 | kwargs = qis.fetch_default_report_kwargs(time_period=time_period, add_rates_data=True) 94 | figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data, 95 | 
time_period=time_period, 96 | add_strategy_factsheet=True, 97 | add_grouped_exposures=False, 98 | add_grouped_cum_pnl=False, 99 | **kwargs) 100 | qis.save_figs_to_pdf(figs=figs, 101 | file_name=f"max diversification portfolio", orientation='landscape', 102 | local_path=lp.get_output_path()) 103 | 104 | 105 | if __name__ == '__main__': 106 | 107 | run_local_test(local_test=LocalTests.ROLLING_OPTIMISATION) 108 | -------------------------------------------------------------------------------- /optimalportfolios/examples/solvers/max_sharpe.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of maximisation of portfolio Sharpe ratio 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | import qis as qis 8 | from enum import Enum 9 | 10 | from optimalportfolios import (Constraints, GroupLowerUpperConstraints, 11 | compute_tre_turnover_stats, 12 | rolling_maximize_portfolio_sharpe, 13 | wrapper_maximize_portfolio_sharpe) 14 | 15 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 16 | 17 | 18 | class LocalTests(Enum): 19 | ONE_STEP_OPTIMISATION = 1 20 | ROLLING_OPTIMISATION = 2 21 | 22 | 23 | def run_local_test(local_test: LocalTests): 24 | """Run local tests for development and debugging purposes. 25 | 26 | These are integration tests that download real data and generate reports. 27 | Use for quick verification during development.
28 | """ 29 | 30 | import optimalportfolios.local_path as lp 31 | 32 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 33 | 34 | # add constraints that each asset class is 5% <= sum ac weights <= 25% (benchmark is 20% each) 35 | group_min_allocation = pd.Series(0.05, index=ac_loadings.columns) 36 | group_max_allocation = pd.Series(0.25, index=ac_loadings.columns) 37 | group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings, 38 | group_min_allocation=group_min_allocation, 39 | group_max_allocation=group_max_allocation) 40 | constraints0 = Constraints(is_long_only=True, 41 | group_lower_upper_constraints=group_lower_upper_constraints, 42 | min_weights=pd.Series(0.0, index=prices.columns), 43 | max_weights=pd.Series(1.0, index=prices.columns), 44 | weights_0=benchmark_weights) 45 | 46 | if local_test == LocalTests.ONE_STEP_OPTIMISATION: 47 | # optimise using last available data as inputs 48 | returns = qis.to_returns(prices, freq='W-WED', is_log_returns=True) 49 | pd_covar = pd.DataFrame(52.0 * qis.compute_masked_covar_corr(data=returns, is_covar=True), 50 | index=prices.columns, columns=prices.columns) 51 | print(f"pd_covar=\n{pd_covar}") 52 | 53 | weights = wrapper_maximize_portfolio_sharpe(pd_covar=pd_covar, 54 | means=52.0*returns.mean(0), 55 | constraints0=constraints0, 56 | weights_0=benchmark_weights) 57 | 58 | df_weight = pd.concat([benchmark_weights.rename('benchmark'), weights.rename('portfolio')], axis=1) 59 | print(f"weights=\n{df_weight}") 60 | qis.plot_bars(df=df_weight) 61 | 62 | te_vol, turnover, alpha, port_vol, benchmark_vol = compute_tre_turnover_stats(covar=pd_covar.to_numpy(), 63 | benchmark_weights=benchmark_weights, 64 | weights=weights, 65 | weights_0=benchmark_weights) 66 | print(f"port_vol={port_vol:0.4f}, benchmark_vol={benchmark_vol:0.4f}, te_vol={te_vol:0.4f}, " 67 | f"turnover={turnover:0.4f}, alpha={alpha:0.4f}") 68 | 69 | plt.show()
70 | 71 | elif local_test == LocalTests.ROLLING_OPTIMISATION: 72 | # optimise using last available data as inputs 73 | time_period = qis.TimePeriod('31Jan2007', '17Apr2025') 74 | rebalancing_costs = 0.0003 75 | 76 | weights = rolling_maximize_portfolio_sharpe(prices=prices, 77 | constraints0=constraints0, 78 | time_period=time_period) 79 | 80 | print(weights) 81 | 82 | portfolio_dict = {'Optimal Portfolio': weights, 83 | 'EqualWeight Portfolio': qis.df_to_equal_weight_allocation(prices, index=weights.index)} 84 | portfolio_datas = [] 85 | for ticker, weights in portfolio_dict.items(): 86 | portfolio_data = qis.backtest_model_portfolio(prices=prices, 87 | weights=weights, 88 | rebalancing_costs=rebalancing_costs, 89 | weight_implementation_lag=1, 90 | ticker=ticker) 91 | portfolio_data.set_group_data(group_data=group_data) 92 | portfolio_datas.append(portfolio_data) 93 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas, benchmark_prices=benchmark_prices) 94 | kwargs = qis.fetch_default_report_kwargs(time_period=time_period, add_rates_data=True) 95 | figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data, 96 | time_period=time_period, 97 | add_strategy_factsheet=True, 98 | add_grouped_exposures=False, 99 | add_grouped_cum_pnl=False, 100 | **kwargs) 101 | qis.save_figs_to_pdf(figs=figs, 102 | file_name=f"max sharpe portfolio", orientation='landscape', 103 | local_path=lp.get_output_path()) 104 | 105 | 106 | if __name__ == '__main__': 107 | 108 | run_local_test(local_test=LocalTests.ROLLING_OPTIMISATION) 109 | -------------------------------------------------------------------------------- /optimalportfolios/examples/solvers/min_variance.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of minimum variance optimisation 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | import qis as qis 8 | from enum import Enum 9 | 10 
| from optimalportfolios import (Constraints, GroupLowerUpperConstraints, CovarEstimator, 11 | compute_tre_turnover_stats, 12 | wrapper_quadratic_optimisation, 13 | rolling_quadratic_optimisation) 14 | 15 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 16 | 17 | 18 | class LocalTests(Enum): 19 | ONE_STEP_OPTIMISATION = 1 20 | TRACKING_ERROR_GRID = 2 21 | ROLLING_OPTIMISATION = 3 22 | 23 | 24 | def run_local_test(local_test: LocalTests): 25 | """Run local tests for development and debugging purposes. 26 | 27 | These are integration tests that download real data and generate reports. 28 | Use for quick verification during development. 29 | """ 30 | 31 | import optimalportfolios.local_path as lp 32 | 33 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 34 | 35 | # add costraints that each asset class is 10% <= sum ac weights <= 30% (benchamrk is 20% each) 36 | group_min_allocation = pd.Series(0.1, index=ac_loadings.columns) 37 | group_max_allocation = pd.Series(0.3, index=ac_loadings.columns) 38 | group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings, 39 | group_min_allocation=group_min_allocation, 40 | group_max_allocation=group_max_allocation) 41 | 42 | constraints0 = Constraints(is_long_only=True, 43 | min_weights=pd.Series(0.0, index=prices.columns), 44 | max_weights=pd.Series(0.2, index=prices.columns), 45 | weights_0=benchmark_weights, 46 | group_lower_upper_constraints=group_lower_upper_constraints) 47 | 48 | if local_test == LocalTests.ONE_STEP_OPTIMISATION: 49 | # optimise using last available data as inputs 50 | returns = qis.to_returns(prices, freq='W-WED', is_log_returns=True) 51 | pd_covar = pd.DataFrame(52.0 * qis.compute_masked_covar_corr(data=returns, is_covar=True), 52 | index=prices.columns, columns=prices.columns) 53 | print(f"pd_covar=\n{pd_covar}") 54 | weights = 
wrapper_quadratic_optimisation(pd_covar=pd_covar, 55 | constraints0=constraints0, 56 | weights_0=benchmark_weights) 57 | 58 | df_weight = pd.concat([benchmark_weights.rename('benchmark'), weights.rename('portfolio')], axis=1) 59 | print(f"weights=\n{df_weight}") 60 | qis.plot_bars(df=df_weight) 61 | 62 | te_vol, turnover, alpha, port_vol, benchmark_vol = compute_tre_turnover_stats(covar=pd_covar.to_numpy(), 63 | benchmark_weights=benchmark_weights, 64 | weights=weights, 65 | weights_0=benchmark_weights) 66 | print(f"port_vol={port_vol:0.4f}, benchmark_vol={benchmark_vol:0.4f}, te_vol={te_vol:0.4f}, " 67 | f"turnover={turnover:0.4f}, alpha={alpha:0.4f}") 68 | 69 | plt.show() 70 | 71 | elif local_test == LocalTests.ROLLING_OPTIMISATION: 72 | # optimise using last available data as inputs 73 | time_period = qis.TimePeriod('31Jan2007', '17Apr2025') 74 | rebalancing_costs = 0.0003 75 | covar_estimator = CovarEstimator() 76 | weights = rolling_quadratic_optimisation(prices=prices, 77 | constraints0=constraints0, 78 | time_period=time_period, 79 | covar_estimator=covar_estimator) 80 | print(weights) 81 | 82 | portfolio_dict = {'Optimal Portfolio': weights, 83 | 'EqualWeight Portfolio': qis.df_to_equal_weight_allocation(prices, index=weights.index)} 84 | portfolio_datas = [] 85 | for ticker, weights in portfolio_dict.items(): 86 | portfolio_data = qis.backtest_model_portfolio(prices=prices, 87 | weights=weights, 88 | rebalancing_costs=rebalancing_costs, 89 | weight_implementation_lag=1, 90 | ticker=ticker) 91 | portfolio_data.set_group_data(group_data=group_data) 92 | portfolio_datas.append(portfolio_data) 93 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas, benchmark_prices=benchmark_prices) 94 | kwargs = qis.fetch_default_report_kwargs(time_period=time_period, add_rates_data=True) 95 | figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data, 96 | time_period=time_period, 97 | add_strategy_factsheet=True, 98 | 
add_grouped_exposures=False, 99 | add_grouped_cum_pnl=False, 100 | **kwargs) 101 | qis.save_figs_to_pdf(figs=figs, 102 | file_name=f"min variance portfolio", orientation='landscape', 103 | local_path=lp.get_output_path()) 104 | 105 | 106 | if __name__ == '__main__': 107 | 108 | run_local_test(local_test=LocalTests.ROLLING_OPTIMISATION) 109 | -------------------------------------------------------------------------------- /optimalportfolios/examples/solvers/risk_parity.py: -------------------------------------------------------------------------------- 1 | """ 2 | example of risk budgeting (risk parity) optimisation 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | import qis as qis 8 | from enum import Enum 9 | 10 | from optimalportfolios import (Constraints, GroupLowerUpperConstraints, CovarEstimator, 11 | compute_tre_turnover_stats, 12 | wrapper_risk_budgeting, 13 | rolling_risk_budgeting) 14 | 15 | from optimalportfolios.examples.universe import fetch_benchmark_universe_data 16 | 17 | 18 | class LocalTests(Enum): 19 | ONE_STEP_OPTIMISATION = 1 20 | TRACKING_ERROR_GRID = 2 21 | ROLLING_OPTIMISATION = 3 22 | 23 | 24 | def run_local_test(local_test: LocalTests): 25 | """Run local tests for development and debugging purposes. 26 | 27 | These are integration tests that download real data and generate reports. 28 | Use for quick verification during development.
29 | """ 30 | 31 | import optimalportfolios.local_path as lp 32 | 33 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 34 | 35 | # add constraints that each asset class is 10% <= sum ac weights <= 30% (benchmark is 20% each) 36 | group_min_allocation = pd.Series(0.1, index=ac_loadings.columns) 37 | group_max_allocation = pd.Series(0.3, index=ac_loadings.columns) 38 | group_lower_upper_constraints = GroupLowerUpperConstraints(group_loadings=ac_loadings, 39 | group_min_allocation=group_min_allocation, 40 | group_max_allocation=group_max_allocation) 41 | 42 | constraints0 = Constraints(is_long_only=True, 43 | min_weights=pd.Series(0.0, index=prices.columns), 44 | max_weights=pd.Series(1.0, index=prices.columns), 45 | weights_0=benchmark_weights)  # NOTE(review): group_lower_upper_constraints is built above but never passed here, unlike the sibling solver examples — confirm if intended 46 | 47 | if local_test == LocalTests.ONE_STEP_OPTIMISATION: 48 | # optimise using last available data as inputs 49 | returns = qis.to_returns(prices, freq='W-WED', is_log_returns=True) 50 | pd_covar = pd.DataFrame(52.0 * qis.compute_masked_covar_corr(data=returns, is_covar=True), 51 | index=prices.columns, columns=prices.columns) 52 | print(f"pd_covar=\n{pd_covar}") 53 | 54 | weights = wrapper_risk_budgeting(pd_covar=pd_covar, 55 | constraints0=constraints0, 56 | weights_0=benchmark_weights) 57 | 58 | df_weight = pd.concat([benchmark_weights.rename('benchmark'), weights.rename('portfolio')], axis=1) 59 | print(f"weights=\n{df_weight}") 60 | qis.plot_bars(df=df_weight) 61 | 62 | te_vol, turnover, alpha, port_vol, benchmark_vol = compute_tre_turnover_stats(covar=pd_covar.to_numpy(), 63 | benchmark_weights=benchmark_weights, 64 | weights=weights, 65 | weights_0=benchmark_weights) 66 | print(f"port_vol={port_vol:0.4f}, benchmark_vol={benchmark_vol:0.4f}, te_vol={te_vol:0.4f}, " 67 | f"turnover={turnover:0.4f}, alpha={alpha:0.4f}") 68 | 69 | plt.show() 70 | 71 | elif local_test == LocalTests.ROLLING_OPTIMISATION: 72 | # optimise using last available data as
inputs 73 | time_period = qis.TimePeriod('31Jan2007', '17Apr2025') 74 | rebalancing_costs = 0.0003 75 | covar_estimator = CovarEstimator(returns_freqs='W-WED', rebalancing_freq='ME', span=52) 76 | weights = rolling_risk_budgeting(prices=prices, 77 | constraints0=constraints0, 78 | time_period=time_period, 79 | covar_estimator=covar_estimator) 80 | print(weights) 81 | 82 | portfolio_dict = {'Optimal Portfolio': weights, 83 | 'EqualWeight Portfolio': qis.df_to_equal_weight_allocation(prices, index=weights.index)} 84 | portfolio_datas = [] 85 | for ticker, weights in portfolio_dict.items(): 86 | portfolio_data = qis.backtest_model_portfolio(prices=prices, 87 | weights=weights, 88 | rebalancing_costs=rebalancing_costs, 89 | weight_implementation_lag=1, 90 | ticker=ticker) 91 | portfolio_data.set_group_data(group_data=group_data) 92 | portfolio_datas.append(portfolio_data) 93 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas, benchmark_prices=benchmark_prices) 94 | kwargs = qis.fetch_default_report_kwargs(time_period=time_period, add_rates_data=True) 95 | figs = qis.generate_strategy_benchmark_factsheet_plt(multi_portfolio_data=multi_portfolio_data, 96 | time_period=time_period, 97 | add_strategy_factsheet=True, 98 | add_grouped_exposures=False, 99 | add_grouped_cum_pnl=False, 100 | **kwargs) 101 | qis.save_figs_to_pdf(figs=figs, 102 | file_name=f"risk parity portfolio", orientation='landscape', 103 | local_path=lp.get_output_path()) 104 | 105 | 106 | if __name__ == '__main__': 107 | 108 | run_local_test(local_test=LocalTests.ROLLING_OPTIMISATION) 109 | -------------------------------------------------------------------------------- /optimalportfolios/examples/sp500_minvar.py: -------------------------------------------------------------------------------- 1 | """ 2 | run Minimum Variance portfolio optimiser for S&P 500 universe 3 | The goal is to backtest the sensetivity of squeezing of the covariance matrix using SSRN paper 4 | Squeezing Financial Noise: 
A Novel Approach to Covariance Matrix Estimation 5 | https://papers.ssrn.com/sol3/papers.cfm?abstract_id=4986939 6 | """ 7 | 8 | # packages 9 | import pandas as pd 10 | import qis as qis 11 | import yfinance as yf 12 | from typing import List 13 | from enum import Enum 14 | 15 | # optimalportfolios 16 | from optimalportfolios import PortfolioObjective, Constraints, rolling_quadratic_optimisation, CovarEstimator 17 | 18 | 19 | def run_cross_backtest(prices: pd.DataFrame, 20 | inclusion_indicators: pd.DataFrame, 21 | group_data: pd.Series, 22 | time_period: qis.TimePeriod, 23 | squeeze_factors: List[float] = (0.0, 0.125, 0.250, 0.375, 0.5, 0.7, 0.9) 24 | ) -> List[qis.PortfolioData]: 25 | """Runs cross-validation backtest for minimum variance portfolios with different covariance shrinkage factors. 26 | 27 | Tests sensitivity of portfolio performance to covariance matrix shrinkage by running 28 | backtests across multiple squeeze factors. Uses long-only constraints with 5% max weight. 29 | 30 | Args: 31 | prices: Asset price DataFrame. 32 | inclusion_indicators: S&P 500 inclusion indicators DataFrame. 33 | group_data: Sector group data Series. 34 | time_period: Backtest time period. 35 | squeeze_factors: List of shrinkage factors for covariance estimation. Defaults to (0.0, 0.125, 0.250, 0.375, 0.5, 0.7, 0.9). 36 | 37 | Returns: 38 | List of PortfolioData objects, one for each squeeze factor. 
39 | """ 40 | constraints0 = Constraints(is_long_only=True, 41 | min_weights=pd.Series(0.0, index=prices.columns), 42 | max_weights=pd.Series(0.05, index=prices.columns)) 43 | 44 | portfolio_datas = [] 45 | for squeeze_factor in squeeze_factors: 46 | covar_estimator = CovarEstimator(squeeze_factor=squeeze_factor, returns_freqs='W-WED', rebalancing_freq='QE') 47 | weights = rolling_quadratic_optimisation(prices=prices, 48 | constraints0=constraints0, 49 | portfolio_objective=PortfolioObjective.MIN_VARIANCE, 50 | time_period=time_period, 51 | inclusion_indicators=inclusion_indicators, 52 | covar_estimator=covar_estimator) 53 | portfolio_data = qis.backtest_model_portfolio(prices=time_period.locate(prices), 54 | weights=weights, 55 | ticker=f"squeeze={squeeze_factor: 0.3f}", 56 | funding_rate=None, 57 | weight_implementation_lag=1, 58 | rebalancing_costs=0.0030) 59 | portfolio_data.set_group_data(group_data=group_data) 60 | portfolio_datas.append(portfolio_data) 61 | return portfolio_datas 62 | 63 | 64 | class LocalTests(Enum): 65 | CROSS_BACKTEST = 1 66 | 67 | 68 | def run_local_test(local_test: LocalTests): 69 | """Run local tests for development and debugging purposes. 70 | 71 | These are integration tests that download real data and generate reports. 72 | Use for quick verification during development. 
73 | """ 74 | 75 | import quant_strats.local_path as lp 76 | from optimalportfolios.examples.sp500_universe import load_sp500_universe_yahoo 77 | 78 | if local_test == LocalTests.CROSS_BACKTEST: 79 | 80 | # time_period = qis.TimePeriod('31Dec2010', '31Jan2024', tz='UTC') 81 | time_period = qis.TimePeriod('31Dec2010', '31Jan2024') 82 | # define squeeze_factors 83 | squeeze_factors = [0.0, 0.25, 0.5] 84 | # squeeze_factors = [0.0, 0.125, 0.250, 0.375, 0.5, 0.7, 0.9] 85 | 86 | prices, inclusion_indicators, group_data = load_sp500_universe_yahoo() 87 | 88 | portfolio_datas = run_cross_backtest(prices=prices, 89 | inclusion_indicators=inclusion_indicators, 90 | group_data=group_data, 91 | time_period=time_period, 92 | squeeze_factors=squeeze_factors) 93 | 94 | # run cross portfolio report 95 | benchmark_prices = yf.download('SPY', start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'].asfreq('B').ffill() 96 | multi_portfolio_data = qis.MultiPortfolioData(portfolio_datas=portfolio_datas, 97 | benchmark_prices=benchmark_prices) 98 | 99 | figs = qis.generate_multi_portfolio_factsheet(multi_portfolio_data=multi_portfolio_data, 100 | time_period=time_period, 101 | add_benchmarks_to_navs=True, 102 | add_strategy_factsheets=False, 103 | **qis.fetch_default_report_kwargs(time_period=time_period)) 104 | 105 | # save report to pdf and png 106 | qis.save_figs_to_pdf(figs=figs, 107 | file_name=f"sp500_squeeze_portfolio_factsheet", 108 | orientation='landscape', 109 | local_path=lp.get_output_path()) 110 | 111 | 112 | if __name__ == '__main__': 113 | 114 | run_local_test(local_test=LocalTests.CROSS_BACKTEST) 115 | -------------------------------------------------------------------------------- /optimalportfolios/examples/sp500_universe.py: -------------------------------------------------------------------------------- 1 | """ 2 | create composition of S&P 500 universe compositions using members in https://github.com/fja05680/sp500 3 | note that some of the 
companies ever included in the S&P500 are de-listed and yfinance does not have data on them 4 | """ 5 | 6 | # packages 7 | import pandas as pd 8 | import qis as qis 9 | import yfinance as yf 10 | from typing import Tuple, List 11 | from enum import Enum 12 | 13 | from optimalportfolios.local_path import get_resource_path 14 | 15 | # path to save universe data 16 | LOCAL_PATH = f"{get_resource_path()}//sp500//" 17 | # download from source: https://github.com/fja05680/sp500 18 | SP500_FILE = "S&P 500 Historical Components & Changes(07-12-2025).csv" 19 | 20 | 21 | def create_inclusion_indicators() -> Tuple[pd.DataFrame, pd.DataFrame]: 22 | """ 23 | Create binary inclusion indicator DataFrames for S&P 500 constituents over time. 24 | 25 | Returns: 26 | Tuple[pd.DataFrame, pd.DataFrame]: Regular and Bloomberg-formatted ticker 27 | inclusion indicators with dates as index and 1.0 for included stocks. 28 | """ 29 | universe = pd.read_csv(f"{LOCAL_PATH}{SP500_FILE}", index_col='date') 30 | inclusion_indicators = {} 31 | inclusion_indicators_bbg = {} 32 | for date in universe.index: 33 | tickers = universe.loc[date, :].apply(lambda x: sorted(x.split(','))).to_list()[0] 34 | bbg_tickers = [f"{x} US Equity" for x in tickers] 35 | inclusion_indicators[date] = pd.Series(1.0, index=tickers) 36 | inclusion_indicators_bbg[date] = pd.Series(1.0, index=bbg_tickers) 37 | inclusion_indicators = pd.DataFrame.from_dict(inclusion_indicators, orient='index').sort_index() 38 | inclusion_indicators_bbg = pd.DataFrame.from_dict(inclusion_indicators_bbg, orient='index').sort_index() 39 | return inclusion_indicators, inclusion_indicators_bbg 40 | 41 | 42 | def create_sp500_universe_with_yahoo(local_path: str = LOCAL_PATH) -> None: 43 | """ 44 | Fetch S&P 500 price and sector data from Yahoo Finance and save to CSV files. 
45 | 46 | Downloads historical prices and industry classifications for S&P 500 constituents, 47 | then saves the cleaned data as 'sp500_prices_yahoo.csv', 'sp500_inclusions_yahoo.csv', 48 | and 'sp500_groups_yahoo.csv' files. 49 | """ 50 | inclusion_indicators, inclusion_indicators_bbg = create_inclusion_indicators() 51 | def fetch_universe_prices(tickers: List[str]) -> pd.DataFrame: 52 | prices = yf.download(tickers=tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'] 53 | return prices[tickers] 54 | 55 | def fetch_universe_industry(tickers: List[str]) -> pd.Series: 56 | group_data = {} 57 | for ticker in tickers: 58 | this = yf.Ticker(ticker).info 59 | if 'sector' in this: 60 | group_data[ticker] = this['sector'] 61 | else: 62 | group_data[ticker] = 'unclassified' 63 | return pd.Series(group_data) 64 | 65 | prices = fetch_universe_prices(tickers=inclusion_indicators.columns.to_list()) 66 | # remove all nans 67 | prices = prices.dropna(axis=1, how='all').asfreq('B', method='ffill') 68 | group_data = fetch_universe_industry(tickers=prices.columns.to_list()) 69 | inclusion_indicators = inclusion_indicators[prices.columns] 70 | qis.save_df_to_csv(df=prices, file_name='sp500_prices_yahoo', local_path=local_path) 71 | qis.save_df_to_csv(df=inclusion_indicators, file_name='sp500_inclusions_yahoo', local_path=local_path) 72 | qis.save_df_to_csv(df=group_data.to_frame(), file_name='sp500_groups_yahoo', local_path=local_path) 73 | 74 | 75 | def load_sp500_universe_yahoo(local_path: str = LOCAL_PATH) -> Tuple[pd.DataFrame, pd.DataFrame, pd.Series]: 76 | """Loads S&P 500 universe data from Yahoo Finance CSV files. 77 | 78 | Loads prices, inclusion indicators, and sector group data. Aligns timezone 79 | information between prices and inclusion indicators. 80 | 81 | Args: 82 | local_path: Path to directory containing CSV files. Defaults to LOCAL_PATH. 
83 | 84 | Returns: 85 | Tuple of (prices DataFrame, inclusion indicators DataFrame, group data Series). 86 | """ 87 | prices = qis.load_df_from_csv(file_name='sp500_prices_yahoo', local_path=local_path) 88 | inclusion_indicators = qis.load_df_from_csv(file_name='sp500_inclusions_yahoo', local_path=local_path) 89 | inclusion_indicators.index = inclusion_indicators.index.tz_localize(tz=prices.index.tz) # align tz info 90 | group_data = qis.load_df_from_csv(file_name='sp500_groups_yahoo', parse_dates=False, local_path=local_path).iloc[:, 0] 91 | return prices, inclusion_indicators, group_data 92 | 93 | 94 | def create_sp500_universe_with_bloomberg(start_date: pd.Timestamp = pd.Timestamp('31Dec1995'), 95 | local_path: str = LOCAL_PATH 96 | ) -> None: 97 | """Creates S&P 500 universe data using Bloomberg API. 98 | 99 | Fetches prices, market cap, sector data, and inclusion indicators for S&P 500 100 | constituents. Filters out delisted stocks without sectors and saves results to CSV. 101 | 102 | Args: 103 | start_date: Start date for time series data. Defaults to Dec 31, 1995. 104 | local_path: Path to directory for saving CSV files. Defaults to LOCAL_PATH. 
105 | """ 106 | from bbg_fetch import fetch_field_timeseries_per_tickers, fetch_fundamentals 107 | inclusion_indicators, inclusion_indicators_bbg = create_inclusion_indicators() 108 | 109 | tickers = inclusion_indicators_bbg.columns.to_list() 110 | print(tickers) 111 | 112 | # first get industries 113 | group_datas = fetch_fundamentals(tickers=tickers, fields=['gics_sector_name']) 114 | # drop stocks without sectors: for delisted stocks their tickers can become funds or etfs 115 | clean_group_datas = group_datas.dropna() 116 | print(f"original n = {len(group_datas.index)}, new n = {len(clean_group_datas.index)}") 117 | 118 | tickers = clean_group_datas.index.to_list() 119 | prices = fetch_field_timeseries_per_tickers(tickers=tickers, start_date=start_date, freq='B') 120 | # remove all nans 121 | prices = prices.dropna(axis=1, how='all') 122 | 123 | market_cap = fetch_field_timeseries_per_tickers(tickers=tickers, start_date=start_date, freq='B', field='CUR_MKT_CAP') 124 | market_cap = market_cap.reindex(columns=prices.columns) 125 | 126 | group_datas = clean_group_datas.reindex(index=prices.columns) 127 | 128 | inclusion_indicators_bbg = inclusion_indicators_bbg.reindex(columns=prices.columns) 129 | qis.save_df_to_csv(df=prices, file_name='sp500_prices_bloomberg', local_path=local_path) 130 | qis.save_df_to_csv(df=market_cap, file_name='sp500_market_cap_bloomberg', local_path=local_path) 131 | qis.save_df_to_csv(df=inclusion_indicators_bbg, file_name='sp500_inclusions_bloomberg', local_path=local_path) 132 | qis.save_df_to_csv(df=group_datas, file_name='sp500_groups_bloomberg', local_path=local_path) 133 | 134 | 135 | def load_sp500_universe_bloomberg(local_path: str = LOCAL_PATH 136 | ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame, pd.Series]: 137 | """Loads S&P 500 universe data from Bloomberg CSV files. 138 | 139 | Loads prices, market cap, inclusion indicators, and sector group data 140 | previously saved by create_sp500_universe_with_bloomberg(). 
141 | 142 | Args: 143 | local_path: Path to directory containing CSV files. Defaults to LOCAL_PATH. 144 | 145 | Returns: 146 | Tuple of (prices DataFrame, market cap DataFrame, inclusion indicators DataFrame, group data Series). 147 | """ 148 | prices = qis.load_df_from_csv(file_name='sp500_prices_bloomberg', local_path=local_path) 149 | market_cap = qis.load_df_from_csv(file_name='sp500_market_cap_bloomberg', local_path=local_path) 150 | inclusion_indicators = qis.load_df_from_csv(file_name='sp500_inclusions_bloomberg', local_path=local_path) 151 | group_data = qis.load_df_from_csv(file_name='sp500_groups_bloomberg', parse_dates=False, local_path=local_path).iloc[:, 0] 152 | return prices, market_cap, inclusion_indicators, group_data 153 | 154 | 155 | class LocalTests(Enum): 156 | CREATE_UNIVERSE_DATA_WITH_YAHOO = 1 157 | CREATE_UNIVERSE_DATA_WITH_BLOOMBERG = 2 158 | LOAD = 3 159 | 160 | 161 | def run_local_test(local_test: LocalTests): 162 | """Run local tests for development and debugging purposes. 163 | 164 | These are integration tests that download real data and generate reports. 165 | Use for quick verification during development. 
166 | """ 167 | 168 | if local_test == LocalTests.CREATE_UNIVERSE_DATA_WITH_YAHOO: 169 | create_sp500_universe_with_yahoo() 170 | 171 | elif local_test == LocalTests.CREATE_UNIVERSE_DATA_WITH_BLOOMBERG: 172 | create_sp500_universe_with_bloomberg() 173 | 174 | elif local_test == LocalTests.LOAD: 175 | prices, market_cap, inclusion_indicators, group_data = load_sp500_universe_bloomberg() 176 | print(prices) 177 | print(market_cap) 178 | print(inclusion_indicators) 179 | print(group_data) 180 | 181 | 182 | if __name__ == '__main__': 183 | 184 | run_local_test(local_test=LocalTests.CREATE_UNIVERSE_DATA_WITH_BLOOMBERG) 185 | -------------------------------------------------------------------------------- /optimalportfolios/examples/universe.py: -------------------------------------------------------------------------------- 1 | """ 2 | fetch an universe of bond etfs for testing optimisations 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | import seaborn as sns 8 | import qis as qis 9 | import yfinance as yf 10 | from typing import Tuple 11 | from enum import Enum 12 | 13 | 14 | def fetch_benchmark_universe_data() -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame, pd.Series, pd.Series, pd.DataFrame]: 15 | """ 16 | fetch a universe of etfs 17 | define custom universe with asset class grouping 18 | 5 asset groups with 3 etfs in each 19 | """ 20 | universe_data = dict(SPY='Equities', 21 | QQQ='Equities', 22 | EEM='Equities', 23 | TLT='Bonds', 24 | IEF='Bonds', 25 | TIP='Bonds', 26 | IGSB='IG', 27 | LQD='IG', 28 | EMB='IG', 29 | HYG='HighYield', 30 | SHYG='HighYield', 31 | FALN='HighYield', 32 | GLD='Commodts', 33 | GSG='Commodts', 34 | COMT='Commodts') 35 | group_data = pd.Series(universe_data) # for portfolio reporting 36 | equal_weight = 1.0 / len(universe_data.keys()) 37 | benchmark_weights = {x: equal_weight for x in universe_data.keys()} 38 | 39 | # asset class loadings 40 | ac_loadings = 
qis.set_group_loadings(group_data=group_data) 41 | 42 | tickers = list(universe_data.keys()) 43 | benchmark_weights = pd.Series(benchmark_weights) 44 | prices = yf.download(tickers=tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True)['Close'][tickers] 45 | prices = prices.asfreq('B').ffill() 46 | # for group lass 47 | ac_benchmark_prices = prices[['SPY', 'TLT', 'LQD', 'HYG', 'GSG']].rename(dict(SPY='Equities', TLT='Bonds', IG='LQD', HYG='HighYield', GLD='Commodts')) 48 | 49 | # select asset class benchmarks from universe 50 | benchmark_prices = prices[['SPY', 'TLT']] 51 | 52 | return prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices 53 | 54 | 55 | class LocalTests(Enum): 56 | ILLUSTRATE_INPUT_DATA = 1 57 | 58 | 59 | def run_local_test(local_test: LocalTests): 60 | """Run local tests for development and debugging purposes. 61 | 62 | These are integration tests that download real data and generate reports. 63 | Use for quick verification during development. 64 | """ 65 | 66 | prices, benchmark_prices, ac_loadings, benchmark_weights, group_data, ac_benchmark_prices = fetch_benchmark_universe_data() 67 | 68 | if local_test == LocalTests.ILLUSTRATE_INPUT_DATA: 69 | with sns.axes_style('darkgrid'): 70 | fig, axs = plt.subplots(2, 1, figsize=(14, 12), constrained_layout=True) 71 | qis.plot_prices_with_dd(prices=prices, axs=axs) 72 | 73 | plt.show() 74 | 75 | 76 | if __name__ == '__main__': 77 | 78 | run_local_test(local_test=LocalTests.ILLUSTRATE_INPUT_DATA) 79 | -------------------------------------------------------------------------------- /optimalportfolios/lasso/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from optimalportfolios.lasso. 
lasso_model_estimator import (LassoModelType, 3 | LassoModel, 4 | ClusterDataByDates, 5 | solve_lasso_cvx_problem, 6 | solve_group_lasso_cvx_problem) -------------------------------------------------------------------------------- /optimalportfolios/local_path.py: -------------------------------------------------------------------------------- 1 | """ 2 | get local path using setting.yaml 3 | setting.yaml is untracked file with PC specific paths 4 | use: 5 | git update-index --skip-worktree optimalportfolios/settings.yaml 6 | """ 7 | import yaml 8 | from pathlib import Path 9 | 10 | 11 | def get_resource_path() -> str: 12 | """ 13 | read path specs in settings.yaml 14 | """ 15 | full_file_path = Path(__file__).parent.joinpath('settings.yaml') 16 | with open(full_file_path) as settings: 17 | settings_data = yaml.load(settings, Loader=yaml.Loader) 18 | return settings_data['RESOURCE_PATH'] 19 | 20 | 21 | def get_output_path() -> str: 22 | """ 23 | read path specs in settings.yaml 24 | """ 25 | full_file_path = Path(__file__).parent.joinpath('settings.yaml') 26 | with open(full_file_path) as settings: 27 | settings_data = yaml.load(settings, Loader=yaml.Loader) 28 | return settings_data['OUTPUT_PATH'] 29 | 30 | -------------------------------------------------------------------------------- /optimalportfolios/optimization/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from optimalportfolios.optimization.constraints import (Constraints, 3 | GroupLowerUpperConstraints, 4 | GroupTrackingErrorConstraint, 5 | GroupTurnoverConstraint, 6 | merge_group_lower_upper_constraints) 7 | 8 | from optimalportfolios.optimization.wrapper_rolling_portfolios import (compute_rolling_optimal_weights, 9 | backtest_rolling_optimal_portfolio) 10 | 11 | from optimalportfolios.optimization.solvers.__init__ import * 12 | -------------------------------------------------------------------------------- 
/optimalportfolios/optimization/solvers/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from optimalportfolios.optimization.solvers.carra_mixure import (rolling_maximize_cara_mixture, 3 | wrapper_maximize_cara_mixture, 4 | opt_maximize_cara_mixture) 5 | 6 | from optimalportfolios.optimization.solvers.max_diversification import (rolling_maximise_diversification, 7 | wrapper_maximise_diversification, 8 | opt_maximise_diversification) 9 | 10 | from optimalportfolios.optimization.solvers.max_sharpe import (rolling_maximize_portfolio_sharpe, 11 | wrapper_maximize_portfolio_sharpe, 12 | cvx_maximize_portfolio_sharpe) 13 | 14 | from optimalportfolios.optimization.solvers.quadratic import (rolling_quadratic_optimisation, 15 | wrapper_quadratic_optimisation, 16 | cvx_quadratic_optimisation) 17 | 18 | from optimalportfolios.optimization.solvers.risk_budgeting import (rolling_risk_budgeting, 19 | wrapper_risk_budgeting, 20 | opt_risk_budgeting, 21 | solve_for_risk_budgets_from_given_weights) 22 | 23 | from optimalportfolios.optimization.solvers.target_return import (rolling_maximise_alpha_with_target_return, 24 | wrapper_maximise_alpha_with_target_return, 25 | cvx_maximise_alpha_with_target_return) 26 | 27 | from optimalportfolios.optimization.solvers.tracking_error import (rolling_maximise_alpha_over_tre, 28 | wrapper_maximise_alpha_over_tre, 29 | cvx_maximise_alpha_over_tre) 30 | -------------------------------------------------------------------------------- /optimalportfolios/optimization/solvers/carra_mixure.py: -------------------------------------------------------------------------------- 1 | """ 2 | Implementation of carra utility 3 | """ 4 | 5 | import numpy as np 6 | import pandas as pd 7 | import qis as qis 8 | from scipy.optimize import minimize 9 | from typing import List, Optional 10 | from enum import Enum 11 | 12 | from optimalportfolios.utils.gaussian_mixture import fit_gaussian_mixture 13 | from 
optimalportfolios.utils.portfolio_funcs import (compute_portfolio_variance, compute_portfolio_risk_contributions) 14 | from optimalportfolios.optimization.constraints import (Constraints, total_weight_constraint, long_only_constraint) 15 | from optimalportfolios.covar_estimation.utils import squeeze_covariance_matrix 16 | 17 | 18 | def rolling_maximize_cara_mixture(prices: pd.DataFrame, 19 | constraints0: Constraints, 20 | time_period: qis.TimePeriod, # when we start building portfolios 21 | rebalancing_freq: str = 'QE', 22 | roll_window: int = 52*6, # number of returns in mixture estimation, default is 6y of weekly returns 23 | returns_freq: str = 'W-WED', # frequency for returns computing mixure distr 24 | carra: float = 0.5, # carra parameters 25 | n_components: int = 3, 26 | squeeze_factor: Optional[float] = None # for squeezing covar matrix 27 | ) -> pd.DataFrame: 28 | """ 29 | solve solvers mixture Carra portfolios 30 | estimation is applied for the whole period of prices 31 | """ 32 | returns = qis.to_returns(prices=prices, is_log_returns=True, drop_first=True, freq=returns_freq) 33 | # generate rebalancing dates on the returns index 34 | rebalancing_schedule = qis.generate_rebalancing_indicators(df=returns, freq=rebalancing_freq) 35 | 36 | _, scaler = qis.get_period_days(freq=returns_freq) 37 | 38 | tickers = prices.columns.to_list() 39 | weights = {} 40 | weights_0 = None 41 | for idx, (date, value) in enumerate(rebalancing_schedule.items()): 42 | if idx >= roll_window-1 and value: 43 | period = qis.TimePeriod(rebalancing_schedule.index[idx - roll_window+1], date) 44 | # drop assets with 45 | rets_ = period.locate(returns).dropna(axis=1, how='any') 46 | params = fit_gaussian_mixture(x=rets_.to_numpy(), n_components=n_components, scaler=scaler) 47 | constraints = constraints0.update_with_valid_tickers(valid_tickers=rets_.columns.to_list(), 48 | total_to_good_ratio=len(tickers)/len(rets_.columns), 49 | weights_0=weights_0) 50 | if squeeze_factor is not None 
and squeeze_factor > 0.0: 51 | params.covars = [squeeze_covariance_matrix(covars, squeeze_factor=squeeze_factor) for covars in params.covars] 52 | 53 | weights_ = wrapper_maximize_cara_mixture(means=params.means, 54 | covars=params.covars, 55 | probs=params.probs, 56 | constraints0=constraints, 57 | tickers=rets_.columns.to_list(), 58 | carra=carra) 59 | weights_0 = weights_ # update for next rebalancing 60 | weights[date] = weights_.reindex(index=tickers).fillna(0.0) 61 | weights = pd.DataFrame.from_dict(weights, orient='index', columns=prices.columns) 62 | if time_period is not None: 63 | weights = time_period.locate(weights) 64 | 65 | return weights 66 | 67 | 68 | def wrapper_maximize_cara_mixture(means: List[np.ndarray], 69 | covars: List[np.ndarray], 70 | probs: np.ndarray, 71 | constraints0: Constraints, 72 | tickers: List[str], 73 | carra: float = 0.5 74 | ) -> pd.Series: 75 | """ 76 | wrapper assumes means and covars are valid 77 | """ 78 | weights = opt_maximize_cara_mixture(means=means, 79 | covars=covars, 80 | probs=probs, 81 | constraints=constraints0, 82 | carra=carra) 83 | weights = pd.Series(weights, index=tickers) 84 | return weights 85 | 86 | 87 | def opt_maximize_cara_mixture(means: List[np.ndarray], 88 | covars: List[np.ndarray], 89 | probs: np.ndarray, 90 | constraints: Constraints, 91 | carra: float = 0.5, 92 | verbose: bool = False 93 | ) -> np.ndarray: 94 | 95 | # set up problem 96 | n = covars[0].shape[0] 97 | if Constraints.weights_0 is not None: 98 | x0 = Constraints.weights_0.to_numpy() 99 | else: 100 | x0 = np.ones(n) / n 101 | 102 | constraints_ = constraints.set_scipy_constraints() # covar is not used for this method 103 | res = minimize(carra_objective_mixture, x0, args=[means, covars, probs, carra], method='SLSQP', 104 | constraints=constraints_, 105 | options={'disp': verbose, 'ftol': 1e-12}) 106 | optimal_weights = res.x 107 | 108 | if optimal_weights is None: 109 | # raise ValueError(f"not solved") 110 | print(f"not solved") 111 | 
if constraints.weights_0 is not None: 112 | optimal_weights = constraints.weights_0 113 | print(f"using weights_0") 114 | else: 115 | optimal_weights = np.zeros(n) 116 | print(f"using zeroweights") 117 | 118 | return optimal_weights 119 | 120 | 121 | def opt_maximize_cara(means: np.ndarray, 122 | covar: np.ndarray, 123 | carra: float = 0.5, 124 | min_weights: np.ndarray = None, 125 | max_weights: np.ndarray = None, 126 | disp: bool = False, 127 | is_exp: bool = False, 128 | is_print_log: bool = False 129 | ) -> np.ndarray: 130 | n = covar.shape[0] 131 | x0 = np.ones(n) / n 132 | cons = [{'type': 'ineq', 'fun': long_only_constraint}, 133 | {'type': 'eq', 'fun': total_weight_constraint}] 134 | if min_weights is not None: 135 | cons.append({'type': 'ineq', 'fun': lambda x: x - min_weights}) 136 | if max_weights is not None: 137 | cons.append({'type': 'ineq', 'fun': lambda x: max_weights - x}) 138 | 139 | if is_exp: 140 | func = carra_objective_exp 141 | else: 142 | func = carra_objective 143 | res = minimize(func, x0, args=[means, covar, carra], method='SLSQP', constraints=cons, 144 | options={'disp': disp, 'ftol': 1e-16}) 145 | w_rb = res.x 146 | 147 | if is_print_log: 148 | print(f'return_p = {w_rb@means}, ' 149 | f'sigma_p = {np.sqrt(compute_portfolio_variance(w_rb, covar))}, weights: {w_rb}, ' 150 | f'risk contrib.s: {compute_portfolio_risk_contributions(w_rb, covar).T} ' 151 | f'sum of weights: {sum(w_rb)}') 152 | return w_rb 153 | 154 | 155 | def carra_objective(w: np.ndarray, pars: List[np.ndarray]) -> float: 156 | means, covar, carra = pars[0], pars[1], pars[2] 157 | v = means.T @ w - 0.5*carra*w.T @ covar @ w 158 | return -v 159 | 160 | 161 | def carra_objective_exp(w: np.ndarray, pars: List[np.ndarray]) -> float: 162 | means, covar, carra = pars[0], pars[1], pars[2] 163 | v = np.exp(-carra*means.T @ w + 0.5*carra*carra*w.T @ covar @ w) 164 | return v 165 | 166 | 167 | def carra_objective_mixture(w: np.ndarray, pars: List[np.ndarray]) -> float: 168 | means, 
covars, probs, carra = pars[0], pars[1], pars[2], pars[3] 169 | v = 0.0 170 | for idx, prob in enumerate(probs): 171 | v = v + prob*np.exp(-carra*means[idx].T @ w + 0.5*carra*carra*w.T @ covars[idx] @ w) 172 | return v 173 | 174 | 175 | class LocalTests(Enum): 176 | CARA = 1 177 | CARA_MIX = 2 178 | 179 | 180 | def run_local_test(local_test: LocalTests): 181 | """Run local tests for development and debugging purposes. 182 | 183 | These are integration tests that download real data and generate reports. 184 | Use for quick verification during development. 185 | """ 186 | 187 | if local_test == LocalTests.CARA: 188 | means = np.array([0.3, 0.1]) 189 | covar = np.array([[0.2 ** 2, 0.01], 190 | [0.01, 0.1 ** 2]]) 191 | w_rb = opt_maximize_cara(means=means, covar=covar, carra=10, is_exp=False, disp=True) 192 | w_rb = opt_maximize_cara(means=means, covar=covar, carra=10, is_exp=True, disp=True) 193 | 194 | elif local_test == LocalTests.CARA_MIX: 195 | means = [np.array([0.05, -0.1]), np.array([0.05, 2.0])] 196 | covars = [np.array([[0.2 ** 2, 0.01], 197 | [0.01, 0.2 ** 2]]), 198 | np.array([[0.2 ** 2, 0.01], 199 | [0.01, 0.2 ** 2]]) 200 | ] 201 | probs = np.array([0.95, 0.05]) 202 | optimal_weights = opt_maximize_cara_mixture(means=means, covars=covars, probs=probs, 203 | constraints=Constraints(), 204 | carra=20.0, verbose=True) 205 | print(optimal_weights) 206 | 207 | 208 | if __name__ == '__main__': 209 | 210 | run_local_test(local_test=LocalTests.CARA_MIX) 211 | -------------------------------------------------------------------------------- /optimalportfolios/optimization/solvers/max_diversification.py: -------------------------------------------------------------------------------- 1 | """ 2 | implementation of maximum diversification objective 3 | """ 4 | # packages 5 | import numpy as np 6 | import pandas as pd 7 | import qis as qis 8 | from scipy.optimize import minimize 9 | from typing import List, Dict 10 | 11 | # optimalportfolios 12 | from 
optimalportfolios.utils.portfolio_funcs import calculate_diversification_ratio 13 | from optimalportfolios.utils.filter_nans import filter_covar_and_vectors_for_nans 14 | from optimalportfolios.optimization.constraints import Constraints 15 | from optimalportfolios.covar_estimation.covar_estimator import CovarEstimator 16 | 17 | 18 | def rolling_maximise_diversification(prices: pd.DataFrame, 19 | constraints0: Constraints, 20 | time_period: qis.TimePeriod, # when we start building portfolios 21 | covar_dict: Dict[pd.Timestamp, pd.DataFrame] = None, # can be precomputed 22 | covar_estimator: CovarEstimator = CovarEstimator() # default EWMA estimator 23 | ) -> pd.DataFrame: 24 | """ 25 | compute rolling maximum diversification portfolios 26 | covar_dict: Dict[timestamp, covar matrix] can be precomputed 27 | portolio is rebalances at covar_dict.keys() 28 | """ 29 | 30 | if covar_dict is None: # use default ewm covar with covar_estimator 31 | covars = covar_estimator.fit_rolling_covars(prices=prices, time_period=time_period) 32 | covar_dict = covars.y_covars 33 | 34 | weights = {} 35 | weights_0 = None 36 | for date, pd_covar in covar_dict.items(): 37 | weights_ = wrapper_maximise_diversification(pd_covar=pd_covar, 38 | constraints0=constraints0, 39 | weights_0=weights_0) 40 | weights_0 = weights_ # update for next rebalancing 41 | weights[date] = weights_ 42 | 43 | weights = pd.DataFrame.from_dict(weights, orient='index') 44 | weights = weights.reindex(columns=prices.columns.to_list()) 45 | return weights 46 | 47 | 48 | def wrapper_maximise_diversification(pd_covar: pd.DataFrame, 49 | constraints0: Constraints, 50 | weights_0: pd.Series = None 51 | ) -> pd.Series: 52 | """ 53 | create wrapper accounting for nans or zeros in covar matrix 54 | assets in columns/rows of covar must correspond to alphas.index 55 | """ 56 | # filter out assets with zero variance or nans 57 | vectors = None 58 | clean_covar, good_vectors = filter_covar_and_vectors_for_nans(pd_covar=pd_covar, 
vectors=vectors) 59 | 60 | constraints = constraints0.update_with_valid_tickers(valid_tickers=clean_covar.columns.to_list(), 61 | total_to_good_ratio=len(pd_covar.columns) / len(clean_covar.columns), 62 | weights_0=weights_0) 63 | 64 | weights = opt_maximise_diversification(covar=clean_covar.to_numpy(), 65 | constraints=constraints) 66 | weights = pd.Series(weights, index=clean_covar.columns) 67 | weights = weights.reindex(index=pd_covar.columns).fillna(0.0) # align with tickers 68 | return weights 69 | 70 | 71 | def opt_maximise_diversification(covar: np.ndarray, 72 | constraints: Constraints, 73 | verbose: bool = False 74 | ) -> np.ndarray: 75 | n = covar.shape[0] 76 | x0 = np.ones(n) / n 77 | 78 | constraints_ = constraints.set_scipy_constraints(covar=covar) 79 | res = minimize(max_diversification_objective, x0, args=[covar], method='SLSQP', 80 | constraints=constraints_, 81 | options={'disp': verbose, 'ftol': 1e-18, 'maxiter': 200}) 82 | optimal_weights = res.x 83 | if optimal_weights is None: 84 | # raise ValueError(f"not solved") 85 | print(f"not solved") 86 | if constraints.weights_0 is not None: 87 | optimal_weights = constraints.weights_0 88 | print(f"using weights_0") 89 | else: 90 | optimal_weights = np.zeros(n) 91 | print(f"using zeroweights") 92 | 93 | else: 94 | if constraints.is_long_only: 95 | optimal_weights = np.where(optimal_weights > 0.0, optimal_weights, 0.0) 96 | 97 | return optimal_weights 98 | 99 | 100 | def max_diversification_objective(w: np.ndarray, pars: List[np.ndarray]) -> float: 101 | covar = pars[0] 102 | return -calculate_diversification_ratio(w=w, covar=covar) 103 | -------------------------------------------------------------------------------- /optimalportfolios/optimization/solvers/target_return.py: -------------------------------------------------------------------------------- 1 | """ 2 | optimise alpha with targeting return 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import cvxpy as cvx 7 | import qis as qis 8 
from typing import Optional, Dict

from optimalportfolios import filter_covar_and_vectors_for_nans
from optimalportfolios.optimization.constraints import Constraints
from optimalportfolios.covar_estimation.rolling_covar import estimate_rolling_ewma_covar


def rolling_maximise_alpha_with_target_return(prices: pd.DataFrame,
                                              alphas: pd.DataFrame,
                                              yields: pd.DataFrame,
                                              target_returns: pd.Series,
                                              constraints0: Constraints,
                                              time_period: qis.TimePeriod,  # when we start building portfolios
                                              covar_dict: Dict[pd.Timestamp, pd.DataFrame] = None,  # can be precomputed
                                              returns_freq: str = 'W-WED',
                                              rebalancing_freq: str = 'QE',
                                              span: int = 52,  # 1y
                                              squeeze_factor: Optional[float] = None,  # for squeezing covar matrix
                                              solver: str = 'ECOS_BB',
                                              verbose: bool = False
                                              ) -> pd.DataFrame:
    """
    maximise portfolio alpha subject to meeting the target portfolio return
    (docstring corrected: it previously said "tracking error", a copy/paste
    from the tracking-error solver)
    covar_dict: Dict[timestamp, covar matrix] can be precomputed; the portfolio
    is rebalanced at covar_dict.keys(), and alphas / yields / target_returns
    are forward-filled onto that schedule
    returns a DataFrame of weights indexed by rebalancing dates
    """
    if covar_dict is None:  # use default ewm covar
        covar_dict = estimate_rolling_ewma_covar(prices=prices,
                                                 time_period=time_period,
                                                 returns_freq=returns_freq,
                                                 rebalancing_freq=rebalancing_freq,
                                                 span=span,
                                                 squeeze_factor=squeeze_factor)

    # create rebalancing schedule: it must match idx in covar_tensor_txy using returns.index
    rebalancing_schedule = list(covar_dict.keys())
    alphas = alphas.reindex(index=rebalancing_schedule, method='ffill')
    yields = yields.reindex(index=rebalancing_schedule, method='ffill')
    target_returns = target_returns.reindex(index=rebalancing_schedule, method='ffill')

    weights = {}
    weights_0 = None
    for date, pd_covar in covar_dict.items():

        if verbose:
            print(f"date={date}")
            print(f"pd_covar=\n{pd_covar}")
            print(f"alphas=\n{alphas.loc[date, :]}")
            print(f"yields=\n{yields.loc[date, :]}")
            print(f"target_return=\n{target_returns[date]}")

        # call optimiser
        weights_ =
wrapper_maximise_alpha_with_target_return(pd_covar=pd_covar, 59 | alphas=alphas.loc[date, :], 60 | yields=yields.loc[date, :], 61 | target_return=target_returns[date], 62 | constraints0=constraints0, 63 | weights_0=weights_0, 64 | solver=solver) 65 | 66 | weights_0 = weights_ # update for next rebalancing 67 | weights[date] = weights_ 68 | 69 | weights = pd.DataFrame.from_dict(weights, orient='index') 70 | weights = weights.reindex(columns=prices.columns).fillna(0.0) # align with tickers 71 | return weights 72 | 73 | 74 | def wrapper_maximise_alpha_with_target_return(pd_covar: pd.DataFrame, 75 | alphas: pd.Series, 76 | yields: pd.Series, 77 | target_return: float, 78 | constraints0: Constraints, 79 | weights_0: pd.Series = None, 80 | solver: str = 'ECOS_BB' 81 | ) -> pd.Series: 82 | """ 83 | create wrapper accounting for nans or zeros in covar matrix 84 | assets in columns/rows of covar must correspond to alphas.index 85 | """ 86 | # filter out assets with zero variance or nans 87 | vectors = dict(alphas=alphas) 88 | clean_covar, good_vectors = filter_covar_and_vectors_for_nans(pd_covar=pd_covar, vectors=vectors) 89 | 90 | constraints = constraints0.update_with_valid_tickers(valid_tickers=clean_covar.columns.to_list(), 91 | total_to_good_ratio=len(pd_covar.columns) / len(clean_covar.columns), 92 | weights_0=weights_0, 93 | asset_returns=yields, 94 | target_return=target_return) 95 | 96 | weights = cvx_maximise_alpha_with_target_return(covar=clean_covar.to_numpy(), 97 | alphas=good_vectors['alphas'].to_numpy(), 98 | constraints=constraints, 99 | solver=solver) 100 | weights[np.isinf(weights)] = 0.0 101 | weights = pd.Series(weights, index=clean_covar.index) 102 | weights = weights.reindex(index=pd_covar.index).fillna(0.0) # align with tickers 103 | 104 | return weights 105 | 106 | 107 | def cvx_maximise_alpha_with_target_return(covar: np.ndarray, 108 | alphas: np.ndarray, 109 | constraints: Constraints, 110 | verbose: bool = False, 111 | solver: str = 'ECOS_BB' 112 
| ) -> np.ndarray: 113 | """ 114 | numpy level one step solution of problem 115 | max alpha @ w 116 | such that 117 | yields @ w = target return 118 | sum(w) = 1 # exposure constraint 119 | w >= 0 # long only constraint 120 | w.T @ Sigma @ w <= vol_constraint 121 | """ 122 | # set up problem 123 | n = covar.shape[0] 124 | if constraints.is_long_only: 125 | nonneg = True 126 | else: 127 | nonneg = False 128 | w = cvx.Variable(n, nonneg=nonneg) 129 | # covar = cvx.psd_wrap(covar) 130 | 131 | # set solver 132 | objective_fun = alphas.T @ w 133 | objective = cvx.Maximize(objective_fun) 134 | constraints_ = constraints.set_cvx_constraints(w=w, covar=covar) 135 | problem = cvx.Problem(objective, constraints_) 136 | problem.solve(verbose=verbose, solver=solver) 137 | 138 | optimal_weights = w.value 139 | if optimal_weights is None: 140 | # raise ValueError(f"not solved") 141 | print(f"not solved") 142 | if constraints.weights_0 is not None: 143 | optimal_weights = constraints.weights_0.to_numpy() 144 | print(f"using weights_0") 145 | else: 146 | optimal_weights = np.zeros(n) 147 | print(f"using zeroweights") 148 | 149 | return optimal_weights 150 | -------------------------------------------------------------------------------- /optimalportfolios/optimization/wrapper_rolling_portfolios.py: -------------------------------------------------------------------------------- 1 | """ 2 | linking engine to different optimisation routines 3 | """ 4 | # packages 5 | import pandas as pd 6 | import qis as qis 7 | from typing import Optional, Dict 8 | # optimalportfolios 9 | import optimalportfolios as opt 10 | from optimalportfolios.covar_estimation.covar_estimator import CovarEstimator, CovarEstimatorType 11 | from optimalportfolios.optimization.constraints import Constraints 12 | from optimalportfolios.config import PortfolioObjective 13 | 14 | 15 | def compute_rolling_optimal_weights(prices: pd.DataFrame, 16 | constraints0: Constraints, 17 | time_period: qis.TimePeriod, 18 | 
portfolio_objective: PortfolioObjective = PortfolioObjective.MAX_DIVERSIFICATION, 19 | covar_dict: Dict[pd.Timestamp, pd.DataFrame] = None, # can be precomputed 20 | covar_estimator: CovarEstimator = None, 21 | risk_budget: pd.Series = None, 22 | returns_freq: Optional[str] = 'W-WED', # returns freq 23 | rebalancing_freq: str = 'QE', # portfolio rebalancing 24 | span: int = 52, # ewma span for covariance matrix estimation 25 | roll_window: int = 20, # linked to returns at rebalancing_freq 26 | carra: float = 0.5, # carra parameters 27 | n_mixures: int = 3 28 | ) -> pd.DataFrame: 29 | """ 30 | wrapper function that links implemented optimisation solvers optimisation methods 31 | for portfolio_objective in config.PortfolioObjective 32 | covar_dict: Dict[timestamp, covar matrix] can be precomputed 33 | portolio is rebalances at covar_dict.keys() 34 | """ 35 | if covar_estimator is None: 36 | covar_estimator = CovarEstimator(returns_freqs=returns_freq, rebalancing_freq=rebalancing_freq, span=span, 37 | covar_estimator_type=CovarEstimatorType.EWMA) 38 | if portfolio_objective == PortfolioObjective.EQUAL_RISK_CONTRIBUTION: 39 | weights = opt.rolling_risk_budgeting(prices=prices, 40 | constraints0=constraints0, 41 | time_period=time_period, 42 | covar_dict=covar_dict, 43 | risk_budget=risk_budget, 44 | covar_estimator=covar_estimator) 45 | 46 | elif portfolio_objective == PortfolioObjective.MAX_DIVERSIFICATION: 47 | weights = opt.rolling_maximise_diversification(prices=prices, 48 | constraints0=constraints0, 49 | time_period=time_period, 50 | covar_dict=covar_dict, 51 | covar_estimator=covar_estimator) 52 | 53 | elif portfolio_objective in [PortfolioObjective.MIN_VARIANCE, PortfolioObjective.QUADRATIC_UTILITY]: 54 | weights = opt.rolling_quadratic_optimisation(prices=prices, 55 | constraints0=constraints0, 56 | portfolio_objective=portfolio_objective, 57 | time_period=time_period, 58 | covar_dict=covar_dict, 59 | covar_estimator=covar_estimator, 60 | carra=carra) 61 | 62 
| elif portfolio_objective == PortfolioObjective.MAXIMUM_SHARPE_RATIO: 63 | weights = opt.rolling_maximize_portfolio_sharpe(prices=prices, 64 | constraints0=constraints0, 65 | time_period=time_period, 66 | returns_freq=returns_freq, 67 | rebalancing_freq=rebalancing_freq, 68 | span=span, 69 | roll_window=roll_window) 70 | 71 | elif portfolio_objective == PortfolioObjective.MAX_CARA_MIXTURE: 72 | weights = opt.rolling_maximize_cara_mixture(prices=prices, 73 | constraints0=constraints0, 74 | time_period=time_period, 75 | returns_freq=returns_freq, 76 | rebalancing_freq=rebalancing_freq, 77 | carra=carra, 78 | n_components=n_mixures, 79 | roll_window=roll_window) 80 | 81 | else: 82 | raise NotImplementedError(f"{portfolio_objective}") 83 | 84 | return weights 85 | 86 | 87 | def backtest_rolling_optimal_portfolio(prices: pd.DataFrame, 88 | constraints0: Constraints, 89 | time_period: qis.TimePeriod, # for computing weights 90 | covar_dict: Dict[pd.Timestamp, pd.DataFrame] = None, # can be precomputed 91 | perf_time_period: qis.TimePeriod = None, # for computing performance 92 | portfolio_objective: PortfolioObjective = PortfolioObjective.MAX_DIVERSIFICATION, 93 | returns_freq: Optional[str] = 'W-WED', # returns freq 94 | rebalancing_freq: str = 'QE', # portfolio rebalancing 95 | span: int = 52, # ewma span for covariance matrix estimation 96 | roll_window: int = 6*52, # linked to returns at rebalancing_freq: 6y of weekly returns 97 | carra: float = 0.5, # carra parameter 98 | n_mixures: int = 3, # for mixture carra utility 99 | ticker: str = None, 100 | rebalancing_costs: float = 0.0010, # 10 bp 101 | weight_implementation_lag: Optional[int] = None # = 1 for daily data 102 | ) -> qis.PortfolioData: 103 | """ 104 | compute solvers portfolio weights and return portfolio data 105 | weight_implementation_lag: Optional[int] = None # = 1 for daily data otherwise skip 106 | covar_dict: Dict[timestamp, covar matrix] can be precomputed 107 | portolio is rebalances at 
covar_dict.keys() 108 | """ 109 | weights = compute_rolling_optimal_weights(prices=prices, 110 | time_period=time_period, 111 | constraints0=constraints0, 112 | covar_dict=covar_dict, 113 | portfolio_objective=portfolio_objective, 114 | returns_freq=returns_freq, 115 | rebalancing_freq=rebalancing_freq, 116 | span=span, 117 | carra=carra, 118 | roll_window=roll_window, 119 | n_mixures=n_mixures) 120 | 121 | # make sure price exists for the first weight date: can happen when the first weight date falls on weekend 122 | if perf_time_period is not None: 123 | weights = perf_time_period.locate(weights) 124 | prices_ = qis.truncate_prior_to_start(df=prices, start=weights.index[0]) 125 | portfolio_out = qis.backtest_model_portfolio(prices=prices_, 126 | weights=weights, 127 | rebalancing_costs=rebalancing_costs, 128 | weight_implementation_lag=weight_implementation_lag, 129 | ticker=ticker) 130 | return portfolio_out 131 | -------------------------------------------------------------------------------- /optimalportfolios/reports/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArturSepp/OptimalPortfolios/32c384c6cc4c6d050fd115b1f2cfd46859c3a2c1/optimalportfolios/reports/__init__.py -------------------------------------------------------------------------------- /optimalportfolios/reports/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | set common configuration for different reports 3 | """ 4 | 5 | from qis import PerfStat, PerfParams, BenchmarkReturnsQuantileRegimeSpecs 6 | 7 | BENCHMARK_TABLE_COLUMNS2 = (PerfStat.PA_RETURN, 8 | PerfStat.VOL, 9 | PerfStat.SHARPE_EXCESS, 10 | PerfStat.MAX_DD, 11 | # PerfStat.MAX_DD_VOL, 12 | PerfStat.BEST, 13 | PerfStat.WORST, 14 | PerfStat.SKEWNESS, 15 | PerfStat.ALPHA_AN, 16 | PerfStat.BETA, 17 | PerfStat.R2) 18 | 19 | DATE_FORMAT = '%d%b%Y' 20 | FIG_SIZE1 = (14, 3) # one figure for whole page 21 | 
FIG_SIZE11 = (4.65, 2.35) # one figure for half page 22 | FIG_SIZE11_2 = (4.70, 0.95) 23 | FIG_SIZE11_2a = (4.70, 0.6) 24 | 25 | 26 | PERF_PARAMS = PerfParams(freq_vol='ME', freq_reg='ME', freq_drawdown='ME', alpha_an_factor=12) 27 | 28 | REGIME_PARAMS = BenchmarkReturnsQuantileRegimeSpecs(freq='ME') 29 | 30 | KWARGS = dict(fontsize=7, 31 | linewidth=0.5, 32 | digits_to_show=1, sharpe_digits=2, 33 | weight='normal', 34 | markersize=2, 35 | framealpha=0.8, 36 | date_format='%b-%y', 37 | trend_line_colors=['darkred'], 38 | trend_linewidth=2.0, 39 | x_date_freq='QE', 40 | short=True) 41 | 42 | # for py blocks 43 | margin_top = 0.0 44 | margin_bottom = 0.0 45 | line_height = 1.0 46 | font_family = 'Calibri' 47 | 48 | KWARGS_SUPTITLE = {'title_wrap': True, 'text_align': 'center', 'color': 'blue', 'font_size': "12px", 'font-weight': 'normal', 49 | 'title_level': 1, 'line_height': 0.7, 'inherit_cfg': False, 50 | 'margin_top': 0, 'margin_bottom': 0, 51 | 'font-family': 'sans-serif'} 52 | KWARGS_TITLE = {'title_wrap': True, 'text_align': 'left', 'color': 'blue', 'font_size': "12px", 53 | 'title_level': 2, 'line_height': line_height, 'inherit_cfg': False, 54 | 'margin_top': margin_top, 'margin_bottom': margin_bottom, 55 | 'font-family': font_family} 56 | KWARGS_DESC = {'title_wrap': True, 'text_align': 'left', 'font_size': "12px", 'font-weight': 'normal', 57 | 'title_level': 3, 'line_height': line_height, 'inherit_cfg': False, 58 | 'margin_top': margin_top, 'margin_bottom': margin_bottom, 59 | 'font-family': font_family} 60 | KWARGS_TEXT = {'title_wrap': True, 'text_align': 'left', 'font_size': "12px", 'font-weight': 'normal', 61 | 'title_level': 3, 'line_height': line_height, 'inherit_cfg': False, 62 | 'margin_top': margin_top, 'margin_bottom': margin_bottom, 63 | 'font-family': font_family} 64 | KWARGS_FIG = {'title_wrap': True, 'text_align': 'left', 'font_size': "12px", 65 | 'title_level': 3, 'line_height': line_height, 'inherit_cfg': False, 66 | 'margin_top': margin_top, 
'margin_bottom': margin_bottom, 67 | 'font-family': font_family} 68 | KWARGS_FOOTNOTE = {'title_wrap': True, 'text_align': 'left', 'font_size': "12px", 'font-weight': 'normal', 69 | 'title_level': 8, 'line_height': line_height, 'inherit_cfg': False, 70 | 'margin_top': 0, 'margin_bottom': 0, 71 | 'font-family': font_family} 72 | 73 | RA_TABLE_FOOTNOTE = (u"\u002A" + f"Vol (annualized volatility) and Skew (Skeweness) are computed using daily returns, " 74 | f"Sharpe is computed assuming zero risk-free rate, " 75 | f"Max DD is maximum drawdown, " 76 | f"Best and Worst are the highest and lowest daily returns, " 77 | f"Alpha (annualized daily alpha), Beta, R2 (R squared) are estimated using regression " 78 | f"of daily returns explained by underlying coin") 79 | -------------------------------------------------------------------------------- /optimalportfolios/settings.yaml: -------------------------------------------------------------------------------- 1 | # set pc/developer local paths 2 | # add to gitignore afterwards 3 | # set paths to dbs 4 | 5 | 6 | RESOURCE_PATH: 7 | "..\\" 8 | 9 | LOCAL_RESOURCE_PATH: 10 | "..\\" 11 | 12 | UNIVERSE_PATH: 13 | "..\\" 14 | 15 | OUTPUT_PATH: 16 | "..\\" 17 | 18 | AWS_POSTGRES: 19 | "" 20 | -------------------------------------------------------------------------------- /optimalportfolios/test_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | implement test data for optimisations 3 | use update and save data for speed-up of test cases 4 | """ 5 | 6 | # imports 7 | import pandas as pd 8 | import yfinance as yf 9 | import qis 10 | import optimalportfolios.local_path as local_path 11 | from enum import Enum 12 | 13 | FILE_NAME = 'test_prices' 14 | 15 | UNIVERSE_DATA = dict(SPY='Equities', 16 | QQQ='Equities', 17 | EEM='Equities', 18 | TLT='Bonds', 19 | IEF='Bonds', 20 | LQD='Credit', 21 | HYG='HighYield', 22 | GLD='Gold') 23 | 24 | 25 | def update_test_prices() -> pd.DataFrame: 26 | 
tickers = list(UNIVERSE_DATA.keys()) 27 | prices = yf.download(tickers=tickers, start="2003-12-31", end=None, ignore_tz=True, auto_adjust=True) 28 | prices = prices['Close'] 29 | prices = prices.asfreq('B', method='ffill') # rescale to business days 30 | prices = prices[tickers] # align order 31 | qis.save_df_to_csv(df=prices, file_name=FILE_NAME, local_path=local_path.get_resource_path()) 32 | return prices 33 | 34 | 35 | def load_test_data() -> pd.DataFrame: 36 | prices = qis.load_df_from_csv(file_name=FILE_NAME, local_path=local_path.get_resource_path()) 37 | return prices 38 | 39 | 40 | class LocalTests(Enum): 41 | UPDATE_TEST_PRICES = 1 42 | LOAD_TEST_PRICES = 2 43 | 44 | 45 | def run_local_test(local_test: LocalTests): 46 | """Run local tests for development and debugging purposes. 47 | 48 | These are integration tests that download real data and generate reports. 49 | Use for quick verification during development. 50 | """ 51 | 52 | if local_test == LocalTests.UPDATE_TEST_PRICES: 53 | prices = update_test_prices() 54 | print(prices) 55 | 56 | elif local_test == LocalTests.LOAD_TEST_PRICES: 57 | prices = load_test_data() 58 | print(prices) 59 | 60 | 61 | if __name__ == '__main__': 62 | 63 | run_local_test(local_test=LocalTests.UPDATE_TEST_PRICES) 64 | -------------------------------------------------------------------------------- /optimalportfolios/utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from optimalportfolios.utils.filter_nans import (filter_covar_and_vectors, 3 | filter_covar_and_vectors_for_nans) 4 | 5 | from optimalportfolios.utils.portfolio_funcs import (compute_portfolio_vol, 6 | compute_tre_turnover_stats) 7 | 8 | from optimalportfolios.utils.portfolio_funcs import (compute_portfolio_variance, 9 | calculate_diversification_ratio, 10 | compute_portfolio_risk_contribution_outputs) 11 | 12 | from optimalportfolios.utils.gaussian_mixture import fit_gaussian_mixture 13 | 14 | from 
"""
When we roll optimisation in time, we need to filter our data for nans;
utilities to produce a clean covariance matrix and aligned vectors.
"""
import pandas as pd
import numpy as np
from typing import Dict, Tuple, Optional


def filter_covar_and_vectors(covar: np.ndarray,
                             tickers: pd.Index,
                             vectors: Optional[Dict[str, pd.Series]] = None
                             ) -> Tuple[pd.DataFrame, Optional[Dict[str, pd.Series]]]:
    """
    Filter out assets with zero (or negative) variance or nan variance.

    Args:
        covar: square covariance matrix aligned with tickers
        tickers: asset names for the rows/columns of covar
        vectors: optional named vectors (means, max weights, etc.) filtered consistently

    Returns:
        filtered covariance as DataFrame and filtered vectors (None if vectors is None)
    """
    covar_pd = pd.DataFrame(covar, index=tickers, columns=tickers)
    variances = np.diag(covar)
    # keep an asset when its variance is strictly positive and not nan
    is_good_asset = np.logical_and(np.greater(variances, 0.0), ~np.isnan(variances))
    good_tickers = tickers[is_good_asset]
    covar_pd = covar_pd.loc[good_tickers, good_tickers]
    if vectors is not None:
        good_vectors = {key: vector[good_tickers] for key, vector in vectors.items()}
    else:
        good_vectors = None
    return covar_pd, good_vectors


def filter_covar_and_vectors_for_nans(pd_covar: pd.DataFrame,
                                      vectors: Optional[Dict[str, pd.Series]] = None,
                                      inclusion_indicators: Optional[pd.Series] = None
                                      ) -> Tuple[pd.DataFrame, Optional[Dict[str, pd.Series]]]:
    """
    Filter out assets with (near) zero variance or nan variance.

    Args:
        pd_covar: covariance matrix with tickers as index/columns
        vectors: optional named pd.Series (means, max weights, etc.) filtered
            consistently; entries that are None are skipped
        inclusion_indicators: ones for assets eligible for allocation; assets whose
            indicator is not ~1.0 are excluded even if their variance is valid

    Returns:
        filtered covariance and filtered vectors (nans in vectors replaced with 0.0)

    Raises:
        TypeError: if a non-None vector is not a pd.Series
    """
    variances = np.diag(pd_covar.to_numpy())
    # small positive threshold also drops numerically degenerate assets
    is_good_asset = np.logical_and(np.greater(variances, 1e-8), ~np.isnan(variances))
    if inclusion_indicators is not None:
        is_included = inclusion_indicators.loc[pd_covar.columns].to_numpy()
        is_good_asset = np.logical_and(is_good_asset, np.isclose(is_included, 1.0))

    good_tickers = pd_covar.index[is_good_asset]
    pd_covar = pd_covar.loc[good_tickers, good_tickers]
    if vectors is not None:
        good_vectors = {}
        for key, vector in vectors.items():
            if vector is not None:
                if isinstance(vector, pd.Series):
                    good_vectors[key] = vector[good_tickers].fillna(0.0)
                else:
                    raise TypeError(f"vector must be pd.Series not type={type(vector)}")
    else:
        good_vectors = None
    return pd_covar, good_vectors
def compute_joint_alphas(prices: pd.DataFrame,
                         benchmark_price: pd.Series,
                         risk_factors_prices: pd.DataFrame,
                         alpha_beta_type: pd.Series,
                         rebalancing_freq: Union[str, pd.Series],
                         estimated_betas: Dict[pd.Timestamp, pd.DataFrame],
                         group_data_alphas: Optional[pd.Series],
                         beta_span: int = 12,
                         momentum_long_span: int = 12,
                         managers_alpha_span: int = 12,
                         return_annualisation_freq_dict: Optional[Dict[str, float]] = None
                         ) -> AlphasData:
    """
    Compute joint alpha scores for a multi-asset universe.

    Assets are split by alpha_beta_type:
    1) 'Beta' assets get momentum / low-beta scores;
    2) 'Alpha' assets (managers) get regression alphas vs risk factors.

    Args:
        prices: asset prices for the full universe
        benchmark_price: benchmark used for beta/momentum computation
        risk_factors_prices: factor prices for managers' regression alphas
        alpha_beta_type: per-asset label, either 'Beta' or 'Alpha'
        rebalancing_freq: single freq or per-asset freqs
        estimated_betas: per-date factor betas for regression alphas
        group_data_alphas: optional grouping for beta-type alphas
        beta_span: ewm span for betas (12 for monthly rebalancing_freq)
        momentum_long_span: ewm span for momentum
        managers_alpha_span: ewm span for smoothing managers' excess returns
        return_annualisation_freq_dict: freq -> annualisation factor; defaults to
            {'ME': 12.0, 'QE': 4.0} when None

    Returns:
        AlphasData with merged scores and the intermediate outputs

    Raises:
        ValueError: if alpha_beta_type marks no asset as 'Beta' or 'Alpha'
    """
    # avoid a shared mutable dict as default argument: resolve the default here
    if return_annualisation_freq_dict is None:
        return_annualisation_freq_dict = {'ME': 12.0, 'QE': 4.0}

    # 1. compute momentum and low betas for 'Beta'-type assets
    beta_assets = alpha_beta_type.loc[alpha_beta_type == 'Beta'].index.to_list()
    if len(beta_assets) > 0:
        if group_data_alphas is not None:
            group_data_alphas = group_data_alphas.loc[beta_assets]
        alpha_scores, momentum, beta, momentum_score, beta_score = wrapper_compute_low_beta_alphas(
            prices=prices[beta_assets],
            benchmark_price=benchmark_price,
            rebalancing_freq=rebalancing_freq,
            group_data_alphas=group_data_alphas,
            beta_span=beta_span,
            momentum_long_span=momentum_long_span)
    else:
        alpha_scores, momentum, beta, momentum_score, beta_score = None, None, None, None, None

    # 2. compute regression alphas for 'Alpha'-type assets (managers)
    alpha_assets = alpha_beta_type.loc[alpha_beta_type == 'Alpha'].index.to_list()
    if len(alpha_assets) > 0:
        excess_returns = wrapper_estimate_regression_alphas(
            prices=prices[alpha_assets],
            risk_factors_prices=risk_factors_prices,
            estimated_betas=estimated_betas,
            rebalancing_freq=rebalancing_freq,
            return_annualisation_freq_dict=return_annualisation_freq_dict)
        managers_alphas = qis.compute_ewm(data=excess_returns, span=managers_alpha_span)
        # cross-sectional standardisation of managers' smoothed alphas
        managers_scores = managers_alphas.divide(np.nanstd(managers_alphas, axis=1, keepdims=True))
    else:
        managers_alphas = None
        managers_scores = None

    # merge: align managers' scores to the beta-score index when both are present
    if alpha_scores is not None and managers_scores is not None:
        managers_scores = managers_scores.reindex(index=alpha_scores.index).ffill()
        alpha_scores = pd.concat([alpha_scores, managers_scores], axis=1)
        alpha_scores = alpha_scores.reindex(columns=prices.columns)
    elif alpha_scores is None and managers_scores is not None:
        alpha_scores = managers_scores
    elif alpha_scores is None and managers_scores is None:
        # fail loudly instead of AttributeError on None.fillna() below
        raise ValueError("alpha_beta_type must mark at least one asset as 'Beta' or 'Alpha'")
    alpha_scores = alpha_scores.fillna(0.0)
    alphas = AlphasData(alpha_scores=alpha_scores,
                        beta=beta,
                        momentum=momentum,
                        managers_alphas=managers_alphas,
                        momentum_score=momentum_score,
                        beta_score=beta_score,
                        managers_scores=managers_scores)
    return alphas
def compute_tre_turnover_stats(covar: np.ndarray,
                               benchmark_weights: pd.Series,
                               weights: pd.Series,
                               weights_0: pd.Series,
                               alphas: Optional[pd.Series] = None
                               ) -> Tuple[float, float, float, float, float]:
    """
    Compute tracking-error and turnover statistics of weights vs benchmark_weights.

    Args:
        covar: covariance matrix aligned with the weight vectors
        benchmark_weights: benchmark portfolio weights
        weights: current (new) portfolio weights
        weights_0: previous portfolio weights, used for turnover
        alphas: optional per-asset alphas; portfolio alpha is 0.0 when omitted

    Returns:
        tuple (te_vol, turnover, port_alpha, port_vol, benchmark_vol)
    """
    weight_diff = weights.subtract(benchmark_weights)
    benchmark_vol = np.sqrt(benchmark_weights @ covar @ benchmark_weights.T)
    port_vol = np.sqrt(weights @ covar @ weights.T)
    # tracking error is the vol of the active weights
    te_vol = np.sqrt(weight_diff @ covar @ weight_diff.T)
    # one-sided turnover: sum of absolute weight changes vs previous weights
    turnover = np.nansum(np.abs(weights.subtract(weights_0)))
    if alphas is not None:
        port_alpha = alphas @ weights
    else:
        port_alpha = 0.0
    return te_vol, turnover, port_alpha, port_vol, benchmark_vol
compute_portfolio_risk_contributions(weights.to_numpy(), clean_covar.to_numpy()) 66 | asset_rc_ratio = asset_rc / np.nansum(asset_rc) 67 | if risk_budget is None: 68 | risk_budget = pd.Series(0.0, index=clean_covar.columns) 69 | df = pd.concat([pd.Series(weights, index=clean_covar.columns, name='weights'), 70 | pd.Series(asset_rc, index=clean_covar.columns, name='risk contribution'), 71 | risk_budget.rename('Risk Budget'), 72 | pd.Series(asset_rc_ratio, index=clean_covar.columns, name='asset_rc_ratio') 73 | ], axis=1) 74 | return df 75 | 76 | -------------------------------------------------------------------------------- /optimalportfolios/utils/returns_unsmoother.py: -------------------------------------------------------------------------------- 1 | """ 2 | returns unsmoothing using AR-1 betas 3 | """ 4 | import numpy as np 5 | import pandas as pd 6 | import qis as qis 7 | from typing import Optional, Tuple 8 | 9 | 10 | def adjust_returns_with_ar1(returns: pd.DataFrame, 11 | span: int = 20, 12 | mean_adj_type: qis.MeanAdjType = qis.MeanAdjType.NONE, 13 | warmup_period: Optional[int] = 10, 14 | max_value_for_beta: Optional[float] = 0.75, 15 | apply_ewma_mean_smoother: bool = True 16 | ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]: 17 | """ 18 | estimate: x_{t} = beta * x_{t-1} 19 | compute: usmoothed value = (x_{t} - beta * x_{t-1}) / ( 1.0 - beta) 20 | beta is clipped to make it stationary 21 | """ 22 | x = returns.shift(1) 23 | betas, _, _, _, _, ewm_r2 = qis.compute_ewm_beta_alpha_forecast(x_data=x, 24 | y_data=returns, 25 | mean_adj_type=mean_adj_type, 26 | span=span) 27 | if max_value_for_beta is not None: 28 | betas = betas.clip(lower=0.0, upper=max_value_for_beta) 29 | if apply_ewma_mean_smoother: 30 | betas = qis.compute_ewm(data=betas, span=span) 31 | 32 | if warmup_period is not None: # set to nan first nonnan in warmup_period and backfill from the first available beta 33 | betas = qis.set_nans_for_warmup_period(a=betas, 
def compute_ar1_unsmoothed_prices(prices: pd.DataFrame,
                                  freq: str = 'QE',
                                  span: int = 20,
                                  mean_adj_type: qis.MeanAdjType = qis.MeanAdjType.NONE,
                                  warmup_period: Optional[int] = 8,
                                  max_value_for_beta: Optional[float] = 0.5,
                                  is_log_returns: bool = True
                                  ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """
    Unsmooth prices at the given frequency using AR(1) betas and rebuild navs.

    Returns:
        tuple (navs, unsmoothed returns, ar1 betas, ewm r-squared)
    """
    sampled_returns = qis.to_returns(prices, freq=freq, drop_first=False, is_log_returns=is_log_returns)
    adjusted_returns, ar1_betas, ewm_r2 = adjust_returns_with_ar1(returns=sampled_returns,
                                                                 span=span,
                                                                 mean_adj_type=mean_adj_type,
                                                                 warmup_period=warmup_period,
                                                                 max_value_for_beta=max_value_for_beta)
    if is_log_returns:
        # map unsmoothed log-returns back to simple (compounded) returns
        adjusted_returns = np.expm1(adjusted_returns)
    navs = qis.returns_to_nav(returns=adjusted_returns)
    return navs, adjusted_returns, ar1_betas, ewm_r2
optimization", 22 | "quantitative finance", 23 | "optimal portfolios", 24 | "mean variance optimization", 25 | "risk budgeting", 26 | "portfolio backtesting", 27 | "financial optimization", 28 | "asset allocation", 29 | "portfolio analytics", 30 | "maximum diversification", 31 | "equal risk contribution", 32 | "minimum variance", 33 | "systematic strategies", 34 | "investment strategies", 35 | "covariance estimation", 36 | "EWMA", 37 | "Lasso regularization" 38 | ] 39 | classifiers = [ 40 | "Development Status :: 4 - Beta", 41 | "Intended Audience :: Developers", 42 | "Intended Audience :: Financial and Insurance Industry", 43 | "Intended Audience :: Science/Research", 44 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", 45 | "Operating System :: OS Independent", 46 | "Programming Language :: Python", 47 | "Programming Language :: Python :: 3", 48 | "Programming Language :: Python :: 3.8", 49 | "Programming Language :: Python :: 3.9", 50 | "Programming Language :: Python :: 3.10", 51 | "Programming Language :: Python :: 3.11", 52 | "Programming Language :: Python :: 3.12", 53 | "Programming Language :: Python :: 3.13", 54 | "Topic :: Office/Business :: Financial :: Investment", 55 | "Topic :: Scientific/Engineering :: Mathematics", 56 | "Topic :: Scientific/Engineering :: Information Analysis", 57 | "Topic :: Software Development :: Libraries :: Python Modules", 58 | ] 59 | requires-python = ">=3.8" 60 | 61 | # Core dependencies - always required for optimalportfolios to function 62 | dependencies = [ 63 | "numba>=0.60.0", 64 | "numpy==2.2.6", # for numba>=0.60.0 65 | "scipy>=1.15.0", 66 | "statsmodels>=0.14.0", 67 | "pandas>=2.3.1", 68 | "matplotlib>=3.9.0", 69 | "seaborn>=0.13.0", 70 | "openpyxl>=3.1.0", 71 | "PyYAML>=6.0", 72 | "easydev>=0.12.0", 73 | "pyarrow>=18.0.0", 74 | "fsspec>=2024.12.0", 75 | "yfinance>=0.2.65", 76 | "pandas-datareader>=0.10.0", 77 | "cvxpy>=1.7.0", 78 | "ecos>=2.0.0", 79 | "quadprog>=0.1.11", 80 | 
"scikit-learn>=1.7.0", 81 | "qis>=3.3.14", 82 | ] 83 | 84 | [project.optional-dependencies] 85 | # Report generation and document export 86 | reports = [ 87 | "pybloqs>=1.2.13", 88 | "jinja2>=3.0.0", 89 | ] 90 | 91 | # Enhanced visualization and interactive plots 92 | visualization = [ 93 | "plotly>=5.0.0", 94 | ] 95 | 96 | # Jupyter notebook support and interactive development 97 | jupyter = [ 98 | "jupyter>=1.0.0", 99 | "notebook>=6.5.0", 100 | "jupyterlab>=3.0.0", 101 | "ipykernel>=6.0.0", 102 | "ipywidgets>=8.0.0", 103 | ] 104 | 105 | # Development tools and testing 106 | dev = [ 107 | "pytest>=7.0.0", 108 | "pytest-cov>=4.0.0", 109 | "pytest-mock>=3.10.0", 110 | "pytest-xdist>=3.0.0", 111 | "black>=22.0.0", 112 | "flake8>=5.0.0", 113 | "mypy>=1.0.0", 114 | "isort>=5.10.0", 115 | "pre-commit>=3.0.0", 116 | ] 117 | 118 | # Performance profiling and optimization 119 | performance = [ 120 | "memory-profiler>=0.60.0", 121 | "line-profiler>=4.0.0", 122 | "py-spy>=0.3.0", 123 | "scalene>=1.5.0", 124 | ] 125 | 126 | # Database connectivity (core database packages now in dependencies) 127 | database = [ 128 | "SQLAlchemy>=2.0.0", 129 | "psycopg2>=2.9.5", 130 | ] 131 | 132 | # Advanced optimization solvers 133 | solvers = [ 134 | "cvxopt>=1.3.0", 135 | "mosek>=10.0.0", 136 | "gurobi>=11.0.0", 137 | "clarabel>=0.5.0", 138 | ] 139 | 140 | # Risk budgeting and advanced analytics 141 | riskbudgeting = [ 142 | "pyrb>=0.4.0", 143 | ] 144 | 145 | # All optional dependencies combined 146 | all = [ 147 | "optimalportfolios[reports,visualization,jupyter,performance,database,solvers,riskbudgeting]" 148 | ] 149 | 150 | # Minimal additional packages for most users 151 | extras = [ 152 | "optimalportfolios[reports,jupyter,solvers]" 153 | ] 154 | 155 | [project.urls] 156 | Homepage = "https://github.com/ArturSepp/OptimalPortfolios" 157 | Documentation = "https://github.com/ArturSepp/OptimalPortfolios/blob/master/README.md" 158 | Repository = 
"https://github.com/ArturSepp/OptimalPortfolios.git" 159 | Issues = "https://github.com/ArturSepp/OptimalPortfolios/issues" 160 | Changelog = "https://github.com/ArturSepp/OptimalPortfolios/blob/master/CHANGELOG.md" 161 | 162 | # Version is now manually set in [project] section above 163 | 164 | # Package discovery configuration 165 | [tool.setuptools.packages.find] 166 | include = ["optimalportfolios*", "pyrb*"] 167 | exclude = [ 168 | "tests*", 169 | "docs*", 170 | "optimalportfolios.examples.figures*", 171 | "optimalportfolios.examples.resources*", 172 | "notebooks*" 173 | ] 174 | 175 | # Include additional files in the package 176 | [tool.setuptools.package-data] 177 | optimalportfolios = [ 178 | "*.txt", 179 | "*.md", 180 | "*.yml", 181 | "*.yaml", 182 | "*.json", 183 | "data/*.csv", 184 | "templates/*.html", 185 | "static/*.css", 186 | "static/*.js" 187 | ] 188 | 189 | # Explicitly exclude files from the package 190 | [tool.setuptools.exclude-package-data] 191 | "*" = [ 192 | "*/examples/figures/*", 193 | "*/figures/*", 194 | "*.png", 195 | "*.jpg", 196 | "*.jpeg", 197 | "*.gif", 198 | "*.svg" 199 | ] 200 | 201 | # Black code formatting configuration 202 | [tool.black] 203 | line-length = 100 204 | target-version = ['py38', 'py39', 'py310', 'py311', 'py312'] 205 | include = '\.pyi?$' 206 | extend-exclude = ''' 207 | /( 208 | # directories 209 | \.eggs 210 | | \.git 211 | | \.hg 212 | | \.mypy_cache 213 | | \.tox 214 | | \.venv 215 | | build 216 | | dist 217 | )/ 218 | ''' 219 | 220 | # isort import sorting configuration 221 | [tool.isort] 222 | profile = "black" 223 | line_length = 100 224 | multi_line_output = 3 225 | include_trailing_comma = true 226 | force_grid_wrap = 0 227 | use_parentheses = true 228 | ensure_newline_before_comments = true 229 | 230 | # MyPy type checking configuration 231 | [tool.mypy] 232 | python_version = "3.8" 233 | warn_return_any = true 234 | warn_unused_configs = true 235 | disallow_untyped_defs = false 236 | 
disallow_incomplete_defs = false 237 | check_untyped_defs = true 238 | disallow_untyped_decorators = false 239 | no_implicit_optional = true 240 | warn_redundant_casts = true 241 | warn_unused_ignores = true 242 | warn_no_return = true 243 | warn_unreachable = true 244 | strict_equality = true 245 | 246 | # Pytest configuration 247 | [tool.pytest.ini_options] 248 | minversion = "7.0" 249 | addopts = "-ra -q --strict-markers --strict-config" 250 | testpaths = ["tests"] 251 | python_files = ["test_*.py", "*_test.py"] 252 | python_classes = ["Test*"] 253 | python_functions = ["test_*"] 254 | markers = [ 255 | "slow: marks tests as slow (deselect with '-m \"not slow\"')", 256 | "integration: marks tests as integration tests", 257 | "unit: marks tests as unit tests", 258 | "optimization: marks tests for optimization methods", 259 | "backtesting: marks tests for backtesting functionality", 260 | ] 261 | 262 | # Coverage configuration 263 | [tool.coverage.run] 264 | source = ["optimalportfolios"] 265 | omit = [ 266 | "*/tests/*", 267 | "*/test_*", 268 | "setup.py", 269 | "*/venv/*", 270 | "*/.venv/*", 271 | ] 272 | 273 | [tool.coverage.report] 274 | exclude_lines = [ 275 | "pragma: no cover", 276 | "def __repr__", 277 | "if self.debug:", 278 | "if settings.DEBUG", 279 | "raise AssertionError", 280 | "raise NotImplementedError", 281 | "if 0:", 282 | "if __name__ == .__main__.:", 283 | "class .*\\bProtocol\\):", 284 | "@(abc\\.)?abstractmethod", 285 | ] 286 | 287 | # Flake8 linting configuration 288 | [tool.flake8] 289 | max-line-length = 100 290 | extend-ignore = ["E203", "W503", "E501"] 291 | exclude = [ 292 | ".git", 293 | "__pycache__", 294 | "build", 295 | "dist", 296 | ".venv", 297 | "venv", 298 | ".eggs", 299 | "*.egg", 300 | ] -------------------------------------------------------------------------------- /pyrb/README.md: -------------------------------------------------------------------------------- 1 | 2 | This package is forked from pyrb package 
def to_column_matrix(x):
    """Return *x* as an (n, 1) column matrix; raise ValueError if x is not a vector."""
    # NOTE(review): np.matrix is deprecated in numpy, but downstream quadprog helpers
    # rely on its 2-d semantics, so it is kept as-is here.
    m = np.matrix(x)
    if m.shape[1] != 1:
        m = m.T
    if m.shape[1] != 1:
        # neither orientation yields a single column -> x was a 2-d matrix, not a vector
        raise ValueError("x is not a vector")
    return m


def to_array(x):
    """Flatten a row/column matrix to a 1-d array; None and 1-d inputs pass through."""
    if x is None:
        return None
    if len(x.shape) == 1:
        return x
    if x.shape[1] != 1:
        x = x.T
    return np.squeeze(np.asarray(x))
def check_covariance(cov):
    """Validate that cov is a square covariance matrix without missing values.

    Raises:
        ValueError: if cov is not square or contains nans
    """
    if cov.shape[0] != cov.shape[1]:
        raise ValueError("The covariance matrix is not squared")
    # np.asarray makes the nan test work uniformly for ndarray and DataFrame inputs
    # (the original .sum().sum() idiom suggests DataFrames may be passed in)
    if np.isnan(np.asarray(cov)).any():
        raise ValueError("The covariance matrix contains missing values")
def check_constraints(C, d, n):
    """Validate linear constraints (C, d) against a universe of n assets.

    Raises:
        ValueError: if C has a wrong number of columns or d a wrong length
    """
    if C is None:
        return
    if n != C.shape[1]:
        raise ValueError("Number of columns of C is not equal to the number of asset.")
    if len(d) != C.shape[0]:
        raise ValueError("Number of rows of C is not equal to the length of d.")


def check_bounds(bounds, n):
    """Validate the (n, 2) array of (min, max) weight bounds.

    Raises:
        ValueError: if bounds does not have n rows and exactly 2 columns
    """
    if bounds is None:
        return
    if n != bounds.shape[0]:
        raise ValueError(
            "The number of rows of the bounds array is not equal to the number of asset."
        )
    if 2 != bounds.shape[1]:
        raise ValueError(
            "The number of columns the bounds array should be equal to two (min and max bounds)."
        )


def check_risk_budget(riskbudgets, n):
    """Validate risk budgets: no nans, no negatives, correct length, none below tolerance.

    Raises:
        ValueError: on any violation
    """
    if riskbudgets is None:
        return
    if np.isnan(riskbudgets).sum() > 0:
        raise ValueError("Risk budget contains missing values")
    if (np.array(riskbudgets) < 0).sum() > 0:
        raise ValueError("Risk budget contains negative values")
    if n != len(riskbudgets):
        raise ValueError("Risk budget size is not equal to the number of asset.")
    # bug fix: the error message says "One of the budget is smaller than", so a
    # single budget below tolerance must already be rejected -> any(), not all();
    # a near-zero budget makes the risk-budgeting solution degenerate for that asset
    if any(v < RISK_BUDGET_TOL for v in riskbudgets):
        raise ValueError(
            "One of the budget is smaller than {}. If you want a risk budget of 0 please remove the asset.".format(
                RISK_BUDGET_TOL
            )
        )