├── dist ├── accbpg-0.2.tar.gz └── accbpg-0.2-py3-none-any.whl ├── ipynb └── ABPGvsFW │ ├── figures │ ├── Dopt_compareFW_m80e_3.pdf │ ├── Dopt_compareFW_m80e_4.pdf │ ├── Dopt_compareFW_m80e_5.pdf │ ├── Dopt_compareFW_m80e_6.pdf │ ├── Dopt_compareFW_m80e_7.pdf │ ├── Dopt_compareFW_m80e_8.pdf │ ├── Dopt_compareFW_m100n1000.pdf │ ├── Dopt_compareFW_m100n10000.pdf │ ├── Dopt_compareFW_m350n1000.pdf │ ├── Dopt_compareFW_m400n1000.pdf │ ├── Dopt_compareFW_m500n1000.pdf │ ├── Dopt_compareFW_n2000e_3.pdf │ ├── Dopt_compareFW_n2000e_4.pdf │ ├── Dopt_compareFW_n2000e_5.pdf │ ├── Dopt_compareFW_n2000e_6.pdf │ ├── Dopt_compareFW_n2000e_7.pdf │ ├── Dopt_compareFW_n2000e_8.pdf │ ├── Dopt_compareFW_m80e_6nomax.pdf │ ├── Dopt_compareFW_m80e_7nomax.pdf │ └── Dopt_compareFW_m80e_8nomax.pdf │ └── ex_Dopt_FW_n1000.ipynb ├── accbpg ├── __init__.py ├── plotfigs.py ├── ex_LR_L2L1Linf.py ├── trianglescaling.py ├── utils.py ├── applications.py ├── D_opt_alg.py ├── functions.py └── algorithms.py ├── setup.py ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── README.md └── SECURITY.md /dist/accbpg-0.2.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/dist/accbpg-0.2.tar.gz -------------------------------------------------------------------------------- /dist/accbpg-0.2-py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/dist/accbpg-0.2-py3-none-any.whl -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_3.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_4.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_4.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_5.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_5.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_6.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_6.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_7.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_7.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_8.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_8.pdf -------------------------------------------------------------------------------- 
/ipynb/ABPGvsFW/figures/Dopt_compareFW_m100n1000.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m100n1000.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m100n10000.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m100n10000.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m350n1000.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m350n1000.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m400n1000.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m400n1000.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m500n1000.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m500n1000.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_3.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_4.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_4.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_5.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_5.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_6.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_6.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_7.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_7.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_8.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_n2000e_8.pdf 
-------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_6nomax.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_6nomax.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_7nomax.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_7nomax.pdf -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_8nomax.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/accbpg/master/ipynb/ABPGvsFW/figures/Dopt_compareFW_m80e_8nomax.pdf -------------------------------------------------------------------------------- /accbpg/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | 5 | from .functions import * 6 | from .algorithms import BPG, ABPG, ABPG_expo, ABPG_gain, ABDA 7 | from .applications import D_opt_libsvm, D_opt_design, D_opt_KYinit, Poisson_regrL1, Poisson_regrL2, KL_nonneg_regr 8 | from .D_opt_alg import D_opt_FW, D_opt_FW_away 9 | from .trianglescaling import plotTSE, plotTSE0 10 | from .plotfigs import plot_comparisons -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='accbpg', 5 | version='0.2', 6 | packages=find_packages(exclude=['tests*']), 7 | license='MIT', 8 | description='Accelerated Bregman proximal gradient (ABPG) methods', 9 | long_description=open('README.md').read(), 10 | long_description_content_type='text/markdown', 11 | install_requires=['numpy', 'scipy'], 12 | url='https://github.com/Microsoft/accbpg', 13 | author='Lin Xiao', 14 | author_email='lin.xiao@gmail.com' 15 | ) 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | 4 | # Distribution / packaging 5 | .Python 6 | build/ 7 | develop-eggs/ 8 | # dist/ 9 | downloads/ 10 | eggs/ 11 | .eggs/ 12 | lib/ 13 | lib64/ 14 | parts/ 15 | sdist/ 16 | var/ 17 | wheels/ 18 | *.egg-info/ 19 | .installed.cfg 20 | *.egg 21 | MANIFEST 22 | 23 | # Installer logs 24 | pip-log.txt 25 | pip-delete-this-directory.txt 26 | 27 | # Sphinx documentation 28 | docs/_build/ 29 | 30 | # PyBuilder 31 | target/ 32 | 33 | # Jupyter Notebook 34 | .ipynb_checkpoints 35 | 36 | # pyenv 37 | .python-version 38 | 39 | # Environments 40 | .env 41 | .venv 42 | env/ 43 | venv/ 44 | ENV/ 45 | env.bak/ 46 | venv.bak/ 47 | 48 | # Spyder project settings 49 | .spyderproject 50 | .spyproject 51 | 52 | # mkdocs documentation 53 | /site 54 | 55 | # mypy 56 | .mypy_cache/ 57 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributing 3 | 4 | This 
project welcomes contributions and suggestions. Most contributions require you to agree to a 5 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us 6 | the rights to use your contribution. For details, visit https://cla.microsoft.com. 7 | 8 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide 9 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions 10 | provided by the bot. You will only need to do this once across all repos using our CLA. 11 | 12 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 13 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or 14 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /accbpg/plotfigs.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | 5 | import numpy as np 6 | #import matplotlib.pyplot as plt 7 | from matplotlib.pyplot import * 8 | 9 | 10 | def plot_comparisons(axis, y_vals, labels, x_vals=[], plotdiff=False, 11 | yscale="linear", xscale="linear", 12 | xlim=[], ylim=[], xlabel="", ylabel="", legendloc=0, 13 | linestyles=['k:', 'g-', 'b-.', 'k-', 'r--', 'k-', 'm-'], 14 | linedash=[[1,2], [], [4,2,1,2], [], [4,2], [], [], []]): 15 | """ 16 | Plot comparison figures using matplotlib.pyplot. 
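    (Inputs documented here from the code below; this note is an addition.)
    Inputs:
        axis:      a matplotlib Axes object to draw the curves on
        y_vals:    list of 1-D arrays, one curve per entry (e.g. F(x[k]) per iteration)
        labels:    list of legend labels, one per curve in y_vals
        x_vals:    optional list of x-coordinate arrays; defaults to 1, 2, ..., len(y_vals[i])
        plotdiff:  if True, subtract the smallest value over all curves before plotting
        legendloc: matplotlib legend location, "no" to suppress the legend, or
                   "outside" to place the legend outside the axes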
17 | """ 18 | 19 | y_shift = 0 20 | if plotdiff: 21 | y_shift = y_vals[0].min() 22 | for i in range(len(y_vals)): 23 | y_shift = min(y_shift, y_vals[i].min()) 24 | 25 | for i in range(len(y_vals)): 26 | if len(x_vals) > 0: 27 | xi = x_vals[i] 28 | else: 29 | xi = np.arange(len(y_vals[i])) + 1 30 | 31 | axis.plot(xi, y_vals[i]-y_shift, linestyles[i], label=labels[i], 32 | dashes=linedash[i]) 33 | 34 | axis.set_xscale(xscale) 35 | axis.set_yscale(yscale) 36 | axis.set_xlabel(xlabel) 37 | axis.set_ylabel(ylabel) 38 | if legendloc == "no": 39 | pass 40 | elif legendloc == "outside": 41 | axis.legend(bbox_to_anchor=(1.05, 1), loc="upper left", borderaxespad=0) 42 | else: 43 | axis.legend(loc=legendloc) 44 | 45 | if len(xlim) > 0: 46 | axis.set_xlim(xlim) 47 | if len(ylim) > 0: 48 | axis.set_ylim(ylim) 49 | -------------------------------------------------------------------------------- /accbpg/ex_LR_L2L1Linf.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | 5 | """ 6 | Example of logistic regression with L1 regularization and Linf bounds 7 | 8 | minimize_x f(x) = (1/m) * sum_{i=1}^m log(1 + exp(-b_i*(ai'*x))) 9 | subject to x in R^n, and ||x||_inf <= B 10 | 11 | The objective f is 1-relative smooth relative to (1/2)||x||_2^2. 12 | 13 | """ 14 | 15 | import numpy as np 16 | from functions import RSmoothFunction, L2L1Linf, SquaredL2Norm 17 | from algorithms import BPG, ABPG_gain 18 | 19 | class LogisticRegression(RSmoothFunction): 20 | """ 21 | f(x) = (1/m)*sum_{i=1}^m log(1 + exp(-b_i*(ai'*x))) with ai in R^n, bi in R 22 | """ 23 | def __init__(self, A, b): 24 | assert len(b) == A.shape[0], "Logistic Regression: len(b) != m" 25 | self.bA = np.reshape(b, [len(b),1]) * A 26 | self.m = A.shape[0] 27 | self.n = A.shape[1] 28 | 29 | def __call__(self, x): 30 | return self.func_grad(x, flag=0) 31 | 32 | def gradient(self, x): 33 | return self.func_grad(x, flag=1) 34 | 35 | def func_grad(self, x, flag=2): 36 | assert x.size == self.n, "Logistic Regression: x.size not equal to n" 37 | 38 | bAx = np.dot(self.bA, x) 39 | 40 | loss = - bAx 41 | mask = bAx > -50 42 | loss[mask] = np.log(1 + np.exp(-bAx[mask])) 43 | f = np.sum(loss) / self.m 44 | 45 | if flag == 0: 46 | return f 47 | 48 | p = -1/(1+np.exp(bAx)) 49 | g = np.dot(p, self.bA) / self.m 50 | 51 | if flag == 1: 52 | return g 53 | 54 | return f, g 55 | 56 | 57 | def test_L2L1Linf(): 58 | 59 | m = 100 60 | n = 200 61 | A = np.random.randn(m, n) 62 | #b = np.sign(A[:, 0]) 63 | b = np.sign(np.random.rand(m,1)) 64 | 65 | f = LogisticRegression(A, b) 66 | #h = SquaredL2Norm() 67 | h = L2L1Linf(lamda=1.0/m, B=1) 68 | 69 | L = 0.25 70 | x0 = np.zeros(n) 71 | maxitrs = 100 72 | 73 | x1, F1, G1, _ = BPG(f, h, L, x0, maxitrs, verbskip=10) 74 | 75 | x2, F2, G2, _, _, _ = ABPG_gain(f, h, L, x0, gamma=2, maxitrs=maxitrs, 76 | restart=False, verbskip=10) 77 | 78 | if __name__ == "__main__": 79 | test_L2L1Linf() -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Accelerated Bregman Proximal Gradient Methods 2 | 3 | A Python package of accelerated first-order algorithms for solving relatively-smooth convex optimization problems 4 | 5 | minimize { f(x) + P(x) | x in C } 6 | 7 | with a reference function h(x), where C is a closed convex set and 8 | 9 | * h(x) is convex and essentially smooth on C; 
10 | * f(x) is convex and differentiable, and L-smooth relative to h(x), that is, f(x)-L*h(x) is convex; 11 | * P(x) is convex and closed (lower semi-continuous). 12 | 13 | ### Implemented algorithms in [HRX2018](https://arxiv.org/abs/1808.03045): 14 | 15 | * BPG(Bregman proximal gradient) method with line search option 16 | * ABPG (Accelerated BPG) method 17 | * ABPG-expo (ABPG with exponent adaption) 18 | * ABPG-gain (ABPG with gain adaption) 19 | * ABDA (Accelerated Bregman dual averaging) method 20 | 21 | Additional algorithms for solving D-Optimal Experiment Design problems: 22 | 23 | * D_opt_FW (basic Frank-Wolfe method) 24 | * D_opt_FW_away (Frank-Wolfe method with away steps) 25 | 26 | ## Install 27 | 28 | Clone or fork from GitHub. Or install from PyPI: 29 | 30 | pip install accbpg 31 | 32 | ## Usage 33 | 34 | Example: generate a random instance of D-optimal design problem and solve it using two different methods. 35 | 36 | ```python 37 | import accbpg 38 | 39 | # generate a random instance of D-optimal design problem of size 80 by 200 40 | f, h, L, x0 = accbpg.D_opt_design(80, 200) 41 | 42 | # solve the problem instance using BPG with line search 43 | x1, F1, G1, T1 = accbpg.BPG(f, h, L, x0, maxitrs=1000, verbskip=100) 44 | 45 | # solve it again using ABPG with gamma=2 46 | x2, F2, G2, T2 = accbpg.ABPG(f, h, L, x0, gamma=2, maxitrs=1000, verbskip=100) 47 | 48 | # solve it again using adaptive variant of ABPG with gamma=2 49 | x3, F3, G3, _, _, T3 = accbpg.ABPG_gain(f, h, L, x0, gamma=2, maxitrs=1000, verbskip=100) 50 | ``` 51 | 52 | D-optimal experiment design problems can be constructed from files (LIBSVM format) directly using 53 | 54 | ```python 55 | f, h, L, X0 = accbpg.D_opt_libsvm(filename) 56 | ``` 57 | 58 | All algorithms can work with customized functions f(x) and h(x), and an example is given in [this Python file](accbpg/ex_LR_L2L1Linf.py). 59 | 60 | ### Additional examples 61 | 62 | * A complete example with visualization is given in this [Jupyter Notebook](ipynb/ex_Dopt_random.ipynb). 63 | 64 | * All examples in [HRX2018](https://arxiv.org/abs/1808.03045) can be found in the [ipynb](ipynb/) directory. 65 | 66 | * Comparisons with the Frank-Wolfe method can be found in [ipynb/ABPGvsFW](ipynb/ABPGvsFW/). 67 | 68 | 69 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 
14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 40 | 41 | 42 | -------------------------------------------------------------------------------- /accbpg/trianglescaling.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | 5 | import numpy as np 6 | import matplotlib.pyplot as plt 7 | from .functions import * 8 | 9 | 10 | def plotTSE(h, dim=10, nTriples=10, nThetas=100, R=1, onSimplex=True, 11 | randseed=-1): 12 | """ 13 | Plot estimated triangle scaling exponents of Bregman distance. 14 | """ 15 | 16 | if randseed >= 0: 17 | np.random.seed(randseed) 18 | 19 | plt.figure() 20 | 21 | for k in range(nTriples): 22 | x = R * np.random.rand(dim) 23 | y = R * np.random.rand(dim) 24 | z = R * np.random.rand(dim) 25 | if onSimplex: 26 | x = x / x.sum() 27 | y = y / y.sum() 28 | z = z / z.sum() 29 | 30 | theta = np.arange(1.0/nThetas, 1, 1.0/nThetas) 31 | expnt = np.zeros(theta.shape) 32 | dyz = h.divergence(y, z) 33 | 34 | for i in range(theta.size): 35 | c = theta[i] 36 | dtheta = h.divergence((1-c)*x+c*y, (1-c)*x+c*z) 37 | expnt[i] = np.log(dtheta / dyz) / np.log(c) 38 | #expnt[i] = (np.log(dtheta) - np.log(dyz)) / np.log(c) 39 | plt.plot(theta, expnt) 40 | 41 | plt.xlim([0,1]) 42 | #plt.ylim([0,5]) 43 | #plt.xlabel(r'$\theta$') 44 | #plt.ylabel(r'$\hat{\gamma}(\theta)$') 45 | plt.tight_layout() 46 | 47 | 48 | def plotTSE0(h, dim=10, xscale=1, yscale=1, zscale=2, nThetas=1000, maxTheta=1): 49 | """ 50 | Plot estimated triangle scaling exponents of Bregman distance. 
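    Here the test points are the constant vectors x = xscale*ones(dim),
    y = yscale*ones(dim), z = zscale*ones(dim), and for each theta in (0, maxTheta)
    the plotted estimate is (description added from the code below)
        gamma_hat(theta) = log( D((1-theta)x+theta*y, (1-theta)x+theta*z) / D(y,z) ) / log(theta)
    where D is the Bregman divergence of the kernel h.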
51 | """ 52 | 53 | plt.figure() 54 | 55 | # test for extreme cases 56 | #x = np.zeros(dim) 57 | x = xscale*np.ones(dim) 58 | #x = np.random.rand(dim) 59 | y = yscale*np.ones(dim) 60 | z = zscale*np.ones(dim) 61 | #y = yscale*np.random.rand(dim) 62 | #z = zscale*np.random.rand(dim) 63 | 64 | theta = np.arange(1.0/nThetas, maxTheta, 1.0/nThetas) 65 | expnt = np.zeros(theta.shape) 66 | dyz = h.divergence(y, z) 67 | 68 | for i in range(theta.size): 69 | c = theta[i] 70 | dtheta = h.divergence((1-c)*x+c*y, (1-c)*x+c*z) 71 | expnt[i] = np.log(dtheta / dyz) / np.log(c) 72 | #expnt[i] = (np.log(dtheta) - np.log(dyz)) / np.log(c) 73 | plt.plot(theta, expnt) 74 | 75 | plt.xlim([0,maxTheta]) 76 | #plt.ylim([0,5]) 77 | #plt.xlabel(r'$\theta$') 78 | #plt.ylabel(r'$\hat{\gamma}(\theta)$') 79 | plt.tight_layout() 80 | 81 | 82 | if __name__ == "__main__": 83 | 84 | #h = ShannonEntropy() 85 | #h = BurgEntropy() 86 | h = PowerNeg1() 87 | #h = SquaredL2Norm() 88 | #h = SumOf2nd4thPowers(1) 89 | 90 | plotTSE(h, nThetas=1000) 91 | #plotTSE0(h, xscale=1e-8, yscale=10, zscale=20, nThetas=10000, maxTheta=1e-2) 92 | -------------------------------------------------------------------------------- /accbpg/utils.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import numpy as np 3 | import scipy.sparse as sparse 4 | 5 | 6 | def _open_file(filename): 7 | 8 | _, ext = os.path.splitext(filename) 9 | if ext == '.gz': 10 | import gzip 11 | return gzip.open(filename, 'rt') 12 | elif ext == '.bz2': 13 | import bz2 14 | return bz2.open(filename, 'rt') 15 | else: 16 | return open(filename, 'r') 17 | 18 | 19 | def load_libsvm_file(filename, dtype=np.float64, 20 | n_features=None, zero_based="auto"): 21 | """ 22 | Load dataset in svmlight / libsvm format into sparse CSR matrix. 
23 | 24 | Inputs: 25 | filename: a string including file path and name 26 | dtype: numpy dtype of feature values 27 | n_features: number of features, optional 28 | zero_based: boolean or "auto", optional 29 | 30 | Returns: 31 | X: scipy.sparse.csr_matrix of shape (n_samples, n_features) 32 | y: numpy.ndarray of shape (n_samples,) 33 | 34 | """ 35 | 36 | labels = [] 37 | data = [] 38 | indptr = [] 39 | indices = [] 40 | 41 | with _open_file(filename) as f: 42 | 43 | for line in f: 44 | 45 | # skip comments in the line 46 | idx_comment = line.find('#') 47 | if idx_comment >= 0: 48 | line = line[:idx_comment] 49 | 50 | line_parts = line.split() 51 | if len(line_parts) == 0: 52 | continue 53 | 54 | labels.append(float(line_parts[0])) 55 | indptr.append(len(data)) 56 | 57 | prev_idx = -1 58 | for i in range(1,len(line_parts)): 59 | idx_str, value = line_parts[i].split(':',1) 60 | idx = int(idx_str) 61 | if idx < 0 or (not zero_based and idx == 0): 62 | raise ValueError( 63 | "Invalid index {0:d} in LibSVM data file.".format(idx)) 64 | if idx <= prev_idx: 65 | raise ValueError("Feature indices in LibSVM data file" 66 | "should be sorted and unique.") 67 | indices.append(idx) 68 | data.append(dtype(value)) 69 | prev_idx = idx 70 | 71 | # construct data arrays 72 | indptr.append(len(data)) 73 | 74 | data = np.array(data) 75 | indptr = np.array(indptr) 76 | indices = np.array(indices) 77 | 78 | if (zero_based is False or zero_based == "auto" and indices.min() > 0): 79 | indices -= 1 80 | if n_features is None: 81 | n_features = indices.max() + 1 82 | else: 83 | if n_features < indices.max() + 1: 84 | n_features = indices.max() + 1 85 | print("Warning: n_features increased to match data.") 86 | 87 | shape = (indptr.shape[0] - 1, n_features) 88 | X = sparse.csr_matrix((data, indices, indptr), shape) 89 | X.sort_indices() 90 | y = np.array(labels) 91 | 92 | return X, y 93 | 94 | 95 | def shuffle_data(X, y): 96 | ''' 97 | We need to return here since whole array assignment in numpy does not 98 | change input arguments, i.e., it does NOT behaves as passing by reference 99 | ''' 100 | index = np.arange(len(y)) 101 | np.random.shuffle(index) 102 | X = X[index,:] 103 | y = y[index] 104 | return X, y 105 | 106 | 107 | def mnist_2digits(X, y, d1, d2): 108 | index1 = np.nonzero(y==d1) 109 | index2 = np.nonzero(y==d2) 110 | ycopy = y.copy() 111 | ycopy[index1] = 1 112 | ycopy[index2] = -1 113 | index = np.concatenate((index1[0], index2[0])) 114 | np.random.shuffle(index) 115 | Xd1d2 = X[index, :] 116 | yd1d2 = ycopy[index] 117 | return Xd1d2, yd1d2 118 | 119 | 120 | def binary_error_rate(X, y, w, bias=0): 121 | if sparse.isspmatrix(X): 122 | yp = np.sign( X * w + bias ) 123 | else: 124 | yp = np.sign( np.dot(X, w) + bias ) 125 | 126 | return (1 - np.dot(yp, y)/len(y))/2 127 | 128 | 129 | def rmse(X, y, w, bias=0): 130 | if sparse.isspmatrix(X): 131 | yp = X * w + bias 132 | else: 133 | yp = np.dot(X, w) + bias 134 | 135 | error2 = (yp - y)**2 136 | return np.sqrt(error2.mean()) 137 | 138 | 139 | def row_norm_squared(X): 140 | "return squared 2-norms of each row" 141 | X2 = sparse.csr_matrix((X.data**2, X.indices, X.indptr), X.shape) 142 | return np.squeeze(np.asarray(X2.sum(1))) 143 | 144 | 145 | def load_sido(filename): 146 | with np.load(filename) as D: 147 | data = D['Xdata'] 148 | indptr = D['Xindptr'] 149 | indices = D['Xindices'] 150 | y = D['y'] 151 | shape = D['shape'] 152 | 153 | X = sparse.csr_matrix((data, indices, indptr), shape) 154 | 155 | return X, y 156 | 
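# Illustrative usage sketch for load_libsvm_file (added note; "mydata.txt" and the
# weight vector w are placeholders, not part of the original module):
#
#   X, y = load_libsvm_file("mydata.txt")   # X: scipy.sparse CSR matrix, y: labels
#   print(X.shape, y.shape)
#   w = np.zeros(X.shape[1])                # a weight vector of length n_features
#   err = binary_error_rate(X, y, w)        # fraction of misclassified +/-1 labels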
-------------------------------------------------------------------------------- /accbpg/applications.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | 5 | import numpy as np 6 | from .functions import * 7 | from .utils import load_libsvm_file 8 | 9 | 10 | def D_opt_libsvm(filename): 11 | """ 12 | Generate a D-Optimal Design problem from LIBSVM datasets 13 | """ 14 | X, y = load_libsvm_file(filename) 15 | if X.shape[0] > X.shape[1]: 16 | H = X.T.toarray('C') 17 | else: 18 | H = X.toarray('C') 19 | n = H.shape[1] 20 | 21 | f = DOptimalObj(H) 22 | h = BurgEntropySimplex() 23 | L = 1.0 24 | x0 = (1.0/n)*np.ones(n) 25 | 26 | return f, h, L, x0 27 | 28 | 29 | def D_opt_design(m, n, randseed=-1): 30 | """ 31 | Generate a random instance of the D-Optimal Design problem 32 | m, n: size of design matrix H is m by n wiht m < n 33 | Return f, h, L, x0: 34 | f: f(x) = - log(det(H*diag(x)*H')) 35 | h: Burg Entrop with Simplex constraint 36 | L: L = 1 37 | x0: initial point is center of simplex 38 | """ 39 | 40 | if randseed > 0: 41 | np.random.seed(randseed) 42 | H = np.random.randn(m,n) 43 | 44 | f = DOptimalObj(H) 45 | h = BurgEntropySimplex() 46 | L = 1.0 47 | x0 = (1.0/n)*np.ones(n) 48 | 49 | return f, h, L, x0 50 | 51 | 52 | def D_opt_KYinit(V): 53 | """ 54 | Return a sparse initial point for MVE or D-optimal design problem 55 | proposed by Kuman and Yildirim (JOTA 126(1):1-21, 2005) 56 | 57 | """ 58 | m, n = V.shape 59 | 60 | if n <= 2*m: 61 | return (1.0/n)*np.ones(n) 62 | 63 | I = [] 64 | Q = np.zeros((m, m)) 65 | # Using (unstable) Gram-Schmidt without calling QR repetitively 66 | for i in range(m): 67 | b = np.random.rand(m) 68 | q = np.copy(b) 69 | for j in range(i): 70 | Rij = np.dot(Q[:,j], b) 71 | q = q - Rij * Q[:,j] 72 | qV = np.dot(q, V) 73 | kmax = np.argmax(qV) 74 | kmin = np.argmin(qV) 75 | I.append(kmax) 76 | I.append(kmin) 77 | v = V[:,kmin] - V[:,kmax] 78 | q = np.copy(v) 79 | for j in range(i): 80 | Rij = np.dot(Q[:,j], v) 81 | q = q - Rij * Q[:,j] 82 | Q[:,i] = q / np.linalg.norm(q) 83 | 84 | x0 = np.zeros(n) 85 | x0[I] = np.ones(len(I)) / len(I) 86 | # in case there are repeated entries in I, scale to sum 1 87 | x0 /= x0.sum() 88 | return x0 89 | 90 | 91 | def Poisson_regrL1(m, n, noise=0.01, lamda=0, randseed=-1, normalizeA=True): 92 | """ 93 | Generate a random instance of L1-regularized Poisson regression problem 94 | minimize_{x >= 0} D_KL(b, Ax) + lamda * ||x||_1 95 | where 96 | A: m by n nonnegative matrix 97 | b: nonnegative vector of length m 98 | noise: noise level to generate b = A * x + noise 99 | lambda: L1 regularization weight 100 | normalizeA: wether or not to normalize columns of A 101 | 102 | Return f, h, L, x0: 103 | f: f(x) = D_KL(b, Ax) 104 | h: Burg entropy with L1 regularization 105 | L: L = ||b||_1 106 | x0: initial point, scaled version of all-one vector 107 | """ 108 | 109 | if randseed > 0: 110 | np.random.seed(randseed) 111 | A = np.random.rand(m,n) 112 | if normalizeA: 113 | A = A / A.sum(axis=0) # scaling to make column sums equal to 1 114 | x = np.random.rand(n) / n 115 | xavg = x.sum() / x.size 116 | x = np.maximum(x - xavg, 0) * 10 117 | b = np.dot(A, x) + noise * (np.random.rand(m) - 0.5) 118 | assert b.min() > 0, "need b > 0 for nonnegative regression." 119 | 120 | f = PoissonRegression(A, b) 121 | # L1 regularization often not enough for convergence! 
122 | h = BurgEntropyL1(lamda) 123 | L = b.sum() 124 | # Initial point should be far from 0 in order for ARDA to work well! 125 | x0 = (1.0/n)*np.ones(n) * 10 126 | 127 | return f, h, L, x0 128 | 129 | 130 | def Poisson_regrL2(m, n, noise=0.01, lamda=0, randseed=-1, normalizeA=True): 131 | """ 132 | Generate a random instance of L2-regularized Poisson regression problem 133 | minimize_{x >= 0} D_KL(b, Ax) + (lamda/2) * ||x||_2^2 134 | where 135 | A: m by n nonnegative matrix 136 | b: nonnegative vector of length m 137 | noise: noise level to generate b = A * x + noise 138 | lambda: L2 regularization weight 139 | normalizeA: wether or not to normalize columns of A 140 | 141 | Return f, h, L, x0: 142 | f: f(x) = D_KL(b, Ax) 143 | h: Burg entropy with L1 regularization 144 | L: L = ||b||_1 145 | x0: initial point is center of simplex 146 | """ 147 | 148 | if randseed > 0: 149 | np.random.seed(randseed) 150 | A = np.random.rand(m,n) 151 | if normalizeA: 152 | A = A / A.sum(axis=0) # scaling to make column sums equal to 1 153 | x = np.random.rand(n) / n 154 | xavg = x.sum() / x.size 155 | x = np.maximum(x - xavg, 0) * 10 156 | b = np.dot(A, x) + noise * (np.random.rand(m) - 0.5) 157 | assert b.min() > 0, "need b > 0 for nonnegative regression." 158 | 159 | f = PoissonRegression(A, b) 160 | h = BurgEntropyL2(lamda) 161 | L = b.sum() 162 | # Initial point should be far from 0 in order for ARDA to work well! 163 | x0 = (1.0/n)*np.ones(n) 164 | 165 | return f, h, L, x0 166 | 167 | 168 | def KL_nonneg_regr(m, n, noise=0.01, lamdaL1=0, randseed=-1, normalizeA=True): 169 | """ 170 | Generate a random instance of L1-regularized KL regression problem 171 | minimize_{x >= 0} D_KL(Ax, b) + lamda * ||x||_1 172 | where 173 | A: m by n nonnegative matrix 174 | b: nonnegative vector of length m 175 | noise: noise level to generate b = A * x + noise 176 | lambda: L2 regularization weight 177 | normalizeA: wether or not to normalize columns of A 178 | 179 | Return f, h, L, x0: 180 | f: f(x) = D_KL(Ax, b) 181 | h: h(x) = Shannon entropy (with L1 regularization as Psi) 182 | L: L = max(sum(A, axis=0)), maximum column sum 183 | x0: initial point, scaled version of all-one vector 184 | """ 185 | if randseed > 0: 186 | np.random.seed(randseed) 187 | A = np.random.rand(m,n) 188 | if normalizeA: 189 | A = A / A.sum(axis=0) # scaling to make column sums equal to 1 190 | x = np.random.rand(n) 191 | b = np.dot(A, x) + noise * (np.random.rand(m) - 0.5) 192 | assert b.min() > 0, "need b > 0 for nonnegative regression." 193 | 194 | f = KLdivRegression(A, b) 195 | h = ShannonEntropyL1(lamdaL1) 196 | L = max( A.sum(axis=0) ) #L = 1.0 if columns of A are normalized 197 | x0 = 0.5*np.ones(n) 198 | #x0 = (1.0/n)*np.ones(n) 199 | 200 | return f, h, L, x0 201 | -------------------------------------------------------------------------------- /accbpg/D_opt_alg.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
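#
# Frank-Wolfe solvers for the D-optimal design problem: D_opt_FW and D_opt_FW_away
# below. Unlike the BPG/ABPG solvers, they take the design matrix V directly.
# Illustrative call sketch (the sizes and the random matrix are example values only):
#
#   import numpy as np
#   m, n = 80, 200
#   V = np.random.randn(m, n)            # m by n design matrix with m < n
#   x0 = np.ones(n) / n                  # start at the center of the simplex
#   x, F, SP, SN, T = D_opt_FW(V, x0, eps=1e-6, maxitrs=1000, verbskip=100)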
3 | 4 | 5 | import numpy as np 6 | import time 7 | 8 | 9 | def D_opt_FW(V, x0, eps, maxitrs, verbose=True, verbskip=1): 10 | """ 11 | Solve the D-optimal design problem by the Frank-Wolfe algorithm 12 | minimize - log(det(V*diag(x)*V')) 13 | subject to x >= 0 and sum_i x_i=1 14 | where V is m by n matrix and x belongs to n-dimensional simplex 15 | 16 | Inputs: 17 | V: matrix of size m by n with m < n 18 | x0: initial point 19 | eps: precision for optimality conditions (complementary slackness) 20 | maxitrs: maximum number of iterations 21 | verbose: display computational progress (True or False) 22 | verbskip: number of iterations to skip between displays 23 | 24 | Returns (x, F, SP, SN, T): 25 | x: the last iterate of BPG 26 | F: array storing F(x[k]) for all k 27 | SP: positive slackness 28 | SN: negative slackness 29 | T: array storing time used up to iteration k 30 | """ 31 | start_time = time.time() 32 | 33 | m, n = V.shape 34 | F = np.zeros(maxitrs) 35 | SP = np.zeros(maxitrs) 36 | SN = np.zeros(maxitrs) 37 | T = np.zeros(maxitrs) 38 | 39 | x = np.copy(x0) 40 | VXVT = np.dot(V*x, V.T) 41 | detVXVT = np.linalg.det(VXVT) 42 | H = np.linalg.inv(VXVT) 43 | 44 | # compute w = - gradient # This step cost m^2*n 45 | w = np.sum(V * np.dot(H, V), axis=0) 46 | 47 | if verbose: 48 | print("\nSolving D-opt design problem using Frank-Wolfe method") 49 | print(" k F(x) pos_slack neg_slack time") 50 | 51 | for k in range(maxitrs): 52 | F[k] = - np.log(detVXVT) 53 | T[k] = time.time() - start_time 54 | 55 | # compute w = - gradient # This step cost m^2*n 56 | #w = np.sum(V * np.dot(H, V), axis=0) 57 | 58 | # check approximate optimality conditions 59 | i = np.argmax(w) 60 | w_xpos = w[x>0] 61 | j = np.argmin(w_xpos) 62 | 63 | eps_pos = w[i] / m - 1 64 | eps_neg = 1 - w_xpos[j] / m 65 | SP[k] = eps_pos 66 | SN[k] = eps_neg 67 | 68 | if verbose and k % verbskip == 0: 69 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:6.1f}".format( 70 | k, F[k], eps_pos, eps_neg, T[k])) 71 | 72 | if eps_pos <= eps and eps_neg <= eps: 73 | break 74 | 75 | t = (w[i] / m - 1) / (w[i] - 1) 76 | x *= (1 - t) 77 | x[i] += t 78 | HVi = np.dot(H, V[:,i]) 79 | H = (H - (t / (1 + t * (w[i] - 1))) * np.outer(HVi, HVi)) / (1 - t) 80 | detVXVT *= np.power(1 - t, m - 1) * (1 + t * (w[i] - 1)) 81 | # compute w more efficiently # This step cost m*n 82 | w = (w - (t / (1 + t * (w[i] - 1))) * np.dot(HVi, V)**2 ) / (1 - t) 83 | 84 | F = F[0:k+1] 85 | SP = SP[0:k+1] 86 | SN = SN[0:k+1] 87 | T = T[0:k+1] 88 | return x, F, SP, SN, T 89 | 90 | 91 | def D_opt_FW_away(V, x0, eps, maxitrs, verbose=True, verbskip=1): 92 | """ 93 | Solve the D-optimal design problem by Frank-Wolfe (Wolfe-Atwood) algorithm 94 | minimize - log(det(V*diag(x)*V')) 95 | subject to x >= 0 and sum_i x_i=1 96 | where V is m by n matrix and x belongs to n-dimensional simplex. 97 | 98 | This is equivalent to the Frank-Wolfe algorithm with Away steps. 
99 | 100 | Inputs: 101 | V: matrix of size m by n with m < n 102 | x0: initial point 103 | eps: precision for optimality conditions (complementary slackness) 104 | maxitrs: maximum number of iterations 105 | verbose: display computational progress (True or False) 106 | verbskip: number of iterations to skip between displays 107 | 108 | Returns (x, F, SP, SN, T): 109 | x: the last iterate of BPG 110 | F: array storing F(x[k]) for all k 111 | SP: positive slackness 112 | SN: negative slackness 113 | T: array storing time used up to iteration k 114 | """ 115 | start_time = time.time() 116 | 117 | m, n = V.shape 118 | F = np.zeros(maxitrs) 119 | SP = np.zeros(maxitrs) 120 | SN = np.zeros(maxitrs) 121 | T = np.zeros(maxitrs) 122 | 123 | x = np.copy(x0) 124 | VXVT = np.dot(V*x, V.T) 125 | detVXVT = np.linalg.det(VXVT) 126 | H = np.linalg.inv(VXVT) 127 | 128 | # compute w = - gradient # This step cost m^2*n 129 | w = np.sum(V * np.dot(H, V), axis=0) 130 | 131 | if verbose: 132 | print("\nSolving D-opt design problem using Frank-Wolfe method with away steps") 133 | print(" k F(x) pos_slack neg_slack time") 134 | 135 | for k in range(maxitrs): 136 | F[k] = np.log(np.linalg.det(H)) 137 | # the following can be much faster but often inaccurate! 138 | #F[k] = - np.log(detVXVT) 139 | T[k] = time.time() - start_time 140 | 141 | # compute w = - gradient # This step cost m^2*n 142 | #w = np.sum(V * np.dot(H, V), axis=0) 143 | 144 | # check approximate optimality conditions 145 | i = np.argmax(w) 146 | ww = w - w[i] # shift the array so that ww.max() = 0 147 | j = np.argmin(ww * [x > 1.0e-8]) 148 | #j = np.argmin(ww * [x > 0]) 149 | 150 | eps_pos = w[i] / m - 1 151 | eps_neg = 1 - w[j] / m 152 | SP[k] = eps_pos 153 | SN[k] = eps_neg 154 | 155 | if verbose and k % verbskip == 0: 156 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:6.1f}".format( 157 | k, F[k], eps_pos, eps_neg, T[k])) 158 | 159 | if eps_pos <= eps and eps_neg <= eps: 160 | break 161 | 162 | if eps_pos >= eps_neg: 163 | t = (w[i] / m - 1) / (w[i] - 1) 164 | x *= (1 - t) 165 | x[i] += t 166 | HVi = np.dot(H, V[:,i]) 167 | H = (H - (t / (1 - t + t * w[i])) * np.outer(HVi, HVi)) / (1 - t) 168 | detVXVT *= np.power(1 - t, m - 1) * (1 + t * (w[i] - 1)) 169 | # compute w more efficiently # This step cost m*n 170 | w = (w - (t / (1 - t + t * w[i])) * np.dot(HVi, V)**2 ) / (1 - t) 171 | else: # Wolfe's awaystep 172 | t = min((1 - w[j] / m) / (w[j] - 1), x[j] / (1 - x[j])) 173 | x *= (1 + t) 174 | x[j] -= t 175 | HVj = np.dot(H, V[:,j]) 176 | H = (H + (t / (1 + t - t * w[j])) * np.outer(HVj, HVj)) / (1 + t) 177 | detVXVT *= np.power(1 + t, m - 1) * (1 + t - t * w[i]) 178 | # compute w more efficiently # This step cost m*n 179 | w = (w + (t / (1 + t - t * w[j])) * np.dot(HVj, V)**2 ) / (1 + t) 180 | 181 | F = F[0:k+1] 182 | SP = SP[0:k+1] 183 | SN = SN[0:k+1] 184 | T = T[0:k+1] 185 | return x, F, SP, SN, T 186 | 187 | -------------------------------------------------------------------------------- /accbpg/functions.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
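#
# Objective classes (RSmoothFunction subclasses) and Bregman-divergence kernels
# (LegendreFunction subclasses) used by the solvers in algorithms.py.
# Minimal sketch of building a pair (f, h), mirroring applications.D_opt_design
# (the sizes below are arbitrary example values):
#
#   import numpy as np
#   H = np.random.randn(80, 200)     # needs m < n
#   f = DOptimalObj(H)               # f(x) = -log(det(H*diag(x)*H'))
#   h = BurgEntropySimplex()         # Burg entropy restricted to the unit simplex
#   x0 = np.ones(200) / 200
#   fx, g = f.func_grad(x0)          # objective value and gradient at x0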
3 | 4 | 5 | import numpy as np 6 | 7 | 8 | class RSmoothFunction: 9 | """ 10 | Relatively-Smooth Function, can query f(x) and gradient 11 | """ 12 | def __call__(self, x): 13 | assert 0, "RSmoothFunction: __call__(x) is not defined" 14 | 15 | def gradient(self, x): 16 | assert 0, "RSmoothFunction: gradient(x) is not defined" 17 | 18 | def func_grad(self, x, flag): 19 | """ 20 | flag=0: function, flag=1: gradient, flag=2: function & gradient 21 | """ 22 | assert 0, "RSmoothFunction: func_grad(x, flag) is not defined" 23 | 24 | 25 | class DOptimalObj(RSmoothFunction): 26 | """ 27 | f(x) = - log(det(H*diag(x)*H')) where H is an m by n matrix, m < n 28 | """ 29 | def __init__(self, H): 30 | self.H = H 31 | self.m = H.shape[0] 32 | self.n = H.shape[1] 33 | assert self.m < self.n, "DOptimalObj: need m < n" 34 | 35 | def __call__(self, x): 36 | return self.func_grad(x, flag=0) 37 | 38 | def gradient(self, x): 39 | return self.func_grad(x, flag=1) 40 | 41 | def func_grad(self, x, flag=2): 42 | assert x.size == self.n, "DOptimalObj: x.size not equal to n" 43 | assert x.min() >= 0, "DOptimalObj: x needs to be nonnegative" 44 | HXHT = np.dot(self.H*x, self.H.T) 45 | 46 | if flag == 0: # only return function value 47 | f = -np.log(np.linalg.det(HXHT)) 48 | return f 49 | 50 | HXHTinvH = np.dot(np.linalg.inv(HXHT), self.H) 51 | g = - np.sum(self.H * HXHTinvH, axis=0) 52 | 53 | if flag == 1: # only return gradient 54 | return g 55 | 56 | # return both function value and gradient 57 | f = -np.log(np.linalg.det(HXHT)) 58 | return f, g 59 | 60 | def func_grad_slow(self, x, flag=2): 61 | assert x.size == self.n, "DOptimalObj: x.size not equal to n" 62 | assert x.min() >= 0, "DOptimalObj: x needs to be nonnegative" 63 | sx = np.sqrt(x) 64 | Hsx = self.H*sx; # using numpy array broadcast 65 | HXHT = np.dot(Hsx,Hsx.T) 66 | 67 | if flag == 0: # only return function value 68 | f = -np.log(np.linalg.det(HXHT)) 69 | return f 70 | 71 | Hsx = np.linalg.solve(HXHT, self.H) 72 | g = np.empty(self.n) 73 | for i in range(self.n): 74 | g[i] = - np.dot(self.H[:,i], Hsx[:,i]) 75 | 76 | if flag == 1: # only return gradient 77 | return g 78 | 79 | # return both function value and gradient 80 | f = -np.log(np.linalg.det(HXHT)) 81 | return f, g 82 | 83 | 84 | class PoissonRegression(RSmoothFunction): 85 | """ 86 | f(x) = D_KL(b, Ax) for linear inverse problem A * x = b 87 | """ 88 | def __init__(self, A, b): 89 | assert A.shape[0] == b.shape[0], "A and b sizes not matching" 90 | self.A = A 91 | self.b = b 92 | self.m = A.shape[0] 93 | self.n = A.shape[1] 94 | 95 | def __call__(self, x): 96 | return self.func_grad(x, flag=0) 97 | 98 | def gradient(self, x): 99 | return self.func_grad(x, flag=1) 100 | 101 | def func_grad(self, x, flag=2): 102 | assert x.size == self.n, "PoissonRegression: x.size not equal to n." 
103 | Ax = np.dot(self.A, x) 104 | if flag == 0: 105 | fx = sum( self.b * np.log(self.b / Ax) + Ax - self.b ) 106 | return fx 107 | 108 | # use array broadcasting 109 | g = ((1-self.b/Ax).reshape(self.m, 1) * self.A).sum(axis=0) 110 | # line above is the same as the following code 111 | #g = np.zeros(x.shape) 112 | #for i in range(self.m): 113 | # g += (1 - self.b[i]/np.dot(self.A[i,:], x)) * self.A[i,:] 114 | if flag == 1: 115 | return g 116 | 117 | # return both function value and gradient 118 | fx = sum( self.b * np.log(self.b / Ax) + Ax - self.b ) 119 | return fx, g 120 | 121 | 122 | class KLdivRegression(RSmoothFunction): 123 | """ 124 | f(x) = D_KL(Ax, b) for linear inverse problem A * x = b 125 | """ 126 | def __init__(self, A, b): 127 | assert A.shape[0] == b.shape[0], "A and b size not matching" 128 | self.A = A 129 | self.b = b 130 | self.m = A.shape[0] 131 | self.n = A.shape[1] 132 | 133 | def __call__(self, x): 134 | return self.func_grad(x, flag=0) 135 | 136 | def gradient(self, x): 137 | return self.func_grad(x, flag=1) 138 | 139 | def func_grad(self, x, flag=2): 140 | assert x.size == self.n, "NonnegRegression: x.size not equal to n." 141 | Ax = np.dot(self.A, x) 142 | if flag == 0: 143 | fx = sum( Ax * np.log(Ax / self.b) - Ax + self.b ) 144 | return fx 145 | 146 | # use array broadcasting 147 | g = (np.log(Ax/self.b).reshape(self.m, 1) * self.A).sum(axis=0) 148 | # line above is the same as the following code 149 | #g = np.zeros(x.shape) 150 | #for i in range(self.m): 151 | # g += np.log(Ax[i]/self.b[i]) * self.A[i,:] 152 | if flag == 1: 153 | return g 154 | 155 | # return both function value and gradient 156 | fx = sum( Ax * np.log(Ax / self.b) - Ax + self.b ) 157 | return fx, g 158 | 159 | 160 | ####################################################################### 161 | 162 | 163 | class LegendreFunction: 164 | """ 165 | Function of Legendre type, used as the kernel of Bregman divergence for 166 | composite optimization 167 | minimize_{x in C} f(x) + Psi(x) 168 | where f is L-smooth relative to a Legendre function h(x), 169 | Psi(x) is an additional simple convex function. 170 | """ 171 | def __call__(self, x): 172 | assert 0, "LegendreFunction: __call__(x) is not defined." 173 | 174 | def extra_Psi(self, x): 175 | return 0 176 | 177 | def gradient(self, x): 178 | assert 0, "LegendreFunction: gradient(x) is not defined." 179 | 180 | def divergence(self, x, y): 181 | """ 182 | Return D(x,y) = h(x) - h(y) - 183 | """ 184 | assert 0, "LegendreFunction: divergence(x,y) is not defined." 185 | 186 | def prox_map(self, g, L): 187 | """ 188 | Return argmin_{x in C} { Psi(x) + + L * h(x) } 189 | """ 190 | assert 0, "LegendreFunction: prox_map(x, L) is not defined." 191 | 192 | def div_prox_map(self, y, g, L): 193 | """ 194 | Return argmin_{x in C} { Psi(x) + + L * D(x,y) } 195 | default implementation by calling prox_map(g - L*g(y), L) 196 | """ 197 | assert y.shape == g.shape, "Vectors y and g should have same size." 198 | assert L > 0, "Relative smoothness constant L should be positive." 199 | return self.prox_map(g - L*self.gradient(y), L) 200 | 201 | 202 | class BurgEntropy(LegendreFunction): 203 | """ 204 | h(x) = - sum_{i=1}^n log(x[i]) for x > 0 205 | """ 206 | def __call__(self, x): 207 | assert x.min()>0, "BurgEntropy only takes positive arguments." 208 | return -sum(np.log(x)) 209 | 210 | def gradient(self, x): 211 | assert x.min()>0, "BurgEntropy only takes positive arguments." 
212 | return -1/x 213 | 214 | def divergence(self, x, y): 215 | assert x.shape == y.shape, "Vectors x and y are of different sizes." 216 | assert x.min() > 0 and y.min() > 0, "Entries of x or y not positive." 217 | return sum(x/y - np.log(x/y) - 1) 218 | 219 | def prox_map(self, g, L): 220 | """ 221 | Return argmin_{x > 0} { + L * h(x) } 222 | This function needs to be replaced with inheritance 223 | """ 224 | assert L > 0, "BurgEntropy prox_map only takes positive L value." 225 | assert g.min() > 0, "BurgEntropy prox_map only takes positive value." 226 | return L / g 227 | 228 | def div_prox_map(self, y, g, L): 229 | """ 230 | Return argmin_{x > C} { + L * D(x,y) } 231 | This is a general function that works for all derived classes 232 | """ 233 | assert y.shape == g.shape, "Vectors y and g are of different sizes." 234 | assert y.min() > 0 and L > 0, "Either y or L is not positive." 235 | return self.prox_map(g - L*self.gradient(y), L) 236 | 237 | 238 | class BurgEntropyL1(BurgEntropy): 239 | """ 240 | h(x) = - sum_{i=1}^n log(x[i]) used in context of solving the problem 241 | min_{x > 0} f(x) + lamda * ||x||_1 242 | """ 243 | def __init__(self, lamda=0, x_max=1e4): 244 | assert lamda >= 0, "BurgEntropyL1: lambda should be nonnegative." 245 | self.lamda = lamda 246 | self.x_max = x_max 247 | 248 | def extra_Psi(self, x): 249 | """ 250 | return lamda * ||x||_1 251 | """ 252 | return self.lamda * x.sum() 253 | 254 | def prox_map(self, g, L): 255 | """ 256 | Return argmin_{x > 0} { lambda * ||x||_1 + + L h(x) } 257 | !!! This proximal mapping may have unbounded solution x->infty 258 | """ 259 | assert L > 0, "BurgEntropyL1: prox_map only takes positive L." 260 | assert g.min() > -self.lamda, "Not getting positive solution." 261 | #g = np.maximum(g, -self.lamda + 1.0 / self.x_max) 262 | return L / (self.lamda + g) 263 | 264 | 265 | class BurgEntropyL2(BurgEntropy): 266 | """ 267 | h(x) = - sum_{i=1}^n log(x[i]) used in context of solving the problem 268 | min_{x > 0} f(x) + (lambda/2) ||x||_2^2 269 | """ 270 | def __init__(self, lamda=0): 271 | assert lamda >= 0, "BurgEntropyL2: lamda should be nonnegative." 272 | self.lamda = lamda 273 | 274 | def extra_Psi(self, x): 275 | """ 276 | return (lamda/2) * ||x||_2^2 277 | """ 278 | return (self.lamda / 2) * np.dot(x, x) 279 | 280 | def prox_map(self, g, L): 281 | """ 282 | Return argmin_{x > 0} { (lamda/2) * ||x||_2^2 + + L * h(x) } 283 | """ 284 | assert L > 0, "BurgEntropyL2: prox_map only takes positive L value." 285 | gg = g / L 286 | lamda_L = self.lamda / L 287 | return (np.sqrt(gg*gg + 4*lamda_L) - gg) / (2 * lamda_L) 288 | 289 | 290 | class BurgEntropySimplex(BurgEntropy): 291 | """ 292 | h(x) = - sum_{i=1}^n log(x[i]) used in the context of solving 293 | min_{x \in C} f(x) where C is the standard simplex, with Psi(x) = 0 294 | """ 295 | def __init__(self, eps=1e-8): 296 | # eps is precision for solving prox_map using Newton's method 297 | assert eps > 0, "BurgEntropySimplex: eps should be positive." 298 | self.eps = eps 299 | 300 | def prox_map(self, g, L): 301 | """ 302 | Return argmin_{x in C} { + L h(x) } where C is unit simplex 303 | """ 304 | assert L > 0, "BergEntropySimplex prox_map only takes positive L." 
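        # (Explanatory note added.) The KKT conditions for
        #     min_{x in simplex} <g, x> + L*h(x),   with h(x) = -sum_i log(x[i]),
        # give x[i] = 1/(g[i]/L + c), where the scalar c is chosen so that
        # sum_i 1/(g[i]/L + c) = 1. The code below finds such a c by bisection
        # followed by Newton's method, then recovers x.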
305 | gg = g / L 306 | cmin = -gg.min() # choose cmin to ensure min(gg+c) >= 0 307 | # first use bisection to find c such that sum(1/(gg+c)) > 0 308 | c = cmin + 1 309 | while sum(1/(gg+c))-1 < 0: 310 | c = (cmin + c) / 2.0 311 | # then use Newton's method to find optimal c 312 | fc = sum(1/(gg+c))-1 313 | while abs(fc) > self.eps: 314 | fpc = sum(-1.0/(gg+c)**2) 315 | c = c - fc / fpc 316 | fc = sum(1/(gg+c))-1 317 | x = 1.0/(gg+c) 318 | return x 319 | 320 | 321 | class ShannonEntropy(LegendreFunction): 322 | """ 323 | h(x) = sum_{i=1}^n x[i]*log(x[i]) for x >= 0, note h(0) = 0 324 | """ 325 | def __init__(self, delta=1e-20): 326 | self.delta = delta 327 | 328 | def __call__(self, x): 329 | assert x.min() >= 0, "ShannonEntropy takes nonnegative arguments." 330 | xx = np.maximum(x, self.delta) 331 | return sum( xx * np.log(xx) ) 332 | 333 | def gradient(self, x): 334 | assert x.min() >= 0, "ShannonEntropy takes nonnegative arguments." 335 | xx = np.maximum(x, self.delta) 336 | return 1.0 + np.log(xx) 337 | 338 | def divergence(self, x, y): 339 | assert x.shape == y.shape, "Vectors x and y are of different shapes." 340 | assert x.min() >= 0 and y.min() >= 0, "Some entries are negative." 341 | #for i in range(x.size): 342 | # if x[i] > 0 and y[i] == 0: 343 | # return np.inf 344 | return sum(x*np.log((x+self.delta)/(y+self.delta))) + (sum(y)-sum(x)) 345 | 346 | def prox_map(self, g, L): 347 | """ 348 | Return argmin_{x >= 0} { + L * h(x) } 349 | """ 350 | assert L > 0, "ShannonEntropy prox_map require L > 0." 351 | return np.exp(-g/L - 1) 352 | 353 | def div_prox_map(self, y, g, L): 354 | """ 355 | Return argmin_{x >= 0} { + L * D(x,y) } 356 | """ 357 | assert y.shape == g.shape, "Vectors y and g are of different sizes." 358 | assert y.min() >= 0 and L > 0, "Some entries of y are negavie." 359 | #gg = g/L - self.gradient(y) 360 | #return self.prox_map(gg, 1) 361 | return y * np.exp(-g/L) 362 | 363 | 364 | class ShannonEntropyL1(ShannonEntropy): 365 | """ 366 | h(x) = sum_{i=1}^n x[i]*log(x[i]) for x >= 0, note h(0) = 0 367 | used in the context of min_{x >=0 } f(x) + lamda * ||x||_1 368 | """ 369 | def __init__(self, lamda=0, delta=1e-20): 370 | ShannonEntropy.__init__(self, delta) 371 | self.lamda = lamda 372 | 373 | def extra_Psi(self, x): 374 | """ 375 | return lamda * ||x||_1 376 | """ 377 | return self.lamda * x.sum() 378 | 379 | def prox_map(self, g, L): 380 | """ 381 | Return argmin_{x >= 0} { lamda * ||x||_1 + + L * h(x) } 382 | """ 383 | return ShannonEntropy.prox_map(self, self.lamda + g, L) 384 | 385 | def div_prox_map(self, y, g, L): 386 | """ 387 | Return argmin_{x >= 0} { lamda * ||x||_1 + + L * D(x,y) } 388 | """ 389 | return ShannonEntropy.div_prox_map(self, y, self.lamda + g, L) 390 | 391 | 392 | class ShannonEntropySimplex(ShannonEntropy): 393 | """ 394 | h(x) = sum_{i=1}^n x[i]*log(x[i]) for x >= 0, note h(0) = 0 395 | used in the context of min_{x in C } f(x) where C is standard simplex 396 | """ 397 | 398 | def prox_map(self, g, L): 399 | """ 400 | Return argmin_{x in C} { + L * h(x) } where C is unit simplex 401 | """ 402 | assert L > 0, "ShannonEntropy prox_map require L > 0." 403 | x = np.exp(-g/L - 1) 404 | return x / sum(x) 405 | 406 | def div_prox_map(self, y, g, L): 407 | """ 408 | Return argmin_{x in C} { + L*d(x,y) } where C is unit simplex 409 | """ 410 | assert y.shape == g.shape, "Vectors y and g are of different shapes." 411 | assert y.min() > 0 and L > 0, "prox_map needs positive arguments." 
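        # (Note added.) Entropic mirror-descent update on the simplex:
        # each x[i] is proportional to y[i]*exp(-g[i]/L), then normalized to sum to 1.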
412 | x = y * np.exp(-g/L) 413 | return x / sum(x) 414 | 415 | 416 | class SumOf2nd4thPowers(LegendreFunction): 417 | """ 418 | h(x) = (1/2)||x||_2^2 + (M/4)||x||_2^4 419 | """ 420 | def __init__(self, M): 421 | self.M = M 422 | 423 | def __call__(self, x): 424 | normsq = np.dot(x, x) 425 | return 0.5 * normsq + (self.M / 4) * normsq**2 426 | 427 | def gradient(self, x): 428 | normsq = np.dot(x, x) 429 | return (1 + self.M * normsq) * x 430 | 431 | def divergence(self, x, y): 432 | assert x.shape == y.shape, "Bregman div: x and y not same shape." 433 | return self.__call__(x) - (self.__call__(y) 434 | + np.dot(self.gradient(y), x-y)) 435 | 436 | class SquaredL2Norm(LegendreFunction): 437 | """ 438 | h(x) = (1/2)||x||_2^2 439 | """ 440 | def __call__(self, x): 441 | return 0.5*np.dot(x, x) 442 | 443 | def gradient(self, x): 444 | return x 445 | 446 | def divergence(self, x, y): 447 | assert x.shape == y.shape, "SquaredL2Norm: x and y not same shape." 448 | xy = x - y 449 | return 0.5*np.dot(xy, xy) 450 | 451 | def prox_map(self, g, L): 452 | assert L > 0, "SquaredL2Norm: L should be positive." 453 | return -(1/L)*g 454 | 455 | def div_prox_map(self, y, g, L): 456 | assert y.shape == g.shape and L > 0, "Vectors y and g not same shape." 457 | return y - (1/L)*g 458 | 459 | 460 | class PowerNeg1(LegendreFunction): 461 | """ 462 | h(x) = 1/x for x>0 463 | """ 464 | def __call__(self, x): 465 | return 1/x 466 | 467 | def gradient(self, x): 468 | return -1/(x*x) 469 | 470 | def divergence(self, x, y): 471 | assert x.shape == y.shape, "SquaredL2Norm: x and y not same shape." 472 | xy = x - y 473 | return np.sum(xy*xy/(x*y*y)) 474 | 475 | def prox_map(self, g, L): 476 | assert L > 0, "SquaredL2Norm: L should be positive." 477 | return np.sqrt(L/g) 478 | 479 | 480 | class L2L1Linf(LegendreFunction): 481 | """ 482 | usng h(x) = (1/2)||x||_2^2 in solving problems of the form 483 | 484 | minimize f(x) + lamda * ||x||_1 485 | subject to ||x||_inf <= B 486 | 487 | """ 488 | def __init__(self, lamda=0, B=1): 489 | self.lamda = lamda 490 | self.B = B 491 | 492 | def __call__(self, x): 493 | return 0.5*np.dot(x, x) 494 | 495 | def extra_Psi(self, x): 496 | """ 497 | return lamda * ||x||_1 498 | """ 499 | return self.lamda * np.sum(abs(x)) 500 | 501 | def gradient(self, x): 502 | """ 503 | gradient of h(x) = (1/2)||x||_2^2 504 | """ 505 | return x 506 | 507 | def divergence(self, x, y): 508 | """ 509 | Bregman divergence D(x, y) = (1/2)||x-y||_2^2 510 | """ 511 | assert x.shape == y.shape, "L2L1Linf: x and y not same shape." 512 | xy = x - y 513 | return 0.5*np.dot(xy, xy) 514 | 515 | def prox_map(self, g, L): 516 | """ 517 | Return argmin_{x in C} { Psi(x) + + L * h(x) } 518 | """ 519 | assert L > 0, "L2L1Linf: L should be positive." 520 | x = -(1.0/L) * g 521 | threshold = self.lamda / L 522 | x[abs(x) <= threshold] = 0 523 | x[x > threshold] -= threshold 524 | x[x < -threshold] += threshold 525 | np.clip(x, -self.B, self.B, out=x) 526 | return x 527 | 528 | def div_prox_map(self, y, g, L): 529 | """ 530 | Return argmin_{x in C} { Psi(x) + + L * D(x,y) } 531 | """ 532 | assert y.shape == g.shape and L > 0, "Vectors y and g not same shape." 533 | return self.prox_map(g - L*y, L) 534 | -------------------------------------------------------------------------------- /accbpg/algorithms.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
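#
# Solvers for relatively-smooth composite problems min_{x in C} f(x) + Psi(x):
# BPG, ABPG, ABPG_expo, ABPG_gain and ABDA. Usage sketch taken from the README
# (a random D-optimal design instance of size 80 by 200):
#
#   import accbpg
#   f, h, L, x0 = accbpg.D_opt_design(80, 200)
#   x1, F1, G1, T1 = accbpg.BPG(f, h, L, x0, maxitrs=1000, verbskip=100)
#   x2, F2, G2, T2 = accbpg.ABPG(f, h, L, x0, gamma=2, maxitrs=1000, verbskip=100)
#   x3, F3, G3, _, _, T3 = accbpg.ABPG_gain(f, h, L, x0, gamma=2, maxitrs=1000, verbskip=100)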
3 | 4 | 5 | import numpy as np 6 | import time 7 | 8 | 9 | def BPG(f, h, L, x0, maxitrs, epsilon=1e-14, linesearch=True, ls_ratio=1.2, 10 | verbose=True, verbskip=1): 11 | """ 12 | Bregman Proximal Gradient (BGP) method for min_{x in C} f(x) + Psi(x): 13 | 14 | x(k+1) = argmin_{x in C} { Psi(x) + + L(k) * D_h(x,x(k))} 15 | 16 | Inputs: 17 | f, h, L: f is L-smooth relative to h, and Psi is defined within h 18 | x0: initial point to start algorithm 19 | maxitrs: maximum number of iterations 20 | epsilon: stop if F(x[k])-F(x[k-1]) < epsilon, where F(x)=f(x)+Psi(x) 21 | linesearch: whether or not perform line search (True or False) 22 | ls_ratio: backtracking line search parameter >= 1 23 | verbose: display computational progress (True or False) 24 | verbskip: number of iterations to skip between displays 25 | 26 | Returns (x, Fx, Ls): 27 | x: the last iterate of BPG 28 | F: array storing F(x[k]) for all k 29 | Ls: array storing local Lipschitz constants obtained by line search 30 | T: array storing time used up to iteration k 31 | """ 32 | 33 | if verbose: 34 | print("\nBPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)") 35 | print(" k F(x) Lk time") 36 | 37 | start_time = time.time() 38 | F = np.zeros(maxitrs) 39 | Ls = np.ones(maxitrs) * L 40 | T = np.zeros(maxitrs) 41 | 42 | x = np.copy(x0) 43 | for k in range(maxitrs): 44 | fx, g = f.func_grad(x) 45 | F[k] = fx + h.extra_Psi(x) 46 | T[k] = time.time() - start_time 47 | 48 | if linesearch: 49 | L = L / ls_ratio 50 | x1 = h.div_prox_map(x, g, L) 51 | while f(x1) > fx + np.dot(g, x1-x) + L*h.divergence(x1, x): 52 | L = L * ls_ratio 53 | x1 = h.div_prox_map(x, g, L) 54 | x = x1 55 | else: 56 | x = h.div_prox_map(x, g, L) 57 | 58 | # store and display computational progress 59 | Ls[k] = L 60 | if verbose and k % verbskip == 0: 61 | print("{0:6d} {1:10.3e} {2:10.3e} {3:6.1f}".format(k, F[k], L, T[k])) 62 | 63 | # stopping criteria 64 | if k > 0 and abs(F[k]-F[k-1]) < epsilon: 65 | break; 66 | 67 | F = F[0:k+1] 68 | Ls = Ls[0:k+1] 69 | T = T[0:k+1] 70 | return x, F, Ls, T 71 | 72 | 73 | def solve_theta(theta, gamma, gainratio=1): 74 | """ 75 | solve theta_k1 from the equation 76 | (1-theta_k1)/theta_k1^gamma = gainratio * 1/theta_k^gamma 77 | using Newton's method, starting from theta 78 | 79 | """ 80 | ckg = theta**gamma / gainratio 81 | cta = theta 82 | eps = 1e-6 * theta 83 | phi = cta**gamma - ckg*(1-cta) 84 | while abs(phi) > eps: 85 | drv = gamma * cta**(gamma-1) + ckg 86 | cta = cta - phi / drv 87 | phi = cta**gamma - ckg*(1-cta) 88 | 89 | return cta 90 | 91 | 92 | def ABPG(f, h, L, x0, gamma, maxitrs, epsilon=1e-14, theta_eq=False, 93 | restart=False, restart_rule='g', verbose=True, verbskip=1): 94 | """ 95 | Accelerated Bregman Proximal Gradient (ABPG) method for solving 96 | minimize_{x in C} f(x) + Psi(x): 97 | 98 | Inputs: 99 | f, h, L: f is L-smooth relative to h, and Psi is defined within h 100 | x0: initial point to start algorithm 101 | gamma: triangle scaling exponent (TSE) for Bregman div D_h(x,y) 102 | maxitrs: maximum number of iterations 103 | epsilon: stop if D_h(z[k],z[k-1]) < epsilon 104 | theta_eq: calculate theta_k by solving equality using Newton's method 105 | restart: restart the algorithm when overshooting (True or False) 106 | restart_rule: 'f' for function increasing or 'g' for gradient angle 107 | verbose: display computational progress (True or False) 108 | verbskip: number of iterations to skip between displays 109 | 110 | Returns (x, Fx, Ls): 111 | x: the last iterate of BPG 112 | F: array storing F(x[k]) 
for all k 113 | G: triangle scaling gains D(xk,yk) / D(zk,zk_1) / theta_k^gamma 114 | T: array storing time used up to iteration k 115 | """ 116 | 117 | if verbose: 118 | print("\nABPG method for minimize_{x in C} F(x) = f(x) + Psi(x)") 119 | print(" k F(x) theta" + 120 | " TSG D(x+,y) D(z+,z) time") 121 | 122 | start_time = time.time() 123 | F = np.zeros(maxitrs) 124 | G = np.zeros(maxitrs) 125 | T = np.zeros(maxitrs) 126 | 127 | x = np.copy(x0) 128 | z = np.copy(x0) 129 | theta = 1.0 # initialize theta = 1 for updating with equality 130 | kk = 0 # separate counter for theta_k, easy for restart 131 | for k in range(maxitrs): 132 | # function value at previous iteration 133 | fx = f(x) 134 | F[k] = fx + h.extra_Psi(x) 135 | T[k] = time.time() - start_time 136 | 137 | # Update three iterates x, y and z 138 | z_1 = z 139 | x_1 = x # only required for restart mode 140 | if theta_eq and kk > 0: 141 | theta = solve_theta(theta, gamma) 142 | else: 143 | theta = gamma / (kk + gamma) 144 | 145 | y = (1-theta)*x + theta*z_1 146 | g = f.gradient(y) 147 | z = h.div_prox_map(z_1, g, theta**(gamma-1) * L) 148 | x = (1-theta)*x + theta*z 149 | 150 | # compute triangle scaling quantities 151 | dxy = h.divergence(x, y) 152 | dzz = h.divergence(z, z_1) 153 | Gdr = dxy / dzz / theta**gamma 154 | 155 | # store and display computational progress 156 | G[k] = Gdr 157 | if verbose and k % verbskip == 0: 158 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:10.3e} {5:10.3e} {6:6.1f}".format( 159 | k, F[k], theta, Gdr, dxy, dzz, T[k])) 160 | 161 | # restart if gradient predicts objective increase 162 | kk += 1 163 | if restart and k > 0: 164 | #if k > 0 and F[k] > F[k-1]: 165 | #if np.dot(g, x-x_1) > 0: 166 | if (restart_rule == 'f' and F[k] > F[k-1]) or (restart_rule == 'g' and np.dot(g, x-x_1) > 0): 167 | theta = 1.0 # reset theta = 1 for updating with equality 168 | kk = 0 # reset kk = 0 for theta = gamma/(kk+gamma) 169 | z = x # in either case, reset z = x and also y 170 | 171 | # stopping criteria 172 | if dzz < epsilon: 173 | break; 174 | 175 | F = F[0:k+1] 176 | G = G[0:k+1] 177 | T = T[0:k+1] 178 | return x, F, G, T 179 | 180 | 181 | def ABPG_expo(f, h, L, x0, gamma0, maxitrs, epsilon=1e-14, delta=0.2, 182 | theta_eq=True, checkdiv=False, Gmargin=10, restart=False, 183 | restart_rule='g', verbose=True, verbskip=1): 184 | """ 185 | Accelerated Bregman Proximal Gradient method with exponent adaption for 186 | minimize_{x in C} f(x) + Psi(x) 187 | 188 | Inputs: 189 | f, h, L: f is L-smooth relative to h, and Psi is defined within h 190 | x0: initial point to start algorithm 191 | gamma0: initial triangle scaling exponent(TSE) for D_h(x,y) (>2) 192 | maxitrs: maximum number of iterations 193 | epsilon: stop if D_h(z[k],z[k-1]) < epsilon 194 | delta: amount to decrease TSE for exponent adaption 195 | theta_eq: calculate theta_k by solving equality using Newton's method 196 | checkdiv: check triangle scaling inequality for adaption (True/False) 197 | Gmargin: extra gain margin allowed for checking TSI 198 | restart: restart the algorithm when overshooting (True or False) 199 | restart_rule: 'f' for function increasing or 'g' for gradient angle 200 | verbose: display computational progress (True or False) 201 | verbskip: number of iterations to skip between displays 202 | 203 | Returns (x, Fx, Ls): 204 | x: the last iterate of BPG 205 | F: array storing F(x[k]) for all k 206 | Gamma: gamma_k obtained at each iteration 207 | G: triangle scaling gains D(xk,yk)/D(zk,zk_1)/theta_k^gamma_k 208 | T: array storing time 
used up to iteration k 209 | """ 210 | 211 | if verbose: 212 | print("\nABPG_expo method for min_{x in C} F(x) = f(x) + Psi(x)") 213 | print(" k F(x) theta gamma" + 214 | " TSG D(x+,y) D(z+,z) time") 215 | 216 | start_time = time.time() 217 | F = np.zeros(maxitrs) 218 | G = np.zeros(maxitrs) 219 | Gamma = np.ones(maxitrs) * gamma0 220 | T = np.zeros(maxitrs) 221 | 222 | gamma = gamma0 223 | x = np.copy(x0) 224 | z = np.copy(x0) 225 | theta = 1.0 # initialize theta = 1 for updating with equality 226 | kk = 0 # separate counter for theta_k, easy for restart 227 | for k in range(maxitrs): 228 | # function value at previous iteration 229 | fx = f(x) 230 | F[k] = fx + h.extra_Psi(x) 231 | T[k] = time.time() - start_time 232 | 233 | # Update three iterates x, y and z 234 | z_1 = z 235 | x_1 = x 236 | if theta_eq and kk > 0: 237 | theta = solve_theta(theta, gamma) 238 | else: 239 | theta = gamma / (kk + gamma) 240 | 241 | y = (1-theta)*x_1 + theta*z_1 242 | #g = f.gradient(y) 243 | fy, g = f.func_grad(y) 244 | 245 | condition = True 246 | while condition: # always execute at least once per iteration 247 | z = h.div_prox_map(z_1, g, theta**(gamma-1) * L) 248 | x = (1-theta)*x_1 + theta*z 249 | 250 | # compute triangle scaling quantities 251 | dxy = h.divergence(x, y) 252 | dzz = h.divergence(z, z_1) 253 | Gdr = dxy / dzz / theta**gamma 254 | 255 | if checkdiv: 256 | condition = (dxy > Gmargin * (theta**gamma) * dzz ) 257 | else: 258 | condition = (f(x) > fy + np.dot(g, x-y) + theta**gamma*L*dzz) 259 | 260 | if condition and gamma > 1: 261 | gamma = max(gamma - delta, 1) 262 | else: 263 | condition = False 264 | 265 | # store and display computational progress 266 | G[k] = Gdr 267 | Gamma[k] = gamma 268 | if verbose and k % verbskip == 0: 269 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:10.3e} {5:10.3e} {6:10.3e} {7:6.1f}".format( 270 | k, F[k], theta, gamma, Gdr, dxy, dzz, T[k])) 271 | 272 | # restart if gradient predicts objective increase 273 | kk += 1 274 | if restart: 275 | #if k > 0 and F[k] > F[k-1]: 276 | #if np.dot(g, x-x_1) > 0: 277 | if (restart_rule == 'f' and F[k] > F[k-1]) or (restart_rule == 'g' and np.dot(g, x-x_1) > 0): 278 | theta = 1.0 # reset theta = 1 for updating with equality 279 | kk = 0 # reset kk = 0 for theta = gamma/(kk+gamma) 280 | z = x # in either case, reset z = x and also y 281 | 282 | # stopping criteria 283 | if dzz < epsilon: 284 | break; 285 | 286 | F = F[0:k+1] 287 | Gamma = Gamma[0:k+1] 288 | G = G[0:k+1] 289 | T = T[0:k+1] 290 | return x, F, Gamma, G, T 291 | 292 | 293 | def ABPG_gain(f, h, L, x0, gamma, maxitrs, epsilon=1e-14, G0=1, 294 | ls_inc=1.2, ls_dec=1.2, theta_eq=True, checkdiv=False, 295 | restart=False, restart_rule='g', verbose=True, verbskip=1): 296 | """ 297 | Accelerated Bregman Proximal Gradient (ABPG) method with gain adaption for 298 | minimize_{x in C} f(x) + Psi(x): 299 | 300 | Inputs: 301 | f, h, L: f is L-smooth relative to h, and Psi is defined within h 302 | x0: initial point to start algorithm 303 | gamma: triangle scaling exponent(TSE) for Bregman distance D_h(x,y) 304 | G0: initial value for triangle scaling gain 305 | maxitrs: maximum number of iterations 306 | epsilon: stop if D_h(z[k],z[k-1]) < epsilon 307 | ls_inc: factor of increasing gain (>=1) 308 | ls_dec: factor of decreasing gain (>=1) 309 | theta_eq: calculate theta_k by solving equality using Newton's method 310 | checkdiv: check triangle scaling inequality for adaption (True/False) 311 | restart: restart the algorithm when overshooting (True/False) 312 | 
restart_rule: 'f' for function increasing or 'g' for gradient angle 313 | verbose: display computational progress (True/False) 314 | verbskip: number of iterations to skip between displays 315 | 316 | Returns (x, Fx, Ls): 317 | x: the last iterate of BPG 318 | F: array storing F(x[k]) for all k 319 | Gain: triangle scaling gains G_k obtained by LS at each iteration 320 | Gdiv: triangle scaling gains D(xk,yk)/D(zk,zk_1)/theta_k^gamma_k 321 | Gavg: geometric mean of G_k at all steps up to iteration k 322 | T: array storing time used up to iteration k 323 | """ 324 | if verbose: 325 | print("\nABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)") 326 | print(" k F(x) theta Gk" + 327 | " TSG D(x+,y) D(z+,z) Gavg time") 328 | 329 | start_time = time.time() 330 | F = np.zeros(maxitrs) 331 | Gain = np.ones(maxitrs) * G0 332 | Gdiv = np.zeros(maxitrs) 333 | Gavg = np.zeros(maxitrs) 334 | T = np.zeros(maxitrs) 335 | 336 | x = np.copy(x0) 337 | z = np.copy(x0) 338 | G = G0 339 | # logGavg = (gamma*log(G0) + log(G_1) + ... + log(Gk)) / (k+gamma) 340 | sumlogG = gamma * np.log(G) 341 | theta = 1.0 # initialize theta = 1 for updating with equality 342 | kk = 0 # separate counter for theta_k, easy for restart 343 | for k in range(maxitrs): 344 | # function value at previous iteration 345 | fx = f(x) 346 | F[k] = fx + h.extra_Psi(x) 347 | T[k] = time.time() - start_time 348 | 349 | # Update three iterates x, y and z 350 | z_1 = z 351 | x_1 = x 352 | # adaptive option: always try a smaller Gain first before line search 353 | G_1 = G 354 | theta_1 = theta 355 | 356 | G = G / ls_dec 357 | 358 | condition = True 359 | while condition: 360 | if kk > 0: 361 | if theta_eq: 362 | theta = solve_theta(theta_1, gamma, G / G_1) 363 | else: 364 | alpha = G / G_1 365 | theta = theta_1*((1+alpha*(gamma-1))/(gamma*alpha+theta_1)) 366 | 367 | y = (1-theta)*x_1 + theta*z_1 368 | #g = f.gradient(y) 369 | fy, g = f.func_grad(y) 370 | 371 | z = h.div_prox_map(z_1, g, theta**(gamma-1) * G * L) 372 | x = (1-theta)*x_1 + theta*z 373 | 374 | # compute triangle scaling quantities 375 | dxy = h.divergence(x, y) 376 | dzz = h.divergence(z, z_1) 377 | if dzz < epsilon: 378 | break 379 | 380 | Gdr = dxy / dzz / theta**gamma 381 | 382 | if checkdiv: 383 | condition = (Gdr > G ) 384 | else: 385 | condition = (f(x) > fy + np.dot(g,x-y) + theta**gamma*G*L*dzz) 386 | 387 | if condition: 388 | G = G * ls_inc 389 | 390 | # store and display computational progress 391 | Gain[k] = G 392 | Gdiv[k] = Gdr 393 | sumlogG += np.log(G) 394 | Gavg[k] = np.exp(sumlogG / (gamma + k)) 395 | if verbose and k % verbskip == 0: 396 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:10.3e} {5:10.3e} {6:10.3e} {7:10.3e} {8:6.1f}".format( 397 | k, F[k], theta, G, Gdr, dxy, dzz, Gavg[k], T[k])) 398 | 399 | # restart if gradient predicts objective increase 400 | kk += 1 401 | if restart: 402 | #if k > 0 and F[k] > F[k-1]: 403 | #if np.dot(g, x-x_1) > 0: 404 | if (restart_rule == 'f' and F[k] > F[k-1]) or (restart_rule == 'g' and np.dot(g, x-x_1) > 0): 405 | theta = 1.0 # reset theta = 1 for updating with equality 406 | kk = 0 # reset kk = 0 for theta = gamma/(kk+gamma) 407 | z = x # in either case, reset z = x and also y 408 | 409 | # stopping criteria 410 | if dzz < epsilon: 411 | break; 412 | 413 | F = F[0:k+1] 414 | Gain = Gain[0:k+1] 415 | Gdiv = Gdiv[0:k+1] 416 | Gavg = Gavg[0:k+1] 417 | T = T[0:k+1] 418 | return x, F, Gain, Gdiv, Gavg, T 419 | 420 | 421 | def ABDA(f, h, L, x0, gamma, maxitrs, epsilon=1e-14, theta_eq=True, 422 | verbose=True, verbskip=1): 
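    # In contrast with ABPG above, z is obtained by applying prox_map to a
    # running weighted average of all past gradients (weights theta**(1-gamma))
    # rather than by a Bregman step from z_{k-1}.  Restarting would have to
    # clear that average and discard the accumulated history, which is why
    # simple restart schemes are kept disabled below.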
423 | """ 424 | Accelerated Bregman Dual Averaging (ABDA) method for solving 425 | minimize_{x in C} f(x) + Psi(x) 426 | 427 | Inputs: 428 | f, h, L: f is L-smooth relative to h, and Psi is defined within h 429 | x0: initial point to start algorithm 430 | gamma: triangle scaling exponent (TSE) for Bregman distance D_h(x,y) 431 | maxitrs: maximum number of iterations 432 | epsilon: stop if D_h(z[k],z[k-1]) < epsilon 433 | theta_eq: calculate theta_k by solving equality using Newton's method 434 | verbose: display computational progress (True or False) 435 | verbskip: number of iterations to skip between displays 436 | 437 | Returns (x, Fx, Ls): 438 | x: the last iterate of BPG 439 | F: array storing F(x[k]) for all k 440 | G: triangle scaling gains D(xk,yk)/D(zk,zk_1)/theta_k^gamma 441 | T: array storing time used up to iteration k 442 | """ 443 | # Simple restart schemes for dual averaging method do not work! 444 | restart = False 445 | 446 | if verbose: 447 | print("\nABDA method for min_{x in C} F(x) = f(x) + Psi(x)") 448 | print(" k F(x) theta" + 449 | " TSG D(x+,y) D(z+,z) time") 450 | 451 | start_time = time.time() 452 | F = np.zeros(maxitrs) 453 | G = np.zeros(maxitrs) 454 | T = np.zeros(maxitrs) 455 | 456 | x = np.copy(x0) 457 | z = np.copy(x0) 458 | theta = 1.0 # initialize theta = 1 for updating with equality 459 | kk = 0 # separate counter for theta_k, easy for restart 460 | gavg = np.zeros(x.size) 461 | csum = 0 462 | for k in range(maxitrs): 463 | # function value at previous iteration 464 | fx = f(x) 465 | F[k] = fx + h.extra_Psi(x) 466 | T[k] = time.time() - start_time 467 | 468 | # Update three iterates x, y and z 469 | z_1 = z 470 | x_1 = x 471 | if theta_eq and kk > 0: 472 | theta = solve_theta(theta, gamma) 473 | else: 474 | theta = gamma / (kk + gamma) 475 | 476 | y = (1-theta)*x_1 + theta*z_1 477 | g = f.gradient(y) 478 | gavg = gavg + theta**(1-gamma) * g 479 | csum = csum + theta**(1-gamma) 480 | z = h.prox_map(gavg/csum, L/csum) 481 | x = (1-theta)*x_1 + theta*z 482 | 483 | # compute triangle scaling quantities 484 | dxy = h.divergence(x, y) 485 | dzz = h.divergence(z, z_1) 486 | Gdr = dxy / dzz / theta**gamma 487 | 488 | # store and display computational progress 489 | G[k] = Gdr 490 | if verbose and k % verbskip == 0: 491 | print("{0:6d} {1:10.3e} {2:10.3e} {3:10.3e} {4:10.3e} {5:10.3e} {6:6.1f}".format( 492 | k, F[k], theta, Gdr, dxy, dzz, T[k])) 493 | 494 | kk += 1 495 | # restart does not work for ABDA (restart = False) 496 | if restart: 497 | if k > 0 and F[k] > F[k-1]: 498 | #if np.dot(g, x-x_1) > 0: # this does not work for dual averaging 499 | theta = 1.0 # reset theta = 1 for updating with equality 500 | kk = 0 # reset kk = 0 for theta = gamma/(kk+gamma) 501 | z = x # in either case, reset z = x and also y 502 | gavg = np.zeros(x.size) # this is why restart does not work 503 | csum = 0 504 | 505 | # stopping criteria 506 | if dzz < epsilon: 507 | break; 508 | 509 | F = F[0:k+1] 510 | G = G[0:k+1] 511 | T = T[0:k+1] 512 | return x, F, G, T 513 | -------------------------------------------------------------------------------- /ipynb/ABPGvsFW/ex_Dopt_FW_n1000.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# D-optimal experiment design: comparing ABPG and Frank-Wolfe\n", 8 | "Solve the D-Optimal experiment design problem\n", 9 | "$$\n", 10 | "\\begin{array}{ll}\n", 11 | "\\textrm{minimize} & 
F(x):=\\log\\left(\\det\\left(\\sum_{i=1}^n x_i V_i V_i^T\\right)\\right) \\\\\n", 12 | "\\textrm{subject to} & \\sum_{i=1}^n x_i = 1, \\\\ \n", 13 | " & x_i\\geq 0, \\quad i=1,\\ldots,n\n", 14 | "\\end{array}\n", 15 | "$$\n", 16 | "where $V_i\\in R^m$ for $i=1,\\ldots,n$.\n", 17 | "\n", 18 | "Methods compared:\n", 19 | "* Original Frank-Wolfe method\n", 20 | "* Frank-Wolfe method with away steps\n", 21 | "* Bregman Proximal Gradient (BPG) method with adaptive line search\n", 22 | "* Accelerated Bregman Proximal Gradient (ABPG) method with gain adaption" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 1, 28 | "metadata": {}, 29 | "outputs": [ 30 | { 31 | "name": "stdout", 32 | "output_type": "stream", 33 | "text": [ 34 | "C:\\github\\accbpg\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "cd C:\\\\github\\accbpg" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 2, 45 | "metadata": { 46 | "collapsed": true 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "import numpy as np\n", 51 | "import matplotlib\n", 52 | "import matplotlib.pyplot as plt\n", 53 | "matplotlib.rcParams.update({'font.size': 12, 'font.family': 'serif'})\n", 54 | "#matplotlib.rcParams.update({'text.usetex': True})\n", 55 | "\n", 56 | "import accbpg" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 3, 62 | "metadata": { 63 | "scrolled": true 64 | }, 65 | "outputs": [ 66 | { 67 | "name": "stdout", 68 | "output_type": "stream", 69 | "text": [ 70 | "\n", 71 | "********** m = 100, n = 1000 **********\n", 72 | "\n", 73 | "Solving D-opt design problem using Frank-Wolfe method\n", 74 | " k F(x) pos_slack neg_slack time\n", 75 | " 0 2.124e+01 2.181e+00 2.811e-01 0.0\n", 76 | " 2000 -1.555e+00 1.723e-02 3.038e-01 0.3\n", 77 | " 4000 -1.725e+00 1.009e-02 2.944e-01 0.7\n", 78 | " 6000 -1.798e+00 7.361e-03 2.869e-01 1.0\n", 79 | " 8000 -1.840e+00 5.759e-03 2.819e-01 1.3\n", 80 | " 10000 -1.868e+00 4.882e-03 2.792e-01 1.5\n", 81 | " 12000 -1.888e+00 4.177e-03 2.764e-01 1.8\n", 82 | " 14000 -1.904e+00 3.697e-03 2.743e-01 2.0\n", 83 | " 16000 -1.916e+00 3.252e-03 2.725e-01 2.4\n", 84 | " 18000 -1.926e+00 2.924e-03 2.710e-01 2.8\n", 85 | "\n", 86 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 87 | " k F(x) pos_slack neg_slack time\n", 88 | " 0 2.124e+01 2.181e+00 2.811e-01 0.0\n", 89 | " 2000 -2.045e+00 7.112e-04 7.098e-04 0.6\n", 90 | " 4000 -2.045e+00 3.861e-06 4.047e-06 1.3\n", 91 | " 6000 -2.045e+00 3.455e-08 3.423e-08 1.9\n", 92 | "\n", 93 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 94 | " k F(x) Lk time\n", 95 | " 0 5.590e+00 6.667e-01 0.0\n", 96 | " 2000 -2.032e+00 8.779e-02 25.1\n", 97 | " 4000 -2.038e+00 1.317e-01 47.1\n", 98 | " 6000 -2.040e+00 8.779e-02 64.7\n", 99 | " 8000 -2.042e+00 5.853e-02 83.1\n", 100 | " 10000 -2.042e+00 1.317e-01 101.1\n", 101 | " 12000 -2.043e+00 5.853e-02 118.9\n", 102 | " 14000 -2.043e+00 1.317e-01 136.6\n", 103 | " 16000 -2.043e+00 8.779e-02 154.4\n", 104 | " 18000 -2.043e+00 8.779e-02 172.2\n", 105 | "\n", 106 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 107 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 108 | " 0 5.590e+00 1.000e+00 6.667e-01 1.000e+00 1.968e-01 1.968e-01 8.165e-01 0.0\n", 109 | " 2000 -2.044e+00 1.124e-03 1.317e-01 3.126e-01 7.813e-11 1.978e-04 1.315e-01 24.8\n", 110 | " 4000 -2.045e+00 7.412e-04 1.317e-01 2.947e-01 6.163e-12 3.806e-05 1.316e-01 49.2\n", 111 | " 6000 -2.045e+00 6.044e-04 1.317e-01 5.884e-01 5.463e-12 2.542e-05 1.316e-01 73.6\n", 
112 | " 8000 -2.045e+00 5.339e-04 1.317e-01 3.340e-01 7.256e-13 7.621e-06 1.316e-01 98.1\n", 113 | " 10000 -2.045e+00 4.926e-04 1.317e-01 7.387e-01 1.911e-12 1.066e-05 1.316e-01 122.5\n", 114 | " 12000 -2.045e+00 5.705e-04 8.779e-02 4.252e-01 5.611e-13 4.054e-06 1.316e-01 146.9\n", 115 | " 14000 -2.045e+00 4.465e-04 1.317e-01 8.297e-01 8.979e-13 5.430e-06 1.316e-01 171.3\n", 116 | " 16000 -2.045e+00 4.294e-04 1.317e-01 8.993e-01 7.454e-13 4.495e-06 1.316e-01 195.6\n", 117 | " 18000 -2.045e+00 5.086e-04 8.779e-02 6.704e-01 2.953e-13 1.703e-06 1.315e-01 220.0\n", 118 | "\n", 119 | "Solving D-opt design problem using Frank-Wolfe method\n", 120 | " k F(x) pos_slack neg_slack time\n", 121 | " 0 2.050e+01 2.077e+00 2.819e-01 0.0\n", 122 | " 2000 -2.487e+00 1.818e-02 2.957e-01 0.2\n", 123 | " 4000 -2.678e+00 1.077e-02 2.880e-01 0.5\n", 124 | " 6000 -2.762e+00 7.647e-03 2.821e-01 0.7\n", 125 | " 8000 -2.811e+00 5.995e-03 2.784e-01 1.0\n", 126 | " 10000 -2.843e+00 5.220e-03 2.744e-01 1.2\n", 127 | " 12000 -2.866e+00 4.472e-03 2.730e-01 1.5\n", 128 | " 14000 -2.884e+00 3.855e-03 2.703e-01 1.7\n", 129 | " 16000 -2.898e+00 3.452e-03 2.694e-01 2.0\n", 130 | " 18000 -2.909e+00 3.203e-03 2.675e-01 2.2\n", 131 | "\n", 132 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 133 | " k F(x) pos_slack neg_slack time\n", 134 | " 0 2.050e+01 2.077e+00 2.819e-01 0.0\n", 135 | " 2000 -3.038e+00 6.566e-04 6.346e-04 0.5\n", 136 | " 4000 -3.038e+00 3.431e-06 3.444e-06 1.1\n", 137 | " 6000 -3.038e+00 2.609e-08 2.584e-08 1.6\n", 138 | "\n", 139 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 140 | " k F(x) Lk time\n", 141 | " 0 4.464e+00 6.667e-01 0.0\n", 142 | " 2000 -3.025e+00 1.317e-01 17.7\n", 143 | " 4000 -3.032e+00 8.779e-02 35.4\n", 144 | " 6000 -3.034e+00 8.779e-02 53.5\n", 145 | " 8000 -3.035e+00 5.853e-02 71.4\n", 146 | " 10000 -3.035e+00 1.317e-01 89.3\n", 147 | " 12000 -3.036e+00 3.902e-02 107.3\n", 148 | " 14000 -3.036e+00 8.779e-02 125.0\n", 149 | " 16000 -3.037e+00 1.975e-01 142.9\n", 150 | " 18000 -3.037e+00 1.317e-01 160.5\n", 151 | "\n", 152 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 153 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 154 | " 0 4.464e+00 1.000e+00 6.667e-01 1.000e+00 1.849e-01 1.849e-01 8.165e-01 0.0\n", 155 | " 2000 -3.038e+00 1.122e-03 1.317e-01 4.391e-01 1.503e-10 2.717e-04 1.401e-01 25.1\n", 156 | " 4000 -3.038e+00 7.224e-04 1.317e-01 6.126e-01 2.543e-11 7.953e-05 1.402e-01 50.1\n", 157 | " 6000 -3.038e+00 5.761e-04 1.317e-01 6.071e-01 6.334e-12 3.144e-05 1.402e-01 75.1\n", 158 | " 8000 -3.038e+00 4.071e-04 1.975e-01 7.360e-01 1.928e-12 1.580e-05 1.403e-01 100.0\n", 159 | " 10000 -3.038e+00 5.535e-04 8.779e-02 3.917e-01 1.052e-12 8.763e-06 1.403e-01 124.8\n", 160 | " 12000 -3.038e+00 3.440e-04 1.975e-01 7.387e-01 4.190e-13 4.794e-06 1.403e-01 149.8\n", 161 | " 14000 -3.038e+00 3.272e-04 1.975e-01 8.930e-01 5.824e-13 6.092e-06 1.403e-01 174.7\n", 162 | " 16000 -3.038e+00 3.153e-04 1.975e-01 7.909e-01 2.720e-13 3.460e-06 1.402e-01 200.1\n", 163 | " 18000 -3.038e+00 3.071e-04 1.975e-01 6.648e-01 1.392e-13 2.221e-06 1.402e-01 225.8\n", 164 | "\n", 165 | "Solving D-opt design problem using Frank-Wolfe method\n", 166 | " k F(x) pos_slack neg_slack time\n", 167 | " 0 1.972e+01 2.069e+00 2.966e-01 0.0\n", 168 | " 2000 -1.813e+00 1.719e-02 3.061e-01 0.3\n", 169 | " 4000 -1.984e+00 1.048e-02 3.001e-01 0.5\n", 170 | " 6000 -2.058e+00 7.465e-03 2.954e-01 0.8\n", 171 | " 8000 -2.102e+00 6.096e-03 2.913e-01 1.0\n", 172 | " 10000 
-2.132e+00 4.914e-03 2.894e-01 1.3\n", 173 | " 12000 -2.154e+00 4.310e-03 2.872e-01 1.5\n", 174 | " 14000 -2.171e+00 3.879e-03 2.857e-01 1.8\n", 175 | " 16000 -2.184e+00 3.524e-03 2.843e-01 2.0\n", 176 | " 18000 -2.195e+00 3.217e-03 2.831e-01 2.3\n", 177 | "\n", 178 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 179 | " k F(x) pos_slack neg_slack time\n", 180 | " 0 1.972e+01 2.069e+00 2.966e-01 0.0\n", 181 | " 2000 -2.326e+00 6.254e-04 6.299e-04 0.5\n", 182 | " 4000 -2.327e+00 3.337e-06 3.343e-06 1.1\n", 183 | " 6000 -2.327e+00 2.952e-08 2.950e-08 1.7\n", 184 | "\n", 185 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 186 | " k F(x) Lk time\n", 187 | " 0 5.023e+00 6.667e-01 0.0\n", 188 | " 2000 -2.314e+00 8.779e-02 18.3\n", 189 | " 4000 -2.320e+00 8.779e-02 36.2\n", 190 | " 6000 -2.322e+00 8.779e-02 54.3\n", 191 | " 8000 -2.323e+00 5.853e-02 72.0\n", 192 | " 10000 -2.324e+00 8.779e-02 90.0\n", 193 | " 12000 -2.324e+00 8.779e-02 108.2\n", 194 | " 14000 -2.325e+00 8.779e-02 126.4\n", 195 | " 16000 -2.325e+00 8.779e-02 144.5\n", 196 | " 18000 -2.325e+00 8.779e-02 162.4\n", 197 | "\n", 198 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 199 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 200 | " 0 5.023e+00 1.000e+00 6.667e-01 1.000e+00 1.882e-01 1.882e-01 8.165e-01 0.0\n", 201 | " 2000 -2.326e+00 1.545e-03 5.853e-02 1.593e-01 1.415e-10 3.723e-04 1.244e-01 26.0\n", 202 | " 4000 -2.326e+00 7.461e-04 1.317e-01 6.522e-01 2.506e-11 6.901e-05 1.236e-01 51.0\n", 203 | " 6000 -2.326e+00 5.872e-04 1.317e-01 7.318e-01 9.307e-12 3.689e-05 1.218e-01 75.7\n", 204 | " 8000 -2.327e+00 4.112e-04 1.975e-01 7.912e-01 2.186e-12 1.635e-05 1.209e-01 101.9\n", 205 | " 10000 -2.327e+00 4.517e-04 1.317e-01 6.361e-01 1.006e-12 7.747e-06 1.205e-01 127.8\n", 206 | " 12000 -2.327e+00 4.178e-04 1.317e-01 7.897e-01 1.028e-12 7.458e-06 1.202e-01 153.3\n", 207 | " 14000 -2.327e+00 4.842e-04 8.779e-02 4.384e-01 2.844e-13 2.767e-06 1.201e-01 178.0\n", 208 | " 16000 -2.327e+00 4.636e-04 8.779e-02 4.913e-01 2.136e-13 2.023e-06 1.199e-01 202.6\n", 209 | " 18000 -2.327e+00 4.492e-04 8.779e-02 5.352e-01 1.525e-13 1.413e-06 1.197e-01 227.2\n", 210 | "\n", 211 | "********** m = 200, n = 1000 **********\n", 212 | "\n", 213 | "Solving D-opt design problem using Frank-Wolfe method\n", 214 | " k F(x) pos_slack neg_slack time\n", 215 | " 0 5.348e+01 1.508e+00 2.002e-01 0.0\n", 216 | " 2000 1.898e+01 1.656e-02 2.076e-01 0.6\n", 217 | " 4000 1.883e+01 9.295e-03 1.971e-01 1.1\n", 218 | " 6000 1.877e+01 6.627e-03 1.905e-01 1.7\n", 219 | " 8000 1.874e+01 5.127e-03 1.850e-01 2.2\n", 220 | " 10000 1.872e+01 4.236e-03 1.808e-01 2.7\n" 221 | ] 222 | }, 223 | { 224 | "name": "stdout", 225 | "output_type": "stream", 226 | "text": [ 227 | " 12000 1.870e+01 3.664e-03 1.776e-01 3.3\n", 228 | " 14000 1.869e+01 3.183e-03 1.747e-01 3.8\n", 229 | " 16000 1.868e+01 2.867e-03 1.725e-01 4.3\n", 230 | " 18000 1.867e+01 2.592e-03 1.706e-01 4.8\n", 231 | "\n", 232 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 233 | " k F(x) pos_slack neg_slack time\n", 234 | " 0 5.348e+01 1.508e+00 2.002e-01 0.0\n", 235 | " 2000 1.857e+01 1.109e-03 1.119e-03 1.2\n", 236 | " 4000 1.857e+01 2.740e-06 2.832e-06 2.3\n", 237 | " 6000 1.857e+01 1.050e-08 1.053e-08 3.4\n", 238 | "\n", 239 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 240 | " k F(x) Lk time\n", 241 | " 0 2.231e+01 6.667e-01 0.0\n", 242 | " 2000 1.858e+01 1.317e-01 52.0\n", 243 | " 4000 1.858e+01 1.317e-01 104.4\n", 
244 | " 6000 1.858e+01 8.779e-02 154.0\n", 245 | " 8000 1.857e+01 8.779e-02 203.9\n", 246 | " 10000 1.857e+01 1.317e-01 253.6\n", 247 | " 12000 1.857e+01 8.779e-02 303.8\n", 248 | " 14000 1.857e+01 1.317e-01 353.7\n", 249 | " 16000 1.857e+01 8.779e-02 404.1\n", 250 | " 18000 1.857e+01 1.317e-01 457.3\n", 251 | "\n", 252 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 253 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 254 | " 0 2.231e+01 1.000e+00 6.667e-01 1.000e+00 3.229e-01 3.229e-01 8.165e-01 0.0\n", 255 | " 2000 1.857e+01 1.197e-03 1.317e-01 6.753e-01 3.249e-11 3.359e-05 1.518e-01 76.3\n", 256 | " 4000 1.857e+01 8.106e-04 1.317e-01 6.216e-01 2.233e-12 5.468e-06 1.518e-01 157.2\n", 257 | " 6000 1.857e+01 5.540e-04 1.975e-01 7.624e-01 5.242e-13 2.240e-06 1.518e-01 235.3\n", 258 | " 8000 1.857e+01 4.853e-04 1.975e-01 8.706e-01 4.363e-13 2.128e-06 1.514e-01 311.2\n", 259 | " 10000 1.857e+01 4.309e-04 1.975e-01 2.972e-01 1.110e-14 2.012e-07 1.511e-01 384.9\n", 260 | " 12000 1.857e+01 6.045e-04 8.779e-02 3.628e-01 3.930e-14 2.964e-07 1.517e-01 457.0\n", 261 | " 14000 1.857e+01 4.172e-04 1.975e-01 7.832e-01 3.308e-14 2.427e-07 1.567e-01 535.5\n", 262 | " 16000 1.857e+01 5.137e-04 1.317e-01 8.652e-01 8.393e-14 3.677e-07 1.598e-01 606.2\n", 263 | " 18000 1.857e+01 8.303e-04 1.317e-01 9.740e-01 2.034e-13 3.029e-07 1.728e-01 676.0\n", 264 | "\n", 265 | "Solving D-opt design problem using Frank-Wolfe method\n", 266 | " k F(x) pos_slack neg_slack time\n", 267 | " 0 5.339e+01 1.762e+00 2.020e-01 0.0\n", 268 | " 2000 1.885e+01 1.555e-02 2.074e-01 0.6\n", 269 | " 4000 1.872e+01 8.390e-03 1.940e-01 1.2\n", 270 | " 6000 1.867e+01 6.164e-03 1.860e-01 1.8\n", 271 | " 8000 1.864e+01 4.815e-03 1.804e-01 2.4\n", 272 | " 10000 1.862e+01 3.917e-03 1.755e-01 2.9\n", 273 | " 12000 1.861e+01 3.326e-03 1.721e-01 3.4\n", 274 | " 14000 1.860e+01 2.934e-03 1.692e-01 3.9\n", 275 | " 16000 1.859e+01 2.563e-03 1.667e-01 4.5\n", 276 | " 18000 1.858e+01 2.328e-03 1.644e-01 5.0\n", 277 | "\n", 278 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 279 | " k F(x) pos_slack neg_slack time\n", 280 | " 0 5.339e+01 1.762e+00 2.020e-01 0.0\n", 281 | " 2000 1.851e+01 1.070e-03 1.073e-03 1.2\n", 282 | " 4000 1.851e+01 2.976e-06 2.983e-06 2.3\n", 283 | " 6000 1.851e+01 1.100e-08 1.108e-08 3.4\n", 284 | "\n", 285 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 286 | " k F(x) Lk time\n", 287 | " 0 2.265e+01 6.667e-01 0.0\n", 288 | " 2000 1.851e+01 1.317e-01 51.8\n", 289 | " 4000 1.851e+01 1.975e-01 104.8\n", 290 | " 6000 1.851e+01 1.317e-01 158.8\n", 291 | " 8000 1.851e+01 1.317e-01 214.3\n", 292 | " 10000 1.851e+01 1.317e-01 266.9\n", 293 | " 12000 1.851e+01 8.779e-02 317.6\n", 294 | " 14000 1.851e+01 1.317e-01 368.6\n", 295 | " 16000 1.851e+01 1.975e-01 418.5\n", 296 | " 18000 1.851e+01 5.853e-02 468.9\n", 297 | "\n", 298 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 299 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 300 | " 0 2.265e+01 1.000e+00 6.667e-01 1.000e+00 3.551e-01 3.551e-01 8.165e-01 0.0\n", 301 | " 2000 1.851e+01 1.212e-03 1.317e-01 4.998e-01 2.727e-11 3.714e-05 1.609e-01 68.9\n", 302 | " 4000 1.851e+01 8.504e-04 1.317e-01 5.519e-01 2.975e-12 7.453e-06 1.611e-01 140.8\n", 303 | " 6000 1.851e+01 5.862e-04 1.975e-01 6.512e-01 5.471e-13 2.445e-06 1.612e-01 218.1\n", 304 | " 8000 1.851e+01 6.381e-04 1.317e-01 4.163e-01 1.776e-13 1.048e-06 1.612e-01 298.4\n", 305 | " 10000 1.851e+01 6.978e-04 8.779e-02 3.041e-01 1.092e-13 7.379e-07 1.614e-01 
374.7\n", 306 | " 12000 1.851e+01 4.256e-04 1.975e-01 7.635e-01 8.260e-14 5.973e-07 1.620e-01 459.1\n", 307 | " 14000 1.851e+01 4.041e-04 1.975e-01 5.084e-01 1.510e-14 1.818e-07 1.636e-01 541.3\n", 308 | " 16000 1.851e+01 7.328e-04 1.975e-01 6.537e-01 1.976e-14 5.629e-08 1.720e-01 621.2\n", 309 | " 18000 1.851e+01 6.772e-04 1.975e-01 9.235e-01 1.037e-13 2.448e-07 1.811e-01 702.7\n", 310 | "\n", 311 | "Solving D-opt design problem using Frank-Wolfe method\n", 312 | " k F(x) pos_slack neg_slack time\n", 313 | " 0 5.346e+01 1.675e+00 2.140e-01 0.0\n", 314 | " 2000 1.760e+01 1.766e-02 2.613e-01 0.6\n", 315 | " 4000 1.743e+01 1.018e-02 2.496e-01 1.2\n", 316 | " 6000 1.736e+01 7.344e-03 2.414e-01 1.7\n", 317 | " 8000 1.732e+01 5.900e-03 2.360e-01 2.2\n", 318 | " 10000 1.729e+01 4.933e-03 2.312e-01 2.7\n", 319 | " 12000 1.727e+01 4.343e-03 2.280e-01 3.2\n", 320 | " 14000 1.725e+01 3.739e-03 2.248e-01 3.8\n", 321 | " 16000 1.724e+01 3.423e-03 2.223e-01 4.3\n", 322 | " 18000 1.723e+01 3.133e-03 2.196e-01 4.9\n", 323 | "\n", 324 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 325 | " k F(x) pos_slack neg_slack time\n", 326 | " 0 5.346e+01 1.675e+00 2.140e-01 0.0\n", 327 | " 2000 1.708e+01 1.171e-03 1.179e-03 1.2\n", 328 | " 4000 1.708e+01 2.903e-06 2.913e-06 2.3\n", 329 | " 6000 1.708e+01 1.174e-08 1.184e-08 3.5\n", 330 | "\n", 331 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 332 | " k F(x) Lk time\n", 333 | " 0 2.104e+01 6.667e-01 0.0\n", 334 | " 2000 1.708e+01 1.317e-01 54.3\n", 335 | " 4000 1.708e+01 1.317e-01 106.6\n", 336 | " 6000 1.708e+01 1.317e-01 158.8\n", 337 | " 8000 1.708e+01 8.779e-02 211.2\n", 338 | " 10000 1.708e+01 5.853e-02 261.7\n", 339 | " 12000 1.708e+01 8.779e-02 311.5\n", 340 | " 14000 1.708e+01 1.317e-01 360.9\n", 341 | " 16000 1.708e+01 8.779e-02 413.1\n", 342 | " 18000 1.708e+01 8.779e-02 463.5\n", 343 | "\n", 344 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 345 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 346 | " 0 2.104e+01 1.000e+00 6.667e-01 1.000e+00 3.458e-01 3.458e-01 8.165e-01 0.0\n", 347 | " 2000 1.708e+01 1.138e-03 1.317e-01 7.066e-01 2.838e-11 3.101e-05 1.320e-01 69.7\n", 348 | " 4000 1.708e+01 8.968e-04 8.779e-02 2.949e-01 9.623e-13 4.058e-06 1.319e-01 139.7\n", 349 | " 6000 1.708e+01 7.250e-04 8.779e-02 2.592e-01 2.056e-13 1.509e-06 1.318e-01 209.6\n", 350 | " 8000 1.708e+01 4.187e-04 1.975e-01 7.847e-01 1.728e-13 1.256e-06 1.319e-01 279.4\n", 351 | " 10000 1.708e+01 3.828e-04 1.975e-01 3.437e-01 1.066e-14 2.116e-07 1.328e-01 350.0\n", 352 | " 12000 1.708e+01 4.081e-04 1.975e-01 8.767e-01 8.882e-14 6.083e-07 1.397e-01 420.5\n", 353 | " 14000 1.708e+01 2.681e-04 4.444e-01 1.628e-01 4.441e-16 3.795e-08 1.424e-01 491.4\n", 354 | " 16000 1.708e+01 3.641e-04 2.963e-01 7.564e-01 1.243e-14 1.240e-07 1.494e-01 561.5\n", 355 | " 18000 1.708e+01 8.593e-04 5.853e-02 8.741e-01 1.696e-13 2.628e-07 1.570e-01 631.6\n", 356 | "\n", 357 | "********** m = 300, n = 1000 **********\n", 358 | "\n", 359 | "Solving D-opt design problem using Frank-Wolfe method\n", 360 | " k F(x) pos_slack neg_slack time\n", 361 | " 0 8.703e+01 1.797e+00 1.566e-01 0.0\n", 362 | " 2000 4.839e+01 1.378e-02 1.434e-01 2.5\n", 363 | " 4000 4.829e+01 7.322e-03 1.314e-01 5.1\n", 364 | " 6000 4.826e+01 5.141e-03 1.241e-01 7.7\n", 365 | " 8000 4.823e+01 3.975e-03 1.191e-01 10.2\n", 366 | " 10000 4.822e+01 3.322e-03 1.147e-01 12.7\n", 367 | " 12000 4.821e+01 2.787e-03 1.113e-01 15.3\n", 368 | " 14000 4.821e+01 2.409e-03 1.083e-01 17.8\n", 369 | 
" 16000 4.820e+01 2.160e-03 1.059e-01 20.3\n", 370 | " 18000 4.820e+01 1.924e-03 1.036e-01 22.9\n", 371 | "\n", 372 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 373 | " k F(x) pos_slack neg_slack time\n", 374 | " 0 8.703e+01 1.797e+00 1.566e-01 0.0\n", 375 | " 2000 4.816e+01 3.961e-04 3.737e-04 3.7\n", 376 | " 4000 4.816e+01 2.840e-07 2.849e-07 7.2\n" 377 | ] 378 | }, 379 | { 380 | "name": "stdout", 381 | "output_type": "stream", 382 | "text": [ 383 | "\n", 384 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 385 | " k F(x) Lk time\n", 386 | " 0 5.050e+01 6.667e-01 0.0\n", 387 | "\n", 388 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 389 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 390 | " 0 5.050e+01 1.000e+00 6.667e-01 1.000e+00 4.692e-01 4.692e-01 8.165e-01 0.0\n", 391 | "\n", 392 | "Solving D-opt design problem using Frank-Wolfe method\n", 393 | " k F(x) pos_slack neg_slack time\n", 394 | " 0 8.577e+01 1.807e+00 1.577e-01 0.0\n", 395 | " 2000 4.868e+01 1.472e-02 1.778e-01 1.0\n", 396 | " 4000 4.856e+01 8.239e-03 1.646e-01 2.0\n", 397 | " 6000 4.851e+01 5.874e-03 1.559e-01 3.0\n", 398 | " 8000 4.848e+01 4.483e-03 1.498e-01 3.9\n", 399 | " 10000 4.847e+01 3.609e-03 1.452e-01 4.8\n", 400 | " 12000 4.846e+01 3.001e-03 1.417e-01 5.8\n", 401 | " 14000 4.845e+01 2.565e-03 1.387e-01 6.7\n", 402 | " 16000 4.844e+01 2.253e-03 1.362e-01 7.7\n", 403 | " 18000 4.844e+01 2.005e-03 1.339e-01 8.6\n", 404 | "\n", 405 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 406 | " k F(x) pos_slack neg_slack time\n", 407 | " 0 8.577e+01 1.807e+00 1.577e-01 0.0\n", 408 | " 2000 4.840e+01 4.435e-04 4.388e-04 2.0\n", 409 | " 4000 4.840e+01 3.008e-07 3.103e-07 3.9\n", 410 | "\n", 411 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 412 | " k F(x) Lk time\n", 413 | " 0 5.102e+01 6.667e-01 0.0\n", 414 | " 2000 4.840e+01 1.317e-01 74.5\n", 415 | "\n", 416 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 417 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 418 | " 0 5.102e+01 1.000e+00 6.667e-01 1.000e+00 5.135e-01 5.135e-01 8.165e-01 0.0\n", 419 | "\n", 420 | "Solving D-opt design problem using Frank-Wolfe method\n", 421 | " k F(x) pos_slack neg_slack time\n", 422 | " 0 8.494e+01 1.577e+00 1.762e-01 0.0\n", 423 | " 2000 4.806e+01 1.614e-02 1.671e-01 1.0\n", 424 | " 4000 4.792e+01 9.183e-03 1.530e-01 2.0\n", 425 | " 6000 4.786e+01 6.549e-03 1.446e-01 2.9\n", 426 | " 8000 4.782e+01 4.977e-03 1.385e-01 3.8\n", 427 | " 10000 4.780e+01 4.058e-03 1.336e-01 4.8\n", 428 | " 12000 4.779e+01 3.376e-03 1.295e-01 5.7\n", 429 | " 14000 4.778e+01 2.901e-03 1.261e-01 6.6\n", 430 | " 16000 4.777e+01 2.556e-03 1.233e-01 7.6\n", 431 | " 18000 4.777e+01 2.334e-03 1.207e-01 8.6\n", 432 | "\n", 433 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 434 | " k F(x) pos_slack neg_slack time\n", 435 | " 0 8.494e+01 1.577e+00 1.762e-01 0.0\n", 436 | " 2000 4.770e+01 3.696e-04 3.670e-04 2.0\n", 437 | " 4000 4.770e+01 2.464e-07 2.435e-07 4.0\n", 438 | "\n", 439 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 440 | " k F(x) Lk time\n", 441 | " 0 5.036e+01 6.667e-01 0.0\n", 442 | "\n", 443 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 444 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 445 | " 0 5.036e+01 1.000e+00 6.667e-01 1.000e+00 5.308e-01 5.308e-01 8.165e-01 0.0\n", 446 | "\n", 447 | "********** m = 400, n = 1000 **********\n", 448 | "\n", 449 | "Solving D-opt 
design problem using Frank-Wolfe method\n", 450 | " k F(x) pos_slack neg_slack time\n", 451 | " 0 1.230e+02 1.438e+00 1.563e-01 0.0\n", 452 | " 2000 9.456e+01 1.163e-02 1.718e-01 6.8\n", 453 | " 4000 9.448e+01 6.607e-03 1.606e-01 13.5\n", 454 | " 6000 9.445e+01 4.681e-03 1.538e-01 20.2\n", 455 | " 8000 9.444e+01 3.605e-03 1.489e-01 26.9\n", 456 | " 10000 9.442e+01 2.985e-03 1.450e-01 33.6\n", 457 | " 12000 9.442e+01 2.565e-03 1.416e-01 40.3\n", 458 | " 14000 9.441e+01 2.280e-03 1.388e-01 47.0\n", 459 | " 16000 9.441e+01 2.014e-03 1.363e-01 53.7\n", 460 | " 18000 9.440e+01 1.836e-03 1.340e-01 60.4\n", 461 | "\n", 462 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 463 | " k F(x) pos_slack neg_slack time\n", 464 | " 0 1.230e+02 1.438e+00 1.563e-01 0.0\n", 465 | " 2000 9.437e+01 1.144e-04 1.146e-04 9.8\n", 466 | " 4000 9.437e+01 4.341e-08 4.337e-08 19.7\n", 467 | "\n", 468 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 469 | " k F(x) Lk time\n", 470 | " 0 9.587e+01 6.667e-01 0.0\n", 471 | "\n", 472 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 473 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 474 | " 0 9.587e+01 1.000e+00 6.667e-01 1.000e+00 5.276e-01 5.276e-01 8.165e-01 0.0\n", 475 | "\n", 476 | "Solving D-opt design problem using Frank-Wolfe method\n", 477 | " k F(x) pos_slack neg_slack time\n", 478 | " 0 1.197e+02 1.394e+00 1.506e-01 0.0\n", 479 | " 2000 9.156e+01 1.222e-02 1.304e-01 6.8\n", 480 | " 4000 9.148e+01 6.475e-03 1.184e-01 13.4\n", 481 | " 6000 9.145e+01 4.542e-03 1.111e-01 20.1\n", 482 | " 8000 9.143e+01 3.527e-03 1.059e-01 26.8\n", 483 | " 10000 9.142e+01 2.852e-03 1.017e-01 33.5\n", 484 | " 12000 9.142e+01 2.360e-03 9.835e-02 40.3\n", 485 | " 14000 9.141e+01 2.031e-03 9.550e-02 47.0\n", 486 | " 16000 9.141e+01 1.813e-03 9.303e-02 53.7\n", 487 | " 18000 9.141e+01 1.614e-03 9.091e-02 60.3\n", 488 | "\n", 489 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 490 | " k F(x) pos_slack neg_slack time\n", 491 | " 0 1.197e+02 1.394e+00 1.506e-01 0.0\n", 492 | " 2000 9.138e+01 1.085e-04 1.081e-04 9.8\n", 493 | " 4000 9.138e+01 4.996e-08 4.974e-08 19.5\n", 494 | "\n", 495 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 496 | " k F(x) Lk time\n", 497 | " 0 9.282e+01 6.667e-01 0.0\n", 498 | "\n", 499 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 500 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 501 | " 0 9.282e+01 1.000e+00 6.667e-01 1.000e+00 5.127e-01 5.127e-01 8.165e-01 0.0\n", 502 | "\n", 503 | "Solving D-opt design problem using Frank-Wolfe method\n", 504 | " k F(x) pos_slack neg_slack time\n", 505 | " 0 1.214e+02 1.474e+00 1.465e-01 0.0\n", 506 | " 2000 9.357e+01 1.231e-02 1.246e-01 6.8\n", 507 | " 4000 9.348e+01 6.577e-03 1.115e-01 13.5\n", 508 | " 6000 9.345e+01 4.712e-03 1.040e-01 20.2\n", 509 | " 8000 9.344e+01 3.524e-03 9.853e-02 26.8\n", 510 | " 10000 9.343e+01 2.888e-03 9.448e-02 33.5\n", 511 | " 12000 9.342e+01 2.446e-03 9.098e-02 40.1\n", 512 | " 14000 9.341e+01 2.103e-03 8.785e-02 46.8\n", 513 | " 16000 9.341e+01 1.846e-03 8.520e-02 53.5\n", 514 | " 18000 9.341e+01 1.650e-03 8.300e-02 60.2\n", 515 | "\n", 516 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 517 | " k F(x) pos_slack neg_slack time\n", 518 | " 0 1.214e+02 1.474e+00 1.465e-01 0.0\n", 519 | " 2000 9.338e+01 9.714e-05 9.775e-05 9.8\n", 520 | " 4000 9.338e+01 3.814e-08 3.729e-08 19.4\n", 521 | "\n", 522 | "BPG_LS method for min_{x in C} F(x) = f(x) + 
Psi(x)\n", 523 | " k F(x) Lk time\n", 524 | " 0 9.502e+01 6.667e-01 0.0\n", 525 | "\n", 526 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 527 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 528 | " 0 9.502e+01 1.000e+00 6.667e-01 1.000e+00 5.823e-01 5.823e-01 8.165e-01 0.0\n", 529 | "\n", 530 | "********** m = 500, n = 1000 **********\n", 531 | "\n", 532 | "Solving D-opt design problem using Frank-Wolfe method\n", 533 | " k F(x) pos_slack neg_slack time\n", 534 | " 0 1.518e+02 1.452e-01 1.508e-01 0.0\n", 535 | " 2000 1.509e+02 9.635e-03 1.165e-01 10.2\n", 536 | " 4000 1.508e+02 5.305e-03 1.066e-01 20.3\n", 537 | " 6000 1.508e+02 3.712e-03 1.003e-01 30.4\n", 538 | " 8000 1.508e+02 2.835e-03 9.559e-02 40.5\n", 539 | " 10000 1.508e+02 2.285e-03 9.192e-02 50.6\n", 540 | " 12000 1.508e+02 1.908e-03 8.890e-02 60.6\n", 541 | " 14000 1.508e+02 1.635e-03 8.646e-02 70.8\n", 542 | " 16000 1.508e+02 1.430e-03 8.431e-02 80.9\n", 543 | " 18000 1.508e+02 1.270e-03 8.240e-02 90.9\n", 544 | "\n", 545 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 546 | " k F(x) pos_slack neg_slack time\n", 547 | " 0 1.518e+02 1.452e-01 1.508e-01 0.0\n", 548 | " 2000 1.508e+02 2.435e-05 2.422e-05 14.6\n", 549 | " 4000 1.508e+02 2.126e-08 2.100e-08 29.2\n", 550 | "\n", 551 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 552 | " k F(x) Lk time\n", 553 | " 0 1.518e+02 6.667e-01 0.0\n", 554 | "\n", 555 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 556 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 557 | " 0 1.518e+02 1.000e+00 6.667e-01 1.000e+00 5.767e-01 5.767e-01 8.165e-01 0.0\n", 558 | "\n", 559 | "Solving D-opt design problem using Frank-Wolfe method\n", 560 | " k F(x) pos_slack neg_slack time\n", 561 | " 0 1.546e+02 1.457e-01 1.180e-01 0.0\n" 562 | ] 563 | }, 564 | { 565 | "name": "stdout", 566 | "output_type": "stream", 567 | "text": [ 568 | " 2000 1.538e+02 9.025e-03 8.160e-02 10.2\n", 569 | " 4000 1.537e+02 4.929e-03 7.140e-02 20.3\n", 570 | " 6000 1.537e+02 3.454e-03 6.499e-02 30.3\n", 571 | " 8000 1.537e+02 2.572e-03 6.027e-02 40.4\n", 572 | " 10000 1.537e+02 2.069e-03 5.668e-02 50.5\n", 573 | " 12000 1.537e+02 1.689e-03 5.383e-02 60.5\n", 574 | " 14000 1.537e+02 1.473e-03 5.139e-02 70.6\n", 575 | " 16000 1.537e+02 1.277e-03 4.928e-02 80.7\n", 576 | " 18000 1.537e+02 1.139e-03 4.744e-02 90.8\n", 577 | "\n", 578 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 579 | " k F(x) pos_slack neg_slack time\n", 580 | " 0 1.546e+02 1.457e-01 1.180e-01 0.0\n", 581 | " 2000 1.537e+02 3.591e-05 3.668e-05 14.7\n", 582 | " 4000 1.537e+02 6.206e-08 6.268e-08 29.4\n", 583 | "\n", 584 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 585 | " k F(x) Lk time\n", 586 | " 0 1.546e+02 6.667e-01 0.0\n", 587 | "\n", 588 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 589 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 590 | " 0 1.546e+02 1.000e+00 6.667e-01 1.000e+00 5.211e-01 5.211e-01 8.165e-01 0.0\n", 591 | "\n", 592 | "Solving D-opt design problem using Frank-Wolfe method\n", 593 | " k F(x) pos_slack neg_slack time\n", 594 | " 0 1.562e+02 1.428e-01 1.494e-01 0.0\n", 595 | " 2000 1.554e+02 9.199e-03 1.116e-01 10.1\n", 596 | " 4000 1.553e+02 5.078e-03 1.015e-01 20.1\n", 597 | " 6000 1.553e+02 3.587e-03 9.504e-02 30.2\n", 598 | " 8000 1.553e+02 2.762e-03 9.029e-02 40.3\n", 599 | " 10000 1.553e+02 2.218e-03 8.666e-02 50.4\n", 600 | " 12000 1.553e+02 1.920e-03 8.363e-02 60.5\n", 601 | " 14000 1.553e+02 
1.702e-03 8.102e-02 70.6\n", 602 | " 16000 1.553e+02 1.495e-03 7.863e-02 80.6\n", 603 | " 18000 1.553e+02 1.367e-03 7.656e-02 90.7\n", 604 | "\n", 605 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 606 | " k F(x) pos_slack neg_slack time\n", 607 | " 0 1.562e+02 1.428e-01 1.494e-01 0.0\n", 608 | " 2000 1.553e+02 3.276e-05 3.306e-05 14.7\n", 609 | " 4000 1.553e+02 2.694e-08 2.709e-08 29.2\n", 610 | "\n", 611 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 612 | " k F(x) Lk time\n", 613 | " 0 1.562e+02 6.667e-01 0.0\n", 614 | "\n", 615 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 616 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 617 | " 0 1.562e+02 1.000e+00 6.667e-01 1.000e+00 5.459e-01 5.459e-01 8.165e-01 0.0\n", 618 | "\n", 619 | "********** m = 600, n = 1000 **********\n", 620 | "\n", 621 | "Solving D-opt design problem using Frank-Wolfe method\n", 622 | " k F(x) pos_slack neg_slack time\n", 623 | " 0 2.348e+02 1.026e-01 1.188e-01 0.2\n", 624 | " 2000 2.342e+02 7.160e-03 8.847e-02 14.6\n", 625 | " 4000 2.342e+02 3.838e-03 8.050e-02 29.3\n", 626 | " 6000 2.342e+02 2.675e-03 7.559e-02 43.6\n", 627 | " 8000 2.342e+02 2.019e-03 7.197e-02 58.4\n", 628 | " 10000 2.342e+02 1.598e-03 6.916e-02 73.1\n", 629 | " 12000 2.342e+02 1.322e-03 6.689e-02 87.5\n", 630 | " 14000 2.342e+02 1.104e-03 6.504e-02 101.9\n", 631 | " 16000 2.342e+02 9.690e-04 6.344e-02 116.4\n", 632 | " 18000 2.342e+02 8.585e-04 6.203e-02 130.8\n", 633 | "\n", 634 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 635 | " k F(x) pos_slack neg_slack time\n", 636 | " 0 2.348e+02 1.026e-01 1.188e-01 0.0\n", 637 | " 2000 2.342e+02 1.705e-05 1.693e-05 21.1\n", 638 | " 4000 2.342e+02 2.282e-08 2.234e-08 42.2\n", 639 | "\n", 640 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 641 | " k F(x) Lk time\n", 642 | " 0 2.348e+02 6.667e-01 0.0\n", 643 | "\n", 644 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 645 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 646 | " 0 2.348e+02 1.000e+00 6.667e-01 1.000e+00 5.053e-01 5.053e-01 8.165e-01 0.0\n", 647 | "\n", 648 | "Solving D-opt design problem using Frank-Wolfe method\n", 649 | " k F(x) pos_slack neg_slack time\n", 650 | " 0 2.337e+02 1.401e-01 1.164e-01 0.0\n", 651 | " 2000 2.331e+02 7.738e-03 8.641e-02 14.5\n", 652 | " 4000 2.330e+02 4.229e-03 7.792e-02 28.9\n", 653 | " 6000 2.330e+02 2.957e-03 7.246e-02 43.3\n", 654 | " 8000 2.330e+02 2.262e-03 6.844e-02 57.7\n", 655 | " 10000 2.330e+02 1.808e-03 6.533e-02 72.2\n", 656 | " 12000 2.330e+02 1.498e-03 6.281e-02 86.6\n", 657 | " 14000 2.330e+02 1.276e-03 6.068e-02 101.0\n", 658 | " 16000 2.330e+02 1.100e-03 5.886e-02 115.3\n", 659 | " 18000 2.330e+02 9.726e-04 5.725e-02 129.8\n", 660 | "\n", 661 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 662 | " k F(x) pos_slack neg_slack time\n", 663 | " 0 2.337e+02 1.401e-01 1.164e-01 0.0\n", 664 | " 2000 2.330e+02 2.818e-05 2.734e-05 21.0\n", 665 | " 4000 2.330e+02 3.855e-08 4.077e-08 42.2\n", 666 | "\n", 667 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 668 | " k F(x) Lk time\n", 669 | " 0 2.337e+02 6.667e-01 0.0\n", 670 | "\n", 671 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 672 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 673 | " 0 2.337e+02 1.000e+00 6.667e-01 1.000e+00 5.478e-01 5.478e-01 8.165e-01 0.0\n", 674 | "\n", 675 | "Solving D-opt design problem using Frank-Wolfe method\n", 676 | " k F(x) pos_slack 
neg_slack time\n", 677 | " 0 2.345e+02 1.168e-01 1.099e-01 0.0\n", 678 | " 2000 2.339e+02 7.454e-03 7.910e-02 14.5\n", 679 | " 4000 2.339e+02 3.989e-03 7.045e-02 29.0\n", 680 | " 6000 2.339e+02 2.778e-03 6.503e-02 43.4\n", 681 | " 8000 2.339e+02 2.115e-03 6.118e-02 57.8\n", 682 | " 10000 2.338e+02 1.695e-03 5.817e-02 72.3\n", 683 | " 12000 2.338e+02 1.409e-03 5.569e-02 86.8\n", 684 | " 14000 2.338e+02 1.227e-03 5.359e-02 101.3\n", 685 | " 16000 2.338e+02 1.076e-03 5.174e-02 115.8\n", 686 | " 18000 2.338e+02 9.630e-04 5.010e-02 130.2\n", 687 | "\n", 688 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 689 | " k F(x) pos_slack neg_slack time\n", 690 | " 0 2.345e+02 1.168e-01 1.099e-01 0.0\n", 691 | " 2000 2.338e+02 1.745e-05 1.763e-05 21.1\n", 692 | " 4000 2.338e+02 2.310e-08 2.337e-08 42.0\n", 693 | "\n", 694 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 695 | " k F(x) Lk time\n", 696 | " 0 2.345e+02 6.667e-01 0.0\n", 697 | "\n", 698 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 699 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 700 | " 0 2.345e+02 1.000e+00 6.667e-01 1.000e+00 5.467e-01 5.467e-01 8.165e-01 0.0\n", 701 | "\n", 702 | "********** m = 700, n = 1000 **********\n", 703 | "\n", 704 | "Solving D-opt design problem using Frank-Wolfe method\n", 705 | " k F(x) pos_slack neg_slack time\n", 706 | " 0 3.391e+02 8.316e-02 1.263e-01 0.1\n", 707 | " 2000 3.388e+02 5.609e-03 1.012e-01 19.8\n", 708 | " 4000 3.387e+02 2.850e-03 9.505e-02 39.4\n", 709 | " 6000 3.387e+02 1.943e-03 9.141e-02 59.0\n", 710 | " 8000 3.387e+02 1.505e-03 8.875e-02 78.6\n", 711 | " 10000 3.387e+02 1.237e-03 8.662e-02 98.3\n", 712 | " 12000 3.387e+02 1.056e-03 8.485e-02 117.9\n", 713 | " 14000 3.387e+02 9.313e-04 8.331e-02 137.6\n", 714 | " 16000 3.387e+02 8.359e-04 8.194e-02 157.2\n", 715 | " 18000 3.387e+02 7.506e-04 8.072e-02 176.8\n", 716 | "\n", 717 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 718 | " k F(x) pos_slack neg_slack time\n", 719 | " 0 3.391e+02 8.316e-02 1.263e-01 0.1\n", 720 | " 2000 3.387e+02 2.407e-05 2.396e-05 28.9\n", 721 | " 4000 3.387e+02 4.905e-08 5.007e-08 57.5\n", 722 | "\n", 723 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 724 | " k F(x) Lk time\n", 725 | " 0 3.391e+02 6.667e-01 0.1\n", 726 | "\n", 727 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 728 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 729 | " 0 3.391e+02 1.000e+00 6.667e-01 1.000e+00 4.693e-01 4.693e-01 8.165e-01 0.0\n", 730 | "\n", 731 | "Solving D-opt design problem using Frank-Wolfe method\n", 732 | " k F(x) pos_slack neg_slack time\n", 733 | " 0 3.403e+02 9.170e-02 1.053e-01 0.1\n", 734 | " 2000 3.399e+02 6.508e-03 7.973e-02 19.7\n", 735 | " 4000 3.399e+02 3.609e-03 7.215e-02 39.3\n", 736 | " 6000 3.399e+02 2.542e-03 6.722e-02 59.0\n", 737 | " 8000 3.398e+02 1.944e-03 6.358e-02 78.7\n", 738 | " 10000 3.398e+02 1.577e-03 6.068e-02 98.3\n", 739 | " 12000 3.398e+02 1.316e-03 5.832e-02 118.0\n", 740 | " 14000 3.398e+02 1.127e-03 5.632e-02 137.7\n", 741 | " 16000 3.398e+02 9.797e-04 5.458e-02 157.6\n", 742 | " 18000 3.398e+02 8.670e-04 5.305e-02 178.0\n" 743 | ] 744 | }, 745 | { 746 | "name": "stdout", 747 | "output_type": "stream", 748 | "text": [ 749 | "\n", 750 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 751 | " k F(x) pos_slack neg_slack time\n", 752 | " 0 3.403e+02 9.170e-02 1.053e-01 0.1\n", 753 | " 2000 3.398e+02 1.410e-05 1.407e-05 28.5\n", 754 | " 4000 
3.398e+02 2.894e-08 2.891e-08 57.0\n", 755 | "\n", 756 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 757 | " k F(x) Lk time\n", 758 | " 0 3.403e+02 6.667e-01 0.1\n", 759 | "\n", 760 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 761 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 762 | " 0 3.403e+02 1.000e+00 6.667e-01 1.000e+00 4.770e-01 4.770e-01 8.165e-01 0.0\n", 763 | "\n", 764 | "Solving D-opt design problem using Frank-Wolfe method\n", 765 | " k F(x) pos_slack neg_slack time\n", 766 | " 0 3.404e+02 8.775e-02 1.051e-01 0.1\n", 767 | " 2000 3.400e+02 6.748e-03 7.916e-02 19.7\n", 768 | " 4000 3.400e+02 3.714e-03 7.103e-02 39.4\n", 769 | " 6000 3.400e+02 2.533e-03 6.614e-02 58.9\n", 770 | " 8000 3.400e+02 1.971e-03 6.254e-02 78.5\n", 771 | " 10000 3.400e+02 1.608e-03 5.963e-02 98.0\n", 772 | " 12000 3.400e+02 1.361e-03 5.719e-02 117.6\n", 773 | " 14000 3.400e+02 1.187e-03 5.509e-02 137.1\n", 774 | " 16000 3.400e+02 1.068e-03 5.325e-02 156.7\n", 775 | " 18000 3.400e+02 9.570e-04 5.160e-02 176.3\n", 776 | "\n", 777 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 778 | " k F(x) pos_slack neg_slack time\n", 779 | " 0 3.404e+02 8.775e-02 1.051e-01 0.1\n", 780 | " 2000 3.399e+02 2.365e-05 2.422e-05 28.6\n", 781 | " 4000 3.399e+02 3.866e-08 3.843e-08 57.0\n", 782 | "\n", 783 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 784 | " k F(x) Lk time\n", 785 | " 0 3.404e+02 6.667e-01 0.1\n", 786 | "\n", 787 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 788 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 789 | " 0 3.404e+02 1.000e+00 6.667e-01 1.000e+00 4.835e-01 4.835e-01 8.165e-01 0.0\n", 790 | "\n", 791 | "********** m = 800, n = 1000 **********\n", 792 | "\n", 793 | "Solving D-opt design problem using Frank-Wolfe method\n", 794 | " k F(x) pos_slack neg_slack time\n", 795 | " 0 4.779e+02 6.065e-02 7.766e-02 0.1\n", 796 | " 2000 4.777e+02 4.507e-03 5.814e-02 26.2\n", 797 | " 4000 4.777e+02 2.598e-03 5.228e-02 52.0\n", 798 | " 6000 4.777e+02 1.830e-03 4.852e-02 77.9\n", 799 | " 8000 4.777e+02 1.432e-03 4.570e-02 103.7\n", 800 | " 10000 4.777e+02 1.158e-03 4.346e-02 129.6\n", 801 | " 12000 4.777e+02 9.713e-04 4.160e-02 155.4\n", 802 | " 14000 4.777e+02 8.398e-04 4.003e-02 181.1\n", 803 | " 16000 4.777e+02 7.401e-04 3.864e-02 207.0\n", 804 | " 18000 4.777e+02 6.610e-04 3.742e-02 232.9\n", 805 | "\n", 806 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 807 | " k F(x) pos_slack neg_slack time\n", 808 | " 0 4.779e+02 6.065e-02 7.766e-02 0.1\n", 809 | " 2000 4.777e+02 2.030e-05 1.978e-05 37.9\n", 810 | " 4000 4.777e+02 2.888e-08 2.996e-08 75.8\n", 811 | "\n", 812 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 813 | " k F(x) Lk time\n", 814 | " 0 4.779e+02 6.667e-01 0.1\n", 815 | "\n", 816 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 817 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 818 | " 0 4.779e+02 1.000e+00 6.667e-01 1.000e+00 3.412e-01 3.412e-01 8.165e-01 0.0\n", 819 | "\n", 820 | "Solving D-opt design problem using Frank-Wolfe method\n", 821 | " k F(x) pos_slack neg_slack time\n", 822 | " 0 4.800e+02 6.539e-02 7.568e-02 0.1\n", 823 | " 2000 4.798e+02 4.775e-03 5.484e-02 26.0\n", 824 | " 4000 4.797e+02 2.491e-03 4.889e-02 51.9\n", 825 | " 6000 4.797e+02 1.628e-03 4.537e-02 77.7\n", 826 | " 8000 4.797e+02 1.214e-03 4.290e-02 103.7\n", 827 | " 10000 4.797e+02 9.607e-04 4.101e-02 129.5\n", 828 | " 12000 4.797e+02 7.991e-04 3.947e-02 155.4\n", 
829 | " 14000 4.797e+02 6.787e-04 3.818e-02 181.1\n", 830 | " 16000 4.797e+02 5.892e-04 3.707e-02 207.1\n", 831 | " 18000 4.797e+02 5.250e-04 3.609e-02 233.1\n", 832 | "\n", 833 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 834 | " k F(x) pos_slack neg_slack time\n", 835 | " 0 4.800e+02 6.539e-02 7.568e-02 0.1\n", 836 | " 2000 4.797e+02 3.822e-05 3.908e-05 38.1\n", 837 | " 4000 4.797e+02 2.298e-08 2.313e-08 75.9\n", 838 | "\n", 839 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 840 | " k F(x) Lk time\n", 841 | " 0 4.800e+02 6.667e-01 0.1\n", 842 | "\n", 843 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 844 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 845 | " 0 4.800e+02 1.000e+00 6.667e-01 1.000e+00 3.479e-01 3.479e-01 8.165e-01 0.0\n", 846 | "\n", 847 | "Solving D-opt design problem using Frank-Wolfe method\n", 848 | " k F(x) pos_slack neg_slack time\n", 849 | " 0 4.771e+02 7.549e-02 7.580e-02 0.1\n", 850 | " 2000 4.769e+02 4.713e-03 5.529e-02 26.0\n", 851 | " 4000 4.769e+02 2.607e-03 4.928e-02 51.8\n", 852 | " 6000 4.769e+02 1.826e-03 4.552e-02 77.6\n", 853 | " 8000 4.769e+02 1.408e-03 4.273e-02 103.5\n", 854 | " 10000 4.769e+02 1.125e-03 4.055e-02 129.2\n", 855 | " 12000 4.769e+02 9.447e-04 3.875e-02 155.0\n", 856 | " 14000 4.769e+02 8.113e-04 3.722e-02 180.9\n", 857 | " 16000 4.769e+02 7.088e-04 3.590e-02 206.8\n", 858 | " 18000 4.769e+02 6.309e-04 3.472e-02 232.8\n", 859 | "\n", 860 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 861 | " k F(x) pos_slack neg_slack time\n", 862 | " 0 4.771e+02 7.549e-02 7.580e-02 0.1\n", 863 | " 2000 4.769e+02 1.582e-05 1.555e-05 38.2\n", 864 | " 4000 4.769e+02 2.581e-08 2.481e-08 75.9\n", 865 | "\n", 866 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 867 | " k F(x) Lk time\n", 868 | " 0 4.771e+02 6.667e-01 0.1\n", 869 | "\n", 870 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 871 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 872 | " 0 4.771e+02 1.000e+00 6.667e-01 1.000e+00 3.522e-01 3.522e-01 8.165e-01 0.0\n", 873 | "\n", 874 | "********** m = 900, n = 1000 **********\n", 875 | "\n", 876 | "Solving D-opt design problem using Frank-Wolfe method\n", 877 | " k F(x) pos_slack neg_slack time\n", 878 | " 0 6.702e+02 3.761e-02 4.642e-02 0.1\n", 879 | " 2000 6.701e+02 2.961e-03 3.192e-02 33.3\n", 880 | " 4000 6.701e+02 1.508e-03 2.809e-02 66.6\n", 881 | " 6000 6.701e+02 1.023e-03 2.580e-02 99.8\n", 882 | " 8000 6.701e+02 7.687e-04 2.416e-02 133.1\n", 883 | " 10000 6.701e+02 6.207e-04 2.289e-02 166.3\n", 884 | " 12000 6.701e+02 5.274e-04 2.183e-02 199.6\n", 885 | " 14000 6.701e+02 4.627e-04 2.092e-02 232.8\n", 886 | " 16000 6.701e+02 4.103e-04 2.011e-02 266.0\n", 887 | " 18000 6.701e+02 3.649e-04 1.939e-02 299.3\n", 888 | "\n", 889 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 890 | " k F(x) pos_slack neg_slack time\n", 891 | " 0 6.702e+02 3.761e-02 4.642e-02 0.1\n", 892 | " 2000 6.701e+02 1.917e-05 1.936e-05 48.9\n", 893 | "\n", 894 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 895 | " k F(x) Lk time\n", 896 | " 0 6.702e+02 1.000e+00 0.1\n", 897 | "\n", 898 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 899 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 900 | " 0 6.702e+02 1.000e+00 1.000e+00 1.000e+00 8.417e-02 8.417e-02 1.000e+00 0.0\n", 901 | "\n", 902 | "Solving D-opt design problem using Frank-Wolfe method\n", 903 | " k F(x) pos_slack neg_slack time\n", 904 | 
" 0 6.711e+02 4.689e-02 5.598e-02 0.1\n", 905 | " 2000 6.710e+02 3.044e-03 4.179e-02 33.2\n", 906 | " 4000 6.710e+02 1.747e-03 3.766e-02 67.0\n", 907 | " 6000 6.710e+02 1.205e-03 3.503e-02 101.2\n", 908 | " 8000 6.710e+02 9.163e-04 3.313e-02 134.4\n", 909 | " 10000 6.710e+02 7.455e-04 3.161e-02 167.6\n", 910 | " 12000 6.710e+02 6.305e-04 3.037e-02 200.7\n", 911 | " 14000 6.710e+02 5.404e-04 2.931e-02 233.9\n", 912 | " 16000 6.710e+02 4.685e-04 2.839e-02 267.1\n", 913 | " 18000 6.710e+02 4.146e-04 2.758e-02 300.5\n", 914 | "\n", 915 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 916 | " k F(x) pos_slack neg_slack time\n", 917 | " 0 6.711e+02 4.689e-02 5.598e-02 0.1\n", 918 | " 2000 6.710e+02 8.364e-06 8.560e-06 49.2\n", 919 | " 4000 6.710e+02 1.228e-08 1.215e-08 98.1\n", 920 | "\n", 921 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 922 | " k F(x) Lk time\n", 923 | " 0 6.711e+02 1.000e+00 0.1\n", 924 | "\n", 925 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 926 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n" 927 | ] 928 | }, 929 | { 930 | "name": "stdout", 931 | "output_type": "stream", 932 | "text": [ 933 | " 0 6.711e+02 1.000e+00 1.000e+00 1.000e+00 8.710e-02 8.710e-02 1.000e+00 0.0\n", 934 | "\n", 935 | "Solving D-opt design problem using Frank-Wolfe method\n", 936 | " k F(x) pos_slack neg_slack time\n", 937 | " 0 6.702e+02 4.331e-02 5.407e-02 0.1\n", 938 | " 2000 6.701e+02 2.996e-03 3.980e-02 33.4\n", 939 | " 4000 6.701e+02 1.588e-03 3.592e-02 66.6\n", 940 | " 6000 6.701e+02 1.086e-03 3.353e-02 99.9\n", 941 | " 8000 6.701e+02 8.156e-04 3.182e-02 133.1\n", 942 | " 10000 6.701e+02 6.568e-04 3.048e-02 166.2\n", 943 | " 12000 6.701e+02 5.588e-04 2.938e-02 199.7\n", 944 | " 14000 6.701e+02 4.798e-04 2.843e-02 232.9\n", 945 | " 16000 6.701e+02 4.225e-04 2.760e-02 266.2\n", 946 | " 18000 6.701e+02 3.756e-04 2.688e-02 299.3\n", 947 | "\n", 948 | "Solving D-opt design problem using Frank-Wolfe method with away steps\n", 949 | " k F(x) pos_slack neg_slack time\n", 950 | " 0 6.702e+02 4.331e-02 5.407e-02 0.1\n", 951 | " 2000 6.701e+02 7.209e-06 7.175e-06 49.3\n", 952 | "\n", 953 | "BPG_LS method for min_{x in C} F(x) = f(x) + Psi(x)\n", 954 | " k F(x) Lk time\n", 955 | " 0 6.702e+02 1.000e+00 0.1\n", 956 | "\n", 957 | "ABPG_gain method for min_{x in C} F(x) = f(x) + Psi(x)\n", 958 | " k F(x) theta Gk TSG D(x+,y) D(z+,z) Gavg time\n", 959 | " 0 6.702e+02 1.000e+00 1.000e+00 1.000e+00 8.783e-02 8.783e-02 1.000e+00 0.0\n" 960 | ] 961 | } 962 | ], 963 | "source": [ 964 | "n = 1000\n", 965 | "m = [100, 200, 300, 400, 500, 600, 700, 800, 900]\n", 966 | "#n = 100\n", 967 | "#m = [10, 50, 80]\n", 968 | "K = 3\n", 969 | "eps = ['1e-3', '1e-4', '1e-5', '1e-6', '1e-7', '1e-8']\n", 970 | "\n", 971 | "Nmax = 20000\n", 972 | "Nskip = int(Nmax/10)\n", 973 | "\n", 974 | "Ieps = dict()\n", 975 | "Teps = dict()\n", 976 | "for s in eps:\n", 977 | " Ieps[s] = np.zeros((4, len(m), K))\n", 978 | " Teps[s] = np.zeros((4, len(m), K))\n", 979 | "\n", 980 | "for i in range(len(m)):\n", 981 | " print(\"\\n********** m = {0:d}, n = {1:d} **********\".format(m[i], n))\n", 982 | " for k in range(K):\n", 983 | " f, h, L, x0Kh = accbpg.D_opt_design(m[i], n)\n", 984 | " x0KY = accbpg.D_opt_KYinit(f.H)\n", 985 | " x0Mx = (1-1e-3)*x0KY + 1e-3*x0Kh\n", 986 | "\n", 987 | " _, F_FWKY, _, _, T_FWKY = accbpg.D_opt_FW(f.H, x0KY, 1e-8, maxitrs=Nmax, verbskip=Nskip)\n", 988 | " _, F_WAKY, _, _, T_WAKY = accbpg.D_opt_FW_away(f.H, x0KY, 1e-8, maxitrs=Nmax, verbskip=Nskip)\n", 989 
| " _, F_LSKh, _, T_LSKh = accbpg.BPG(f, h, L, x0Kh, maxitrs=Nmax, linesearch=True, ls_ratio=1.5, verbskip=Nskip)\n", 990 | " _, F_ABKh, _, _, _, T_ABKh = accbpg.ABPG_gain(f, h, L, x0Kh, gamma=2, maxitrs=Nmax, ls_inc=1.5, ls_dec=1.5, restart=True, verbskip=Nskip)\n", 991 | "\n", 992 | " Fmin = min(F_FWKY.min(), F_WAKY.min(), F_LSKh.min(), F_ABKh.min())\n", 993 | " F = [F_FWKY, F_WAKY, F_LSKh, F_ABKh]\n", 994 | " T = [T_FWKY, T_WAKY, T_LSKh, T_ABKh]\n", 995 | " for s in eps:\n", 996 | " for j in range(len(F)):\n", 997 | " I_eps = np.nonzero(F[j] - Fmin <= float(s))\n", 998 | " if len(I_eps[0]) > 0:\n", 999 | " i_eps = I_eps[0][0]\n", 1000 | " t_eps = T[j][i_eps]\n", 1001 | " else:\n", 1002 | " i_eps = Nmax + 1\n", 1003 | " t_eps = T[j][-1]\n", 1004 | " Ieps[s][j,i,k] = i_eps\n", 1005 | " Teps[s][j,i,k] = t_eps" 1006 | ] 1007 | }, 1008 | { 1009 | "cell_type": "code", 1010 | "execution_count": 16, 1011 | "metadata": {}, 1012 | "outputs": [ 1013 | { 1014 | "data": { 1015 | "text/plain": [ 1016 | "Text(0,0.5,'Time (sec)')" 1017 | ] 1018 | }, 1019 | "execution_count": 16, 1020 | "metadata": {}, 1021 | "output_type": "execute_result" 1022 | }, 1023 | { 1024 | "data": { 1025 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAtYAAAEJCAYAAACwkGChAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3Xl4lNX1wPHvTQj7noAge4IkBBGQgAIqURBQQl2qUkWRFkWkv7qhgIoyKFJsQVERNFXqhq11QQ0IahWwisgmKIQ9buwk7IRAkjm/P24SErJNktnnfJ5nnuR95513TqIcDnfuvceICEoppZRSSqmqCfN1AEoppZRSSgUDLayVUkoppZRyAy2slVJKKaWUcgMtrJVSSimllHIDLayVUkoppZRyAy2slVJKKaWUcgMtrJVSSimllHIDLayVUkoppZRyAy2slVJKKaWUcoNqvg6gsqKioqRt27a+DkMpFeLWrFmTLiJNfB1HsNJcr5TyB67m+oAtrNu2bcvq1at9HYZSKsQZY37xdQzBTHO9UsofuJrrA24qiDFmiDEm+ciRI74ORSmllFJKqQIBV1iLSIqIjGrQoIGvQ1FKKaWUUqpAwBXWSimllFJK+aOAnWOt/Ft2djY7d+4kKyvL16EEtJo1a9KyZUsiIiJ8HYpSXmWMGQIMad++va9DUWXQXO8emuuDhxbWyiN27txJvXr1aNu2LcYYX4cTkESEjIwMdu7cSbt27XwdjlJeJSIpQEpCQsKdvo5FlU5zfdVprg8uOhVEeURWVhaRkZGaaKvAGENkZKSOBCml/Jbm+qrTXB9ctLBWHuNqonU4HBhjij0cDodnAwwA+peVUsrfaa6vOs31wUMLa+VzDocDEaFv37707dsXEUFENNkqr0lLS6NTp05Uq1aNTp06kZaW5uuQlAo6muuVr3kj1wdcYa37WCtXrVy5ksTERHr37o3D4cDhcDBhwgT+9Kc/0ahRI9566y0AUlNTad68OSICwNNPP82AAQP4+eefi9wvOTmZhg0bMmbMGI4ePcp///tfYmJieOqpp5g4cSLh4eHMnDkTgL1799K3b18mTpzo1Z9ZVZzD4SAmJobU1FRyc3NJTU0lJiZG/7JXKgBonleu8lauD7jFi7qgJTilpaWxatUqMjMz6dSpEykpKURHR1fpnj179iQxMZHjx48X/MHJysri008/JSUlhf79+wOwaNEiateuzapVq+jZsydxcXH06dOHs9sojxo1iqlTpzJmzBhOnjzJu+++y5IlS2jdujUA+/btY/v27QCEh4fTp08fpkyZUqWfQSmlgom7c73meeUqh8PBlClTyM3NLTgXHh7u9sI64EasVWBKTEzktddeA+z2TImJiQUjCZmZmXTu3JnMzEwANm/eTOfOnfnggw8ASE9PJzExkZSUFMCOElRGTk4OEyZM4JprrmHAgAEsXLgQgN27dzN8+HAWLFgAwIoVK+jVq1ep99mwYQNPPPEE06dPL0i2ANOnT+fDDz/kq6++Ytq0aYwfP75ScSrvcjgcxMfHExZm02FYWBjx8fE6Yq1UJfg612ueV6VxOBxFimqA3NxcLaxVcMpPtABOp7PIcVUtWbKE++67j7FjxxacS0pKYsGCBRw5coSGDRsyePDggoQrIoSHh5d6v7vvvptu3bpRr169IucbNGjArFmzuP7667nsssvQ7qCBIyUlhbi4OMLDw4mLiyv4i135jk77C06eyvWa51V5HA4HO3bsID4+nvDwcOLj49mxY4f7B1HyFw8E2qN79+6i/FdqamqFro+PjxdAAAkLC5P4+Hi3xDFp0iQZO3asiIg4nU7ZsmWLiIgcPHhQGjZsKG+++aasWLFCnE6nNG/eXBYtWiRvvfWWiIgsX75cBg4cKAMHDpQdO3aIiEibNm1k4cKFct5558mLL75Y4nsCcuzYMbfEL1Lx36XyLmC1+EFODNaH5nr/5g+5PhjyvIjmen/naq4PuDnWKjilpKQUfEToqRFDYwwdOnQAoFGjRnTu3JlZs2axfPlyjDFcddVVPPjggyxbtgyAXr16sXjx4mL3ad26NcuWLePKK68kKyuLBx54wO2xKqVUMPJ0rtc8r3xNp4IovxAdHU2PHj3o27cvGzdurPLCRYDVq1fz1VdfsWLFCt59991izyclJREbG1swt3bw4ME0aNCAyMjIEu83d+5cjhw5QnJyMnXr1mXp0qXMmzePUaNGsXnzZsDOvyv8VSml1BnuzvWa55W/MXZ0O/AkJCTI6tWrfR2GKsWmTZvo2LFjhV6TmJgIwNKlS90fUACrzO9SeY8xZo2IJPg6jmClud6/aa53H
[base64 PNG data omitted: the rendered figure shows two log-scale panels, the geometric-mean number of iterations (left) and time in seconds (right) required to reach eps = 1e-3, plotted against m for FW-KY, FW-away, BPG-LS and ABPG-g]\n",   1026 |             "text/plain": [   1027 |              "
" 1028 | ] 1029 | }, 1030 | "metadata": {}, 1031 | "output_type": "display_data" 1032 | } 1033 | ], 1034 | "source": [ 1035 | "s = '1e-3'\n", 1036 | "\n", 1037 | "m = np.array(m)\n", 1038 | "Igem = np.zeros((4,len(m)))\n", 1039 | "Imax = np.zeros((4,len(m)))\n", 1040 | "Imin = np.zeros((4,len(m)))\n", 1041 | "Tgem = np.zeros((4,len(m)))\n", 1042 | "Tmax = np.zeros((4,len(m)))\n", 1043 | "Tmin = np.zeros((4,len(m)))\n", 1044 | "\n", 1045 | "for i in range(4):\n", 1046 | " for j in range(len(m)):\n", 1047 | " Igem[i,j] = Ieps[s][i,j].prod()**(1.0/K)\n", 1048 | " Imax[i,j] = Ieps[s][i,j].max() \n", 1049 | " Imin[i,j] = Ieps[s][i,j].min()\n", 1050 | " Tgem[i,j] = Teps[s][i,j].prod()**(1.0/K)\n", 1051 | " Tmax[i,j] = Teps[s][i,j].max()\n", 1052 | " Tmin[i,j] = Teps[s][i,j].min()\n", 1053 | "\n", 1054 | "# Plot required number of iterations and time\n", 1055 | "plt.subplots(1, 2, figsize=(12, 4))\n", 1056 | "plt.subplots_adjust(wspace=0.3)\n", 1057 | "\n", 1058 | "labels = [r\"FW-KY\", r\"FW-away\", r\"BPG-LS\", r\"ABPG-g\"]\n", 1059 | "linestyles = ['k:', 'g-', 'b-.', 'r--']\n", 1060 | "\n", 1061 | "ax1 = plt.subplot(1,2,1)\n", 1062 | "for i in range(4):\n", 1063 | " idx = np.nonzero(Igem[i] <= Nmax)[0]\n", 1064 | " if len(idx) > 0:\n", 1065 | " ax1.errorbar(m[idx], Igem[i,idx], yerr=[Igem[i,idx]-Imin[i,idx], Imax[i,idx]-Igem[i,idx]], \n", 1066 | " fmt=linestyles[i], label=labels[i], marker='o', markersize=4, capsize=3)\n", 1067 | "ax1.legend()\n", 1068 | "ax1.set_yscale('log')\n", 1069 | "ax1.set_xlabel(r\"$m$\")\n", 1070 | "ax1.set_ylabel(r\"Number of iterations\")\n", 1071 | "\n", 1072 | "ax2 = plt.subplot(1,2,2)\n", 1073 | "for i in range(4):\n", 1074 | " idx = np.nonzero(Igem[i] <= Nmax)[0]\n", 1075 | " if len(idx) > 0:\n", 1076 | " ax2.errorbar(m[idx], Tgem[i,idx], yerr=[Tgem[i,idx]-Tmin[i,idx], Tmax[i,idx]-Tgem[i,idx]], \n", 1077 | " fmt=linestyles[i], label=labels[i], marker='o', markersize=4, capsize=3)\n", 1078 | "ax2.legend()\n", 1079 | "ax2.set_yscale('log')\n", 1080 | "ax2.set_xlabel(r\"m\")\n", 1081 | "ax2.set_ylabel(r\"Time (sec)\")\n" 1082 | ] 1083 | }, 1084 | { 1085 | "cell_type": "code", 1086 | "execution_count": null, 1087 | "metadata": { 1088 | "collapsed": true 1089 | }, 1090 | "outputs": [], 1091 | "source": [] 1092 | } 1093 | ], 1094 | "metadata": { 1095 | "kernelspec": { 1096 | "display_name": "Python 3", 1097 | "language": "python", 1098 | "name": "python3" 1099 | }, 1100 | "language_info": { 1101 | "codemirror_mode": { 1102 | "name": "ipython", 1103 | "version": 3 1104 | }, 1105 | "file_extension": ".py", 1106 | "mimetype": "text/x-python", 1107 | "name": "python", 1108 | "nbconvert_exporter": "python", 1109 | "pygments_lexer": "ipython3", 1110 | "version": "3.6.2" 1111 | } 1112 | }, 1113 | "nbformat": 4, 1114 | "nbformat_minor": 2 1115 | } 1116 | --------------------------------------------------------------------------------