├── .gitignore ├── .idea ├── .name ├── linvpy.iml ├── misc.xml ├── modules.xml └── vcs.xml ├── LICENSE ├── README.rst ├── __init__.py ├── docs ├── Makefile ├── linvpy.rst ├── linvpy.tests.rst ├── make.bat ├── modules.rst └── source │ ├── Makefile │ ├── conf.py │ ├── images │ ├── EPFL_logo.png │ ├── bisquare.png │ ├── cauchy.png │ ├── huber.png │ ├── lcav_logo_.png │ ├── optimal.png │ ├── outlier_effect.eps │ ├── outlier_effect.png │ ├── rho_functions_points.eps │ └── sum_elements.pdf │ ├── index.rst │ └── make.bat ├── linvpy.py ├── packaging_tutorial.pdf ├── setup.py └── tests ├── __init__.py ├── generate_random.py ├── regularizedtau ├── Sridge │ ├── __init__.py │ ├── matlab │ │ ├── RidgeCode │ │ │ ├── CVRidRob.m │ │ │ ├── MMRid.m │ │ │ ├── Mloca.m │ │ │ ├── PeYoRid.m │ │ │ ├── READ_ME.m │ │ │ ├── RidSEMM.m │ │ │ ├── RobRidge.m │ │ │ ├── SPC.m │ │ │ ├── centrar.m │ │ │ ├── desprepa.m │ │ │ ├── desrobrid.m │ │ │ ├── divcol.m │ │ │ ├── findlam.m │ │ │ ├── mscale.m │ │ │ ├── prepara.m │ │ │ ├── svdecon.m │ │ │ ├── tauscale.m │ │ │ └── unitol.m │ │ ├── __init__.py │ │ └── sridge.m │ └── python │ │ ├── __init__.py │ │ └── sridge.py ├── __init__.py ├── figures │ ├── asv_l1.eps │ ├── asv_l2.eps │ ├── bs_l1.eps │ ├── bs_l2.eps │ ├── experiment_one.eps │ ├── experiment_three.eps │ ├── experiment_two.eps │ ├── final_versions │ │ ├── asv_l1.eps │ │ ├── asv_l2.eps │ │ ├── bs_l1.eps │ │ ├── bs_l2.eps │ │ ├── experiment_one.eps │ │ ├── experiment_three.eps │ │ ├── experiment_three_edited.eps │ │ ├── experiment_two.eps │ │ ├── real_data_regression_ls.eps │ │ ├── real_data_tau_reg.eps │ │ ├── real_data_tau_reg_50.eps │ │ └── sensitivitycurve_l1.png │ ├── real_data_regression.eps │ ├── sc_l1.eps │ └── sc_none.eps ├── linvpy_latest.py ├── ls.p ├── m.p ├── mathematica_data │ ├── IFtauL1.mat │ ├── IFtauL2.mat │ └── IFtauNonReg.mat ├── matlab_data │ └── experimentalData.mat ├── mes.p ├── results_data │ ├── asv_l1.pkl │ ├── asv_l2.pkl │ ├── bs_l1.pkl │ ├── bs_l2.pkl │ ├── 
errors_tau_l2.pkl │ ├── errors_tau_l2_51.24_c1_1.5_2_c2_3_4_l_-7_-5.pkl │ ├── errors_tau_l2_c1_1_2_c2_2_4_reg_-7_-3.pkl │ ├── experiment_one.pkl │ ├── experiment_three.pkl │ ├── experiment_two.pkl │ ├── final_versions │ │ ├── asv_l1.pkl │ │ ├── asv_l2.pkl │ │ ├── bs_l1.pkl │ │ ├── bs_l2.pkl │ │ ├── experiment_one.pkl │ │ ├── experiment_two.pkl │ │ └── sc_l1.pkl │ ├── initial_x_ls.pkl │ ├── sc.mat │ ├── sc.p │ ├── sc_l1.pkl │ └── sc_none.pkl ├── sensitivitycurve.eps ├── sourcethree.p ├── sourcetwo.p ├── tau.p ├── tauscale.m ├── toolboxexperiments.py ├── toolboxinverse_latest.py ├── toolboxutilities.py └── toolboxutilities_latest.py ├── test.py └── test_final.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | #Ipython Notebook 62 | .ipynb_checkpoints 63 | 64 | #Custom rules (Guillaume) 65 | *.xml 66 | *.iml 67 | .idea 68 | .idea/* -------------------------------------------------------------------------------- /.idea/.name: -------------------------------------------------------------------------------- 1 | linvpy -------------------------------------------------------------------------------- /.idea/linvpy.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 11 | 12 | 15 | 16 | 18 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Guillaume Beaud, Marta Martinez-Camara 2 | All rights reserved. 
3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 18 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 20 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 21 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 22 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | LinvPy is a Python package designed to solve linear inverse problems of the form y=Ax+n, where y is a vector of measured values, A a known matrix, x an unknown input vector and n is noise. The goal is to find x, or at least the best possible estimation; if the matrix A is invertible, the solution is easy to find by multiplying by the inverse. If A is not invertible, we need to use regression techniques such as least squares method to find x. 
The first motivation for this project is that Marta Martinez-Camara, PhD student in Communications Systems at EPFL (Switzerland) designed some new algorithms to solve linear inverse problems (namely the Tau-Estimator), and this package is a Python implementation of these algorithms, which may not be available anywhere else than here. LinvPy also contains several other famous but not implemented or not publicly distributed algorithms such as regularization functions, loss functions or M-estimator. 2 | 3 | To install from pip, simply run : 4 | $ sudo pip install linvpy 5 | 6 | PyPi link : https://pypi.python.org/pypi/linvpy 7 | 8 | ReadTheDocs link : http://linvpy.readthedocs.org/en/latest/ -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ['linvpy'] 2 | 3 | # in your __init__.py 4 | from linvpy import * -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make ' where is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | @echo " coverage to run coverage check of the documentation (if enabled)" 49 | 50 | .PHONY: clean 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | .PHONY: html 55 | html: 56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) 
$(BUILDDIR)/html 57 | @echo 58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 59 | 60 | .PHONY: dirhtml 61 | dirhtml: 62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 63 | @echo 64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 65 | 66 | .PHONY: singlehtml 67 | singlehtml: 68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 69 | @echo 70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 71 | 72 | .PHONY: pickle 73 | pickle: 74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 75 | @echo 76 | @echo "Build finished; now you can process the pickle files." 77 | 78 | .PHONY: json 79 | json: 80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 81 | @echo 82 | @echo "Build finished; now you can process the JSON files." 83 | 84 | .PHONY: htmlhelp 85 | htmlhelp: 86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 87 | @echo 88 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 89 | ".hhp project file in $(BUILDDIR)/htmlhelp." 90 | 91 | .PHONY: qthelp 92 | qthelp: 93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 94 | @echo 95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/linpy.qhcp" 98 | @echo "To view the help file:" 99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/linpy.qhc" 100 | 101 | .PHONY: applehelp 102 | applehelp: 103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 104 | @echo 105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 106 | @echo "N.B. You won't be able to view it unless you put it in" \ 107 | "~/Library/Documentation/Help or install it in your application" \ 108 | "bundle." 
109 | 110 | .PHONY: devhelp 111 | devhelp: 112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 113 | @echo 114 | @echo "Build finished." 115 | @echo "To view the help file:" 116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/linpy" 117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/linpy" 118 | @echo "# devhelp" 119 | 120 | .PHONY: epub 121 | epub: 122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 123 | @echo 124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 125 | 126 | .PHONY: latex 127 | latex: 128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 129 | @echo 130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 132 | "(use \`make latexpdf' here to do that automatically)." 133 | 134 | .PHONY: latexpdf 135 | latexpdf: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through pdflatex..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 140 | 141 | .PHONY: latexpdfja 142 | latexpdfja: 143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 144 | @echo "Running LaTeX files through platex and dvipdfmx..." 145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 147 | 148 | .PHONY: text 149 | text: 150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 151 | @echo 152 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 153 | 154 | .PHONY: man 155 | man: 156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 157 | @echo 158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 159 | 160 | .PHONY: texinfo 161 | texinfo: 162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 163 | @echo 164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 
165 | @echo "Run \`make' in that directory to run these through makeinfo" \ 166 | "(use \`make info' here to do that automatically)." 167 | 168 | .PHONY: info 169 | info: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo "Running Texinfo files through makeinfo..." 172 | make -C $(BUILDDIR)/texinfo info 173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 174 | 175 | .PHONY: gettext 176 | gettext: 177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 178 | @echo 179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 180 | 181 | .PHONY: changes 182 | changes: 183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 184 | @echo 185 | @echo "The overview file is in $(BUILDDIR)/changes." 186 | 187 | .PHONY: linkcheck 188 | linkcheck: 189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 190 | @echo 191 | @echo "Link check complete; look for any errors in the above output " \ 192 | "or in $(BUILDDIR)/linkcheck/output.txt." 193 | 194 | .PHONY: doctest 195 | doctest: 196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 197 | @echo "Testing of doctests in the sources finished, look at the " \ 198 | "results in $(BUILDDIR)/doctest/output.txt." 199 | 200 | .PHONY: coverage 201 | coverage: 202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 203 | @echo "Testing of coverage in the sources finished, look at the " \ 204 | "results in $(BUILDDIR)/coverage/python.txt." 205 | 206 | .PHONY: xml 207 | xml: 208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 209 | @echo 210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 211 | 212 | .PHONY: pseudoxml 213 | pseudoxml: 214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 215 | @echo 216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
217 | -------------------------------------------------------------------------------- /docs/linvpy.rst: -------------------------------------------------------------------------------- 1 | linvpy package 2 | ============== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | 9 | linvpy.regularizedtau 10 | linvpy.tests 11 | 12 | Submodules 13 | ---------- 14 | 15 | linvpy.linvpy module 16 | -------------------- 17 | 18 | .. automodule:: linvpy.linvpy 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | linvpy.linvpy_old module 24 | ------------------------ 25 | 26 | .. automodule:: linvpy.linvpy_old 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | linvpy.setup module 32 | ------------------- 33 | 34 | .. automodule:: linvpy.setup 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | linvpy.test module 40 | ------------------ 41 | 42 | .. automodule:: linvpy.test 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | linvpy.test_final module 48 | ------------------------ 49 | 50 | .. automodule:: linvpy.test_final 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | linvpy.toolboxutilities module 56 | ------------------------------ 57 | 58 | .. automodule:: linvpy.toolboxutilities 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | 64 | Module contents 65 | --------------- 66 | 67 | .. automodule:: linvpy 68 | :members: 69 | :undoc-members: 70 | :show-inheritance: 71 | -------------------------------------------------------------------------------- /docs/linvpy.tests.rst: -------------------------------------------------------------------------------- 1 | linvpy.tests package 2 | ==================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | linvpy.tests.automatic_tests module 8 | ----------------------------------- 9 | 10 | .. 
automodule:: linvpy.tests.automatic_tests 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | linvpy.tests.generate_random module 16 | ----------------------------------- 17 | 18 | .. automodule:: linvpy.tests.generate_random 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | linvpy.tests.mestimator_marta module 24 | ------------------------------------ 25 | 26 | .. automodule:: linvpy.tests.mestimator_marta 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | linvpy.tests.optimal module 32 | --------------------------- 33 | 34 | .. automodule:: linvpy.tests.optimal 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | linvpy.tests.profiling module 40 | ----------------------------- 41 | 42 | .. automodule:: linvpy.tests.profiling 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | linvpy.tests.scrap_test module 48 | ------------------------------ 49 | 50 | .. automodule:: linvpy.tests.scrap_test 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | linvpy.tests.scrap_test_2 module 56 | -------------------------------- 57 | 58 | .. automodule:: linvpy.tests.scrap_test_2 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | linvpy.tests.test_basic_tau module 64 | ---------------------------------- 65 | 66 | .. automodule:: linvpy.tests.test_basic_tau 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | linvpy.tests.test_fast_tau module 72 | --------------------------------- 73 | 74 | .. automodule:: linvpy.tests.test_fast_tau 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | linvpy.tests.test_irls_M module 80 | ------------------------------- 81 | 82 | .. automodule:: linvpy.tests.test_irls_M 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | linvpy.tests.test_irls_lasso module 88 | ----------------------------------- 89 | 90 | .. 
automodule:: linvpy.tests.test_irls_lasso 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | 95 | linvpy.tests.test_irls_tau module 96 | --------------------------------- 97 | 98 | .. automodule:: linvpy.tests.test_irls_tau 99 | :members: 100 | :undoc-members: 101 | :show-inheritance: 102 | 103 | linvpy.tests.toolboxinverse module 104 | ---------------------------------- 105 | 106 | .. automodule:: linvpy.tests.toolboxinverse 107 | :members: 108 | :undoc-members: 109 | :show-inheritance: 110 | 111 | 112 | Module contents 113 | --------------- 114 | 115 | .. automodule:: linvpy.tests 116 | :members: 117 | :undoc-members: 118 | :show-inheritance: 119 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | set I18NSPHINXOPTS=%SPHINXOPTS% source 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. 
man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | echo. coverage to run coverage check of the documentation if enabled 41 | goto end 42 | ) 43 | 44 | if "%1" == "clean" ( 45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 46 | del /q /s %BUILDDIR%\* 47 | goto end 48 | ) 49 | 50 | 51 | REM Check if sphinx-build is available and fallback to Python version if any 52 | %SPHINXBUILD% 1>NUL 2>NUL 53 | if errorlevel 9009 goto sphinx_python 54 | goto sphinx_ok 55 | 56 | :sphinx_python 57 | 58 | set SPHINXBUILD=python -m sphinx.__init__ 59 | %SPHINXBUILD% 2> nul 60 | if errorlevel 9009 ( 61 | echo. 62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 63 | echo.installed, then set the SPHINXBUILD environment variable to point 64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 65 | echo.may add the Sphinx directory to PATH. 66 | echo. 67 | echo.If you don't have Sphinx installed, grab it from 68 | echo.http://sphinx-doc.org/ 69 | exit /b 1 70 | ) 71 | 72 | :sphinx_ok 73 | 74 | 75 | if "%1" == "html" ( 76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 80 | goto end 81 | ) 82 | 83 | if "%1" == "dirhtml" ( 84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
88 | goto end 89 | ) 90 | 91 | if "%1" == "singlehtml" ( 92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 93 | if errorlevel 1 exit /b 1 94 | echo. 95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 96 | goto end 97 | ) 98 | 99 | if "%1" == "pickle" ( 100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 101 | if errorlevel 1 exit /b 1 102 | echo. 103 | echo.Build finished; now you can process the pickle files. 104 | goto end 105 | ) 106 | 107 | if "%1" == "json" ( 108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 109 | if errorlevel 1 exit /b 1 110 | echo. 111 | echo.Build finished; now you can process the JSON files. 112 | goto end 113 | ) 114 | 115 | if "%1" == "htmlhelp" ( 116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 117 | if errorlevel 1 exit /b 1 118 | echo. 119 | echo.Build finished; now you can run HTML Help Workshop with the ^ 120 | .hhp project file in %BUILDDIR%/htmlhelp. 121 | goto end 122 | ) 123 | 124 | if "%1" == "qthelp" ( 125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 129 | .qhcp project file in %BUILDDIR%/qthelp, like this: 130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\linpy.qhcp 131 | echo.To view the help file: 132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\linpy.ghc 133 | goto end 134 | ) 135 | 136 | if "%1" == "devhelp" ( 137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. 141 | goto end 142 | ) 143 | 144 | if "%1" == "epub" ( 145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
149 | goto end 150 | ) 151 | 152 | if "%1" == "latex" ( 153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 157 | goto end 158 | ) 159 | 160 | if "%1" == "latexpdf" ( 161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 162 | cd %BUILDDIR%/latex 163 | make all-pdf 164 | cd %~dp0 165 | echo. 166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdfja" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf-ja 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "text" ( 181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 182 | if errorlevel 1 exit /b 1 183 | echo. 184 | echo.Build finished. The text files are in %BUILDDIR%/text. 185 | goto end 186 | ) 187 | 188 | if "%1" == "man" ( 189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 190 | if errorlevel 1 exit /b 1 191 | echo. 192 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 193 | goto end 194 | ) 195 | 196 | if "%1" == "texinfo" ( 197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 198 | if errorlevel 1 exit /b 1 199 | echo. 200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 201 | goto end 202 | ) 203 | 204 | if "%1" == "gettext" ( 205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 206 | if errorlevel 1 exit /b 1 207 | echo. 208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 209 | goto end 210 | ) 211 | 212 | if "%1" == "changes" ( 213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 214 | if errorlevel 1 exit /b 1 215 | echo. 216 | echo.The overview file is in %BUILDDIR%/changes. 
217 | goto end 218 | ) 219 | 220 | if "%1" == "linkcheck" ( 221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 222 | if errorlevel 1 exit /b 1 223 | echo. 224 | echo.Link check complete; look for any errors in the above output ^ 225 | or in %BUILDDIR%/linkcheck/output.txt. 226 | goto end 227 | ) 228 | 229 | if "%1" == "doctest" ( 230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 231 | if errorlevel 1 exit /b 1 232 | echo. 233 | echo.Testing of doctests in the sources finished, look at the ^ 234 | results in %BUILDDIR%/doctest/output.txt. 235 | goto end 236 | ) 237 | 238 | if "%1" == "coverage" ( 239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 240 | if errorlevel 1 exit /b 1 241 | echo. 242 | echo.Testing of coverage in the sources finished, look at the ^ 243 | results in %BUILDDIR%/coverage/python.txt. 244 | goto end 245 | ) 246 | 247 | if "%1" == "xml" ( 248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 249 | if errorlevel 1 exit /b 1 250 | echo. 251 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 252 | goto end 253 | ) 254 | 255 | if "%1" == "pseudoxml" ( 256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 257 | if errorlevel 1 exit /b 1 258 | echo. 259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 260 | goto end 261 | ) 262 | 263 | :end 264 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | linvpy 2 | ====== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | linvpy 8 | -------------------------------------------------------------------------------- /docs/source/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make ' where is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message 
catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | @echo " coverage to run coverage check of the documentation (if enabled)" 49 | 50 | .PHONY: clean 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | .PHONY: html 55 | html: 56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 57 | @echo 58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 59 | 60 | .PHONY: dirhtml 61 | dirhtml: 62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 63 | @echo 64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 65 | 66 | .PHONY: singlehtml 67 | singlehtml: 68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 69 | @echo 70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 71 | 72 | .PHONY: pickle 73 | pickle: 74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 75 | @echo 76 | @echo "Build finished; now you can process the pickle files." 77 | 78 | .PHONY: json 79 | json: 80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 81 | @echo 82 | @echo "Build finished; now you can process the JSON files." 83 | 84 | .PHONY: htmlhelp 85 | htmlhelp: 86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 87 | @echo 88 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 89 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
90 | 91 | .PHONY: qthelp 92 | qthelp: 93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 94 | @echo 95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/linvpy.qhcp" 98 | @echo "To view the help file:" 99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/linvpy.qhc" 100 | 101 | .PHONY: applehelp 102 | applehelp: 103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 104 | @echo 105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 106 | @echo "N.B. You won't be able to view it unless you put it in" \ 107 | "~/Library/Documentation/Help or install it in your application" \ 108 | "bundle." 109 | 110 | .PHONY: devhelp 111 | devhelp: 112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 113 | @echo 114 | @echo "Build finished." 115 | @echo "To view the help file:" 116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/linvpy" 117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/linvpy" 118 | @echo "# devhelp" 119 | 120 | .PHONY: epub 121 | epub: 122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 123 | @echo 124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 125 | 126 | .PHONY: latex 127 | latex: 128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 129 | @echo 130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 132 | "(use \`make latexpdf' here to do that automatically)." 133 | 134 | .PHONY: latexpdf 135 | latexpdf: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through pdflatex..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
140 | 141 | .PHONY: latexpdfja 142 | latexpdfja: 143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 144 | @echo "Running LaTeX files through platex and dvipdfmx..." 145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 147 | 148 | .PHONY: text 149 | text: 150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 151 | @echo 152 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 153 | 154 | .PHONY: man 155 | man: 156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 157 | @echo 158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 159 | 160 | .PHONY: texinfo 161 | texinfo: 162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 163 | @echo 164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 165 | @echo "Run \`make' in that directory to run these through makeinfo" \ 166 | "(use \`make info' here to do that automatically)." 167 | 168 | .PHONY: info 169 | info: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo "Running Texinfo files through makeinfo..." 172 | make -C $(BUILDDIR)/texinfo info 173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 174 | 175 | .PHONY: gettext 176 | gettext: 177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 178 | @echo 179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 180 | 181 | .PHONY: changes 182 | changes: 183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 184 | @echo 185 | @echo "The overview file is in $(BUILDDIR)/changes." 186 | 187 | .PHONY: linkcheck 188 | linkcheck: 189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 190 | @echo 191 | @echo "Link check complete; look for any errors in the above output " \ 192 | "or in $(BUILDDIR)/linkcheck/output.txt." 
193 | 194 | .PHONY: doctest 195 | doctest: 196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 197 | @echo "Testing of doctests in the sources finished, look at the " \ 198 | "results in $(BUILDDIR)/doctest/output.txt." 199 | 200 | .PHONY: coverage 201 | coverage: 202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 203 | @echo "Testing of coverage in the sources finished, look at the " \ 204 | "results in $(BUILDDIR)/coverage/python.txt." 205 | 206 | .PHONY: xml 207 | xml: 208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 209 | @echo 210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 211 | 212 | .PHONY: pseudoxml 213 | pseudoxml: 214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 215 | @echo 216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 217 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # linvpy documentation build configuration file, created by 4 | # sphinx-quickstart on Thu Dec 8 09:50:43 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | ########### TRICK FOUND ON SOME TUTORIAL : ADD IN THE MOCK_MODULES ANY EXTERNAL MODULE YOU'RE USING IN YOUR PACKAGE. 
24 | 25 | import mock 26 | 27 | MOCK_MODULES = ['numpy', 'scipy', 'sklearn', 'matplotlib', 'matplotlib.pyplot', 'scipy.interpolate', 'scipy.special', 'math', '__future__', 'toolboxutilities'] 28 | for mod_name in MOCK_MODULES: 29 | sys.modules[mod_name] = mock.Mock() 30 | 31 | # another trick for readthedocs 32 | # import os 33 | # on_rtd = os.environ.get('READTHEDOCS') == 'True' 34 | # if on_rtd: 35 | # html_theme = 'default' 36 | # else: 37 | # html_theme = 'nature' 38 | 39 | 40 | 41 | # -- General configuration ------------------------------------------------ 42 | 43 | # If your documentation needs a minimal Sphinx version, state it here. 44 | #needs_sphinx = '1.0' 45 | 46 | # Add any Sphinx extension module names here, as strings. They can be 47 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 48 | # ones. 49 | extensions = [ 50 | 'sphinx.ext.autodoc', 51 | 'sphinx.ext.doctest', 52 | 'sphinx.ext.coverage', 53 | 'sphinx.ext.mathjax', 54 | 'sphinx.ext.viewcode', 55 | 56 | 'sphinx.ext.autosummary', 57 | #'sphinxcontrib.fulltoc' 58 | 59 | #'sphinx.ext.napoleon', 60 | 61 | # from Numpy 62 | #'sphinx.ext.pngmath', 63 | #'sphinx.ext.intersphinx', 64 | #'sphinx.ext.autosummary', 65 | ] 66 | 67 | #napoleon_google_docstring = False 68 | #napoleon_use_param = False 69 | #napoleon_use_ivar = True 70 | 71 | # Add any paths that contain templates here, relative to this directory. 72 | templates_path = ['_templates'] 73 | 74 | # The suffix(es) of source filenames. 75 | # You can specify multiple suffix as a list of string: 76 | # source_suffix = ['.rst', '.md'] 77 | source_suffix = ['.rst', '.py'] 78 | 79 | # The encoding of source files. 80 | #source_encoding = 'utf-8-sig' 81 | 82 | # The master toctree document. 83 | master_doc = 'index' 84 | 85 | # General information about the project. 
86 | project = u'linvpy' 87 | copyright = u'2016, Guillaume Beaud' 88 | author = u'Guillaume Beaud' 89 | 90 | # The version info for the project you're documenting, acts as replacement for 91 | # |version| and |release|, also used in various other places throughout the 92 | # built documents. 93 | # 94 | # The short X.Y version. 95 | version = u'2' 96 | # The full version, including alpha/beta/rc tags. 97 | release = u'2.0' 98 | 99 | # The language for content autogenerated by Sphinx. Refer to documentation 100 | # for a list of supported languages. 101 | # 102 | # This is also used if you do content translation via gettext catalogs. 103 | # Usually you set "language" from the command line for these cases. 104 | language = None 105 | 106 | # There are two options for replacing |today|: either, you set today to some 107 | # non-false value, then it is used: 108 | #today = '' 109 | # Else, today_fmt is used as the format for a strftime call. 110 | #today_fmt = '%B %d, %Y' 111 | 112 | # List of patterns, relative to source directory, that match files and 113 | # directories to ignore when looking for source files. 114 | exclude_patterns = ['_build'] 115 | 116 | # The reST default role (used for this markup: `text`) to use for all 117 | # documents. 118 | #default_role = None 119 | 120 | # If true, '()' will be appended to :func: etc. cross-reference text. 121 | add_function_parentheses = True 122 | 123 | # If true, the current module name will be prepended to all description 124 | # unit titles (such as .. function::). 125 | #add_module_names = True 126 | 127 | # If true, sectionauthor and moduleauthor directives will be shown in the 128 | # output. They are ignored by default. 129 | #show_authors = False 130 | 131 | # The name of the Pygments (syntax highlighting) style to use. 132 | pygments_style = 'sphinx' 133 | 134 | # A list of ignored prefixes for module index sorting. 
135 | #modindex_common_prefix = [] 136 | 137 | # If true, keep warnings as "system message" paragraphs in the built documents. 138 | #keep_warnings = False 139 | 140 | # If true, `todo` and `todoList` produce output, else they produce nothing. 141 | todo_include_todos = False 142 | 143 | 144 | # -- Options for HTML output ---------------------------------------------- 145 | 146 | # The theme to use for HTML and HTML Help pages. See the documentation for 147 | # a list of builtin themes. 148 | html_theme = 'haiku' 149 | 150 | # Good themes : haiku, sphinx_rtd_theme 151 | 152 | # Theme options are theme-specific and customize the look and feel of a theme 153 | # further. For a list of options available for each theme, see the 154 | # documentation. 155 | #html_theme_options = {} 156 | 157 | # Add any paths that contain custom themes here, relative to this directory. 158 | #html_theme_path = [] 159 | 160 | # The name for this set of Sphinx documents. If None, it defaults to 161 | # " v documentation". 162 | #html_title = None 163 | 164 | # A shorter title for the navigation bar. Default is the same as html_title. 165 | #html_short_title = None 166 | 167 | # The name of an image file (relative to this directory) to place at the top 168 | # of the sidebar. 169 | #html_logo = None 170 | 171 | # The name of an image file (within the static path) to use as favicon of the 172 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 173 | # pixels large. 174 | #html_favicon = None 175 | 176 | # Add any paths that contain custom static files (such as style sheets) here, 177 | # relative to this directory. They are copied after the builtin static files, 178 | # so a file named "default.css" will overwrite the builtin "default.css". 179 | html_static_path = ['_static'] 180 | 181 | # Add any extra paths that contain custom files (such as robots.txt or 182 | # .htaccess) here, relative to this directory. 
These files are copied 183 | # directly to the root of the documentation. 184 | #html_extra_path = [] 185 | 186 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 187 | # using the given strftime format. 188 | #html_last_updated_fmt = '%b %d, %Y' 189 | 190 | # If true, SmartyPants will be used to convert quotes and dashes to 191 | # typographically correct entities. 192 | #html_use_smartypants = True 193 | 194 | # Custom sidebar templates, maps document names to template names. 195 | #html_sidebars = {} 196 | #html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'], } 197 | 198 | # Additional templates that should be rendered to pages, maps page names to 199 | # template names. 200 | #html_additional_pages = {} 201 | 202 | # If false, no module index is generated. 203 | #html_domain_indices = True 204 | 205 | # If false, no index is generated. 206 | #html_use_index = True 207 | 208 | # If true, the index is split into individual pages for each letter. 209 | #html_split_index = False 210 | 211 | # If true, links to the reST sources are added to the pages. 212 | html_show_sourcelink = True 213 | 214 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 215 | html_show_sphinx = True 216 | 217 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 218 | html_show_copyright = True 219 | 220 | # Adds a link to source code 221 | viewcode_import = True 222 | 223 | # If true, an OpenSearch description file will be output, and all pages will 224 | # contain a tag referring to it. The value of this option must be the 225 | # base URL from which the finished HTML is served. 226 | #html_use_opensearch = '' 227 | 228 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 229 | #html_file_suffix = None 230 | 231 | # Language to be used for generating the HTML full-text search index. 
232 | # Sphinx supports the following languages: 233 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 234 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' 235 | #html_search_language = 'en' 236 | 237 | # A dictionary with options for the search language support, empty by default. 238 | # Now only 'ja' uses this config value 239 | #html_search_options = {'type': 'default'} 240 | 241 | # The name of a javascript file (relative to the configuration directory) that 242 | # implements a search results scorer. If empty, the default will be used. 243 | #html_search_scorer = 'scorer.js' 244 | 245 | # Output file base name for HTML help builder. 246 | htmlhelp_basename = 'linvpydoc' 247 | 248 | # -- Options for LaTeX output --------------------------------------------- 249 | 250 | latex_elements = { 251 | # The paper size ('letterpaper' or 'a4paper'). 252 | #'papersize': 'letterpaper', 253 | 254 | # The font size ('10pt', '11pt' or '12pt'). 255 | #'pointsize': '10pt', 256 | 257 | # Additional stuff for the LaTeX preamble. 258 | #'preamble': '', 259 | 260 | # Latex figure (float) alignment 261 | #'figure_align': 'htbp', 262 | } 263 | 264 | # Grouping the document tree into LaTeX files. List of tuples 265 | # (source start file, target name, title, 266 | # author, documentclass [howto, manual, or own class]). 267 | latex_documents = [ 268 | (master_doc, 'linvpy.tex', u'linvpy Documentation', 269 | u'Guillaume Beaud', 'manual'), 270 | ] 271 | 272 | # The name of an image file (relative to this directory) to place at the top of 273 | # the title page. 274 | #latex_logo = None 275 | 276 | # For "manual" documents, if this is true, then toplevel headings are parts, 277 | # not chapters. 278 | #latex_use_parts = False 279 | 280 | # If true, show page references after internal links. 281 | #latex_show_pagerefs = False 282 | 283 | # If true, show URL addresses after external links. 284 | #latex_show_urls = False 285 | 286 | # Documents to append as an appendix to all manuals. 
287 | #latex_appendices = [] 288 | 289 | # If false, no module index is generated. 290 | #latex_domain_indices = True 291 | 292 | 293 | # -- Options for manual page output --------------------------------------- 294 | 295 | # One entry per manual page. List of tuples 296 | # (source start file, name, description, authors, manual section). 297 | man_pages = [ 298 | (master_doc, 'linvpy', u'linvpy Documentation', 299 | [author], 1) 300 | ] 301 | 302 | # If true, show URL addresses after external links. 303 | #man_show_urls = False 304 | 305 | 306 | # -- Options for Texinfo output ------------------------------------------- 307 | 308 | # Grouping the document tree into Texinfo files. List of tuples 309 | # (source start file, target name, title, author, 310 | # dir menu entry, description, category) 311 | texinfo_documents = [ 312 | (master_doc, 'linvpy', u'linvpy Documentation', 313 | author, 'linvpy', 'One line description of project.', 314 | 'Miscellaneous'), 315 | ] 316 | 317 | # Documents to append as an appendix to all manuals. 318 | #texinfo_appendices = [] 319 | 320 | # If false, no module index is generated. 321 | #texinfo_domain_indices = True 322 | 323 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 324 | #texinfo_show_urls = 'footnote' 325 | 326 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
327 | #texinfo_no_detailmenu = False 328 | 329 | 330 | sys.path.insert(0, os.path.abspath('../..')) 331 | 332 | # import ../../linvpy.py 333 | -------------------------------------------------------------------------------- /docs/source/images/EPFL_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/EPFL_logo.png -------------------------------------------------------------------------------- /docs/source/images/bisquare.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/bisquare.png -------------------------------------------------------------------------------- /docs/source/images/cauchy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/cauchy.png -------------------------------------------------------------------------------- /docs/source/images/huber.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/huber.png -------------------------------------------------------------------------------- /docs/source/images/lcav_logo_.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/lcav_logo_.png -------------------------------------------------------------------------------- /docs/source/images/optimal.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/optimal.png -------------------------------------------------------------------------------- /docs/source/images/outlier_effect.eps: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/outlier_effect.eps -------------------------------------------------------------------------------- /docs/source/images/outlier_effect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/outlier_effect.png -------------------------------------------------------------------------------- /docs/source/images/rho_functions_points.eps: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/rho_functions_points.eps -------------------------------------------------------------------------------- /docs/source/images/sum_elements.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/docs/source/images/sum_elements.pdf -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. linvpy documentation master file, created by sphinx-quickstart on Thu Dec 8 09:50:43 2016. 2 | .. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. 3 | 4 | .. image:: images/EPFL_logo.png 5 | :align: left 6 | :width: 20 % 7 | .. 
image:: images/lcav_logo_.png 8 | :align: right 9 | :width: 20 % 10 | 11 | 12 | | 13 | | 14 | | 15 | | 16 | 17 | 18 | Welcome to linvpy's documentation ! 19 | =================================== 20 | 21 | LinvPy is a Python package designed to solve linear inverse problems of the 22 | form : 23 | 24 | .. math:: y = Ax + n 25 | 26 | where :math:`y` is a vector of measured values, :math:`A` is a known matrix, 27 | :math:`x` is an unknown input vector and :math:`n` is noise. 28 | 29 | The goal is to find :math:`x`, or at least the best possible estimation; if 30 | the matrix :math:`A` is invertible, the solution is easy to find by 31 | multiplying by the inverse, if not, we need to use regression techniques 32 | such as least squares method to find :math:`x`. The first motivation for 33 | this project is that Marta Martinez-Camara, PhD student in Communications 34 | Systems at EPFL (Switzerland) designed some new algorithms for solving linear 35 | inverse problems. LinvPy is a Python implementation of these algorithms, 36 | which may not be available anywhere else than here. LinvPy also contains 37 | several other known functions such as loss functions, regularization 38 | functions, or M-estimators. 39 | 40 | Source code is on GitHub : https://github.com/LCAV/linvpy. 41 | 42 | Paper of reference: **The regularized tau estimator: A robust and efficient solution 43 | to ill-posed linear inverse problems**, by Martinez-Camara et al. You can find it at: https://arxiv.org/abs/1606.00812 44 | 45 | Get it 46 | ====== 47 | 48 | LinvPy is available from PyPi and Python 2.7 compatible.
If you have pip already installed, simply run : :: 49 | 50 | $ sudo pip2 install --ignore-installed --upgrade linvpy 51 | 52 | If you don't have pip installed, run : :: 53 | 54 | $ sudo easy_install pip 55 | $ sudo pip2 install --ignore-installed --upgrade linvpy 56 | 57 | 58 | 59 | Quick start 60 | =========== 61 | To solve :math:`y=Ax` with outliers knowing :math:`y, A` : :: 62 | 63 | import numpy as np 64 | import linvpy as lp 65 | 66 | a = np.matrix([[1, 2], [3, 4], [5, 6]]) 67 | y = np.array([1, 2, 3]) 68 | 69 | You can create a tau estimator object with the default parameters : :: 70 | 71 | # Using the Tau-estimator : 72 | tau = lp.TauEstimator() 73 | tau.estimate(a,y) 74 | # returns : (array([ 1.45956448e-16, 5.00000000e-01]), 1.9242827743815571) 75 | # where array([ 1.45956448e-16, 5.00000000e-01]) is the best x to solve y=Ax 76 | # and 1.9242827743815571 is the value of the tau scale for this x 77 | 78 | Or an M estimator : :: 79 | 80 | # Using the M-estimator : 81 | m = lp.MEstimator() 82 | m.estimate(a,y) 83 | # returns [ -2.95552481e-16 5.00000000e-01], the best x to solve y=Ax 84 | 85 | You can easily choose the loss function you want to use when you create the object : :: 86 | 87 | # By default both estimators use the Huber loss function, but you can use any of Huber, Cauchy, Bisquare or Optimal (all described in the doc below) : 88 | tau = lp.TauEstimator(loss_function=lp.Cauchy) 89 | tau.estimate(a,y) 90 | 91 | And the rest of the parameters: :: 92 | 93 | # or you can give one, two, three... 
or all parameters : 94 | tau = lp.TauEstimator( 95 | loss_function=lp.Optimal, 96 | clipping_1=0.6, 97 | clipping_2=1.5, 98 | lamb=3, 99 | scale=1.5, 100 | b=0.7, 101 | tolerance=1e4, ) 102 | tau.estimate(a,y) 103 | 104 | 105 | Or you can change the parameters later: :: 106 | 107 | # to change the clipping or any other parameter of the estimator : 108 | tau.loss_function_1.clipping = 0.7 109 | tau.tolerance = 1e3 110 | m.lamb = 3 111 | 112 | You can also choose a particular initial solution for the irls algorithm. To get the solution you run the method 'estimate' with your data a and y, and initial solution x_0 if any (this is not necessary): :: 113 | 114 | # running with an initial solution : 115 | x = np.array([5, 6]) 116 | x_tau_estimate = tau.estimate(a,y, initial_x=x) 117 | m_tau_estimate = m.estimate(a,y, initial_x=x) 118 | 119 | 120 | 121 | 122 | .. index:: 123 | 124 | Module contents 125 | =============== 126 | 127 | .. automodule:: linvpy 128 | 129 | .. rubric:: Estimators 130 | .. autosummary:: 131 | :nosignatures: 132 | 133 | MEstimator 134 | TauEstimator 135 | 136 | .. rubric:: Loss Functions 137 | .. autosummary:: 138 | :nosignatures: 139 | 140 | Bisquare 141 | Cauchy 142 | Huber 143 | Optimal 144 | 145 | .. rubric:: Regularization Functions 146 | .. autosummary:: 147 | :nosignatures: 148 | 149 | Lasso 150 | Tikhonov 151 | 152 | 153 | Using custom regularization functions 154 | ===================================== 155 | 156 | To use a custom regularization function : 157 | 158 | 1) copy paste this code into your python file 159 | 160 | 2) change the name CustomRegularization with the name of your function 161 | 162 | 3) define the regularization function in the definition of regularize 163 | 164 | 4) create your custom tau by passing an instance of your regularization, i.e. 
with "()" after the name 165 | 166 | :: 167 | 168 | # Define your own regularization that extends lp.Regularization 169 | class CustomRegularization(lp.Regularization): 170 | pass 171 | # Define your regularization function here 172 | def regularize(self, a, y, lamb=0): 173 | return np.ones(a.shape[1]) 174 | 175 | a = np.matrix([[1, 2], [3, 4], [5, 6]]) 176 | y = np.array([1, 2, 3]) 177 | 178 | # Create your custom tau estimator with custom regularization function 179 | # Pay attention to pass the loss function as a REFERENCE (without the "()" 180 | # after the name, and the regularization as an OBJECT, i.e. with the "()"). 181 | custom_tau = lp.TauEstimator(regularization=CustomRegularization()) 182 | print custom_tau.estimate(a,y) 183 | 184 | Using custom loss functions 185 | =========================== 186 | 187 | To use a custom loss function : 188 | 189 | 1) copy paste this code into your python file 190 | 191 | 2) change the name "CustomLoss" with the name of your loss function 192 | 193 | 3) change the two "0.7" with the value of your default clipping 194 | 195 | 4) define your rho function in the unit_rho definition 196 | 197 | 5) define your psi function as the derivative of the rho function in unit_psi 198 | 199 | 6) create your own tau estimator by passing your loss function name to it 200 | 201 | :: 202 | 203 | # Define your own loss function that extends lp.LossFunction 204 | class CustomLoss(lp.LossFunction): 205 | 206 | # Set your custom clipping 207 | def __init__(self, clipping=0.7): 208 | lp.LossFunction.__init__(self, clipping) 209 | if clipping is None: 210 | self.clipping = 0.7 211 | 212 | # Define your rho function : you can copy paste this and just change what's 213 | # inside the unit_rho 214 | def rho(self, array): 215 | # rho function of your loss function on ONE single element 216 | def unit_rho(element): 217 | # Simply return clipping * element for example 218 | return element + self.clipping 219 | # Vectorize the function 220 | vfunc = 
np.vectorize(unit_rho) 221 | return vfunc(array) 222 | 223 | # Define your psi function as the derivative of the rho function : you can 224 | # copy paste this and just change what's inside the unit_psi 225 | def psi(self, array): 226 | # rho function of your loss function on ONE single element 227 | def unit_psi(element): 228 | # Simply return the clipping for example 229 | return 1 230 | # Vectorize the function 231 | vfunc = np.vectorize(unit_psi) 232 | return vfunc(array) 233 | 234 | a = np.matrix([[1, 2], [3, 4], [5, 6]]) 235 | y = np.array([1, 2, 3]) 236 | 237 | custom_tau = lp.TauEstimator(loss_function=CustomLoss) 238 | print custom_tau.estimate(a,y) 239 | 240 | 241 | Tutorial 242 | ======== 243 | Why do we need robust estimators? 244 | --------------------------------- 245 | The nature of the errors that appear in a problem may pose a significant challenge. This is quite an old problem, and it was already mentioned in the first publications about least squares, more than two centuries ago. Legendre wrote in 1805 246 | 247 | .. epigraph:: 248 | If among these errors are some which appear too large to be admissible, then those observations which produced these errors will be rejected, as coming from too faulty experiments, and the unknowns will be determined by means of the other observations, which will then give much smaller errors. 249 | 250 | .. figure:: images/outlier_effect.png 251 | :scale: 70 % 252 | 253 | Contribute 254 | ========== 255 | 256 | If you want to contribute to this project, feel free to fork our GitHub main repository : https://github.com/LCAV/linvpy. Then, submit a 'pull request'. Please follow this workflow, step by step: 257 | 258 | 1. Fork the project repository: click on the 'Fork' button near the top of the page. This creates a copy of the repository in your GitHub account. 259 | 260 | 2. Clone this copy of the repository in your local disk. 261 | 262 | 3.
Create a branch to develop the new feature : :: 263 | 264 | $ git checkout -b new_feature 265 | 266 | Work in this branch, never in the master branch. 267 | 268 | 4. To upload your changes to the repository : :: 269 | 270 | $ git add modified_files 271 | $ git commit -m "what did you implement in this commit" 272 | $ git push origin new_feature 273 | 274 | When you are done, go to the webpage of the main repository, and click 'Pull request' to send your changes for review. 275 | 276 | 277 | Documentation 278 | ============= 279 | 280 | .. module:: 281 | .. automodule:: linvpy 282 | :members: 283 | :exclude-members: m_scale, score_function, tau_weights, tau_scale, m_weights, Estimator, LossFunction, Regularization 284 | :undoc-members: 285 | :show-inheritance: 286 | 287 | 288 | Indices and tables 289 | ================== 290 | 291 | :ref:`genindex` 292 | :ref:`modindex` 293 | :ref:`search` -------------------------------------------------------------------------------- /docs/source/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo.
qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | echo. coverage to run coverage check of the documentation if enabled 41 | goto end 42 | ) 43 | 44 | if "%1" == "clean" ( 45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 46 | del /q /s %BUILDDIR%\* 47 | goto end 48 | ) 49 | 50 | 51 | REM Check if sphinx-build is available and fallback to Python version if any 52 | %SPHINXBUILD% 1>NUL 2>NUL 53 | if errorlevel 9009 goto sphinx_python 54 | goto sphinx_ok 55 | 56 | :sphinx_python 57 | 58 | set SPHINXBUILD=python -m sphinx.__init__ 59 | %SPHINXBUILD% 2> nul 60 | if errorlevel 9009 ( 61 | echo. 62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 63 | echo.installed, then set the SPHINXBUILD environment variable to point 64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 65 | echo.may add the Sphinx directory to PATH. 66 | echo. 67 | echo.If you don't have Sphinx installed, grab it from 68 | echo.http://sphinx-doc.org/ 69 | exit /b 1 70 | ) 71 | 72 | :sphinx_ok 73 | 74 | 75 | if "%1" == "html" ( 76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
80 | goto end 81 | ) 82 | 83 | if "%1" == "dirhtml" ( 84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 88 | goto end 89 | ) 90 | 91 | if "%1" == "singlehtml" ( 92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 93 | if errorlevel 1 exit /b 1 94 | echo. 95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 96 | goto end 97 | ) 98 | 99 | if "%1" == "pickle" ( 100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 101 | if errorlevel 1 exit /b 1 102 | echo. 103 | echo.Build finished; now you can process the pickle files. 104 | goto end 105 | ) 106 | 107 | if "%1" == "json" ( 108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 109 | if errorlevel 1 exit /b 1 110 | echo. 111 | echo.Build finished; now you can process the JSON files. 112 | goto end 113 | ) 114 | 115 | if "%1" == "htmlhelp" ( 116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 117 | if errorlevel 1 exit /b 1 118 | echo. 119 | echo.Build finished; now you can run HTML Help Workshop with the ^ 120 | .hhp project file in %BUILDDIR%/htmlhelp. 121 | goto end 122 | ) 123 | 124 | if "%1" == "qthelp" ( 125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 129 | .qhcp project file in %BUILDDIR%/qthelp, like this: 130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\linvpy.qhcp 131 | echo.To view the help file: 132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\linvpy.ghc 133 | goto end 134 | ) 135 | 136 | if "%1" == "devhelp" ( 137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. 
141 | goto end 142 | ) 143 | 144 | if "%1" == "epub" ( 145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 149 | goto end 150 | ) 151 | 152 | if "%1" == "latex" ( 153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 157 | goto end 158 | ) 159 | 160 | if "%1" == "latexpdf" ( 161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 162 | cd %BUILDDIR%/latex 163 | make all-pdf 164 | cd %~dp0 165 | echo. 166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdfja" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf-ja 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "text" ( 181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 182 | if errorlevel 1 exit /b 1 183 | echo. 184 | echo.Build finished. The text files are in %BUILDDIR%/text. 185 | goto end 186 | ) 187 | 188 | if "%1" == "man" ( 189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 190 | if errorlevel 1 exit /b 1 191 | echo. 192 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 193 | goto end 194 | ) 195 | 196 | if "%1" == "texinfo" ( 197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 198 | if errorlevel 1 exit /b 1 199 | echo. 200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 201 | goto end 202 | ) 203 | 204 | if "%1" == "gettext" ( 205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 206 | if errorlevel 1 exit /b 1 207 | echo. 208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 
209 | goto end 210 | ) 211 | 212 | if "%1" == "changes" ( 213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 214 | if errorlevel 1 exit /b 1 215 | echo. 216 | echo.The overview file is in %BUILDDIR%/changes. 217 | goto end 218 | ) 219 | 220 | if "%1" == "linkcheck" ( 221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 222 | if errorlevel 1 exit /b 1 223 | echo. 224 | echo.Link check complete; look for any errors in the above output ^ 225 | or in %BUILDDIR%/linkcheck/output.txt. 226 | goto end 227 | ) 228 | 229 | if "%1" == "doctest" ( 230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 231 | if errorlevel 1 exit /b 1 232 | echo. 233 | echo.Testing of doctests in the sources finished, look at the ^ 234 | results in %BUILDDIR%/doctest/output.txt. 235 | goto end 236 | ) 237 | 238 | if "%1" == "coverage" ( 239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 240 | if errorlevel 1 exit /b 1 241 | echo. 242 | echo.Testing of coverage in the sources finished, look at the ^ 243 | results in %BUILDDIR%/coverage/python.txt. 244 | goto end 245 | ) 246 | 247 | if "%1" == "xml" ( 248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 249 | if errorlevel 1 exit /b 1 250 | echo. 251 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 252 | goto end 253 | ) 254 | 255 | if "%1" == "pseudoxml" ( 256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 257 | if errorlevel 1 exit /b 1 258 | echo. 259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
260 | goto end 261 | ) 262 | 263 | :end 264 | -------------------------------------------------------------------------------- /packaging_tutorial.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/packaging_tutorial.pdf -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | from codecs import open 3 | from os import path 4 | 5 | here = path.abspath(path.dirname(__file__)) 6 | 7 | # Get the long description from the README file 8 | with open(path.join(here, 'README.rst'), encoding='utf-8') as f: 9 | long_description = f.read() 10 | 11 | setup( 12 | 13 | name='linvpy', 14 | 15 | version='2.6', 16 | 17 | description='Package to solve linear inverse problems', 18 | 19 | long_description=long_description, 20 | 21 | # The project's main homepage. 22 | url='https://github.com/LCAV/linvpy', 23 | 24 | # Author details 25 | author='Guillaume Beaud, Marta Martinez-Camara', 26 | author_email='beaudguillaume@gmail.com', 27 | 28 | # Choose your license 29 | license='BSD', 30 | 31 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 32 | classifiers=[ 33 | # How mature is this project? Common values are 34 | # 3 - Alpha 35 | # 4 - Beta 36 | # 5 - Production/Stable 37 | 'Development Status :: 2 - Pre-Alpha', 38 | 39 | # Specify the Python versions you support here. In particular, ensure 40 | # that you indicate whether you support Python 2, Python 3 or both. 41 | 'Programming Language :: Python :: 2', 42 | 'Programming Language :: Python :: 2.6', 43 | 'Programming Language :: Python :: 2.7', 44 | ], 45 | 46 | # What does your project relate to? 
47 | keywords='linear inverse M-estimator Tau-estimator regression', 48 | 49 | # You can just specify the packages manually here if your project is 50 | # simple. Or you can use find_packages(). 51 | # packages=find_packages(exclude=['contrib', 'docs', 'tests']), 52 | 53 | # packages=['linvpy'], 54 | 55 | packages=find_packages(exclude=['docs', 'tests']), 56 | 57 | # Alternatively, if you want to distribute just a my_module.py, uncomment 58 | # this: 59 | # py_modules=['linvpy.py'], 60 | 61 | # List run-time dependencies here. These will be installed by pip when 62 | # your project is installed. For an analysis of "install_requires" vs pip's 63 | # requirements files see: 64 | # https://packaging.python.org/en/latest/requirements.html 65 | install_requires=['numpy'], 66 | 67 | # List additional groups of dependencies here (e.g. development 68 | # dependencies). You can install these using the following syntax, 69 | # for example: 70 | # $ pip install -e .[dev,test] 71 | extras_require={ 72 | #'dev': ['check-manifest'], 73 | #'test': ['coverage'], 74 | }, 75 | 76 | # If there are data files included in your packages that need to be 77 | # installed, specify them here. If using Python 2.6 or less, then these 78 | # have to be included in MANIFEST.in as well. 79 | package_data={ 80 | #'sample': ['package_data.dat'], 81 | #'linvpy' : ['linvpy/regression.py'] 82 | }, 83 | 84 | # Although 'package_data' is the preferred approach, in some case you may 85 | # need to place data files outside of your packages. See: 86 | # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa 87 | # In this case, 'data_file' will be installed into '/my_data' 88 | #data_files=[('my_data', ['data/data_file'])], 89 | 90 | # To provide executable scripts, use entry points in preference to the 91 | # "scripts" keyword. Entry points provide cross-platform support and allow 92 | # pip to create the appropriate form of executable for the target platform. 
93 | entry_points={ 94 | #'console_scripts': [ 95 | # 'sample=sample:main', 96 | #], 97 | }, 98 | ) -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | #__all__ = ['linvpy'] 2 | 3 | # in your __init__.py 4 | from tests import * -------------------------------------------------------------------------------- /tests/generate_random.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Generation of some random matrix, vectors etc for test purpose. 3 | ''' 4 | 5 | import numpy as np 6 | 7 | CONDITION_NUMBER_LOWERBOUND = 10000 8 | 9 | def generate_random(rows,columns): 10 | ''' 11 | :param size: (int) size of matrix and vector 12 | 13 | :return tuple(np.matrix, array): a random tuple A,y of matching dimensions 14 | ''' 15 | 16 | if rows == 1: 17 | return np.array([[np.random.rand()]]), [np.random.rand()] 18 | return np.random.rand(rows,columns) , np.random.rand(rows) 19 | 20 | def gen_noise(rows, columns, coeff_noise=0.5): 21 | 22 | matrix_a = np.random.rand(rows,columns) 23 | vector_x = np.random.rand(columns) 24 | vector_y = np.dot(matrix_a, vector_x) 25 | 26 | noise_vector = coeff_noise * np.random.rand(vector_y.shape[0]) 27 | vector_y += noise_vector 28 | 29 | initial_x = np.random.rand(columns) 30 | residuals = vector_y - np.dot(matrix_a, initial_x) 31 | scale = np.median(np.abs(residuals))/0.6745 32 | 33 | return matrix_a, vector_x, vector_y, initial_x, scale 34 | 35 | 36 | 37 | def generate_random_ill_conditioned(size): 38 | ''' 39 | For test purpose only. Function generating a random ill-conditioned matrix 40 | of the size given in parameter. 
41 | 42 | :param size: (int) size of matrix and vector 43 | 44 | :return tuple(np.matrix, array): a random tuple A,y of matching 45 | dimensions with A being an ill-conditioned matrix 46 | ''' 47 | 48 | # An ill-conditioned matrix of size 1 makes no sense so it calls the 49 | # normal random generator 50 | if(size == 1): 51 | return generate_random(1) 52 | 53 | # Generates a random matrix 54 | random_matrix = np.matrix(np.random.rand(size,size-1)) 55 | 56 | # Unitary_1, unitary_2 are np.matrix types ; singular is a vector 57 | unitary_1, singular, unitary_2 = np.linalg.svd(random_matrix, 58 | full_matrices=False) 59 | 60 | # Finds the largest singular value, multiplies it by 61 | # CONDITION_NUMBER_LOWERBOUND and put it as the first element of the 62 | # singular values vector to make sure the condition number is greater 63 | # than CONDITION_NUMBER_LOWERBOUND 64 | max_value = max(singular) 65 | singular[0]= max_value * CONDITION_NUMBER_LOWERBOUND 66 | 67 | # Diagnolize the singular vector 68 | S = np.diag(singular) 69 | 70 | # Recomposes the random matrix which is ill-conditioned now 71 | ill_conditioned_matrix = np.dot( 72 | unitary_1, 73 | np.dot(S, unitary_2) 74 | ) 75 | 76 | # Generates a random vector 77 | random_vector = np.random.rand(size) 78 | 79 | return ill_conditioned_matrix, random_vector 80 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/__init__.py -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/CVRidRob.m: -------------------------------------------------------------------------------- 1 | function mse =CVRidRob(XX,yy,nfold,orden,lam,gradlib) 2 | %mse=CVRidRob(XX,yy,nfold,orden,lam,gradlib) 3 | %XX,yy= 
data 4 | %If nfold>1, performs nfold-CV 5 | %If 0=2 15 | nestim=n*(1-1/nfold); % #(Xesti) 16 | lamcv=lam; 17 | deltaesc=0.5*(1-gradlib/nestim); 18 | inint=floor(linspace(0,n,nfold+1)); 19 | resid=zeros(n,1); 20 | for kk=1:nfold 21 | testk=(inint(kk)+1):inint(kk+1); 22 | estik=setdiff(indin,testk); 23 | Xtest=X(testk,:); Xesti=X(estik,:); 24 | ytest=y(testk); yesti=y(estik); 25 | [betaSE,~,~,~]=PeYoRid(Xesti,yesti,lamcv,deltaesc); 26 | beta=betaSE(1:p); bint=betaSE(p+1); 27 | fitk=Xtest*beta+bint; resid(testk)=ytest-fitk; 28 | end 29 | else 30 | ntest=floor(n*nfold); nestim=n-ntest; 31 | lamcv=lam; 32 | deltaesc=0.5*(1-gradlib/nestim); 33 | if ntest<5 | ntest>=n, disp('wrong nfold in CVRidRob'), end 34 | Xtest=X(1:ntest,:); ytest=y(1:ntest); 35 | Xesti=X(ntest+1:n,:); yesti=y(ntest+1:n); 36 | [betaSE,~,~,~]=PeYoRid(Xesti,yesti,lamcv,deltaesc); 37 | beta=betaSE(1:p); bint=betaSE(p+1); 38 | fit=Xtest*beta+bint; resid=ytest-fit; 39 | end 40 | ktau=5; 41 | mse=tauscale(resid,ktau).^2; 42 | 43 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/MMRid.m: -------------------------------------------------------------------------------- 1 | function [beta res edf w mse]=MMRid(X,y,lam, betin,sigma,kefi,niter,tol) 2 | %MMRID [beta res edf w mse]=MMRid(X,y,lam, betin,sigma,kefi,niter,tol) 3 | %RR-MM descent starting from initial estimate "betin", with given scale 4 | % "sigma" and penalty "lam" 5 | %Minimizes criterion (k*sigma)^2*sum{rho(res/k*sigma)}+lamda*||beta1||^2 6 | %Here rho''(0)=2 7 | %niter=#(max. iterations), default=50 8 | %tol: tolerance for relative change of criterion and residuals, default=1.e-3; 9 | %edf= equiv. deg fr. 10 | %kefi= constant for efficiency, default =3.88 11 | %For the following efficiencies: %0.8 0.85 0.9 0.95, 12 | % use kefi= 3.14 3.44 3.58 4.68 13 | %res= residuals, w =weights. 14 | %mse=estimated pred. 
error 15 | %mse(j) for j=1:3 are based on: FPE, CV(n) and GCV(n) 16 | if nargin<6, kefi=3.88; end %eff.=90% 17 | if nargin<7, niter=50; end 18 | if nargin<8, tol=1.e-3; end 19 | 20 | [n p]=size(X); 21 | kasig=kefi*sigma; 22 | betinte=betin(p+1); betinslo=betin(1:p); 23 | res0=y-X*betinslo-betinte; 24 | crit0=kasig^2*sum(rho(res0/kasig))+lam*norm(betinslo)^2; 25 | %Iterations 26 | iter=0; delta=inf; conve=inf; 27 | binter=betinte; 28 | while (itertol | conve>tol)) 29 | iter=iter+1; 30 | tt=res0/kasig; 31 | w=weights(tt); rw=sqrt(w); 32 | ycen=y-binter; 33 | Xw=X.*repmat(rw,1,p); yw=ycen.*rw; 34 | Xau=[Xw; sqrt(lam)*eye(p)]; %augment X 35 | yau=[yw; zeros(p,1)]; 36 | beta =Xau\yau; resin=y-X*beta; %here beta=slopes 37 | if sum(w)>0, binter=sum(resin .*w)/sum(w); 38 | else, binter=median(resin); 39 | end 40 | res=resin-binter; % centered residuals 41 | crit=kasig^2*sum(rho(res/kasig))+lam*norm(beta)^2; 42 | deltold=delta; delta=1-crit/crit0; 43 | conve=max(abs(res-res0))/sigma; %measures convergence of residuals 44 | res0=res; crit0=crit; 45 | end 46 | beta=[beta; binter]; 47 | hmat=Xau*((Xau'*Xau)\Xau'); h=diag(hmat); edf=sum(h(1:n)); 48 | %Three versions of MSE: 49 | %1: FPE 50 | aa=mean(psibis(res/kasig).^2); bb=mean(psipri(res/kasig)); 51 | if(bb<0.001), disp('MMrid'), disp([aa bb]), end 52 | mse1=sigma^2*(mean(rho(res/kasig))+ edf*aa/(n*bb) ); 53 | %2: approximate leave-one-out CV 54 | D=diag(psipri(res/kasig)); H=(X'*D)*X; U=H+2*lam*eye(p); 55 | h=diag(X*(U\X')); hpri=psipri(res/kasig).*h; 56 | kapsi=kasig*psibis(res/kasig); 57 | resin=res+h.*kapsi./(1-hpri); %resid CV 58 | ktau=5; %coonstant for tau-scale 59 | mse2=tauscale(resin,ktau).^2; 60 | %3: the same, with GCV 61 | hpri=mean(hpri); 62 | resin=res+h.*(kasig*psibis(res/kasig))/(1-hpri); %resid CV 63 | mse3=tauscale(resin,ktau).^2; 64 | 65 | mse=[mse1 mse2 mse3]; 66 | 67 | 68 | function r=rho(x) %Bisquare 69 | r= (1-(1-x.^2).^3 .*(abs(x)<=1))/3; %to make rho''(0)=2 70 | function z=psibis(r) %psibis=rho' 71 
| z=2*(abs(r)<=1).*r.*(1-r.^2).^2; %psi bisquare 72 | function z=psipri(r) %psipri=psibis' 73 | z=2*(abs(r)<=1).*(1-r.^2).*(1-5*r.^2); 74 | function w=weights(r) %w=(psibis(r)/r)/2 to eliminate the "2" from 2*lam 75 | w=(abs(r)<=1).*(1-r.^2).^2; 76 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/Mloca.m: -------------------------------------------------------------------------------- 1 | function mu=Mloca(y,sig,efi) 2 | %mu=Mloca(y,sig,efi) 3 | %mu (column vector) = columnwise bisquare location M-estimator of matrix y. 4 | %sig (optional): scale, either a single number or a column vector. 5 | % Default=Normalized columnwise MAD 6 | %efi (optional): desired efficiency, Choose 0.9 (default), 0.85 or 0.95 7 | if nargin<2, sig=mad(y,1)'/0.675; 8 | end 9 | if nargin<3, efi=0.9; end 10 | if efi==.85, kefi=3.44; 11 | elseif efi==.9, kefi=3.88; 12 | elseif efi==.95, kefi=4.68; 13 | else, disp('wrong efi in Mloca') 14 | end 15 | [q n]=size(y); 16 | niter=10; 17 | nsig=size(sig,1); 18 | if nsig==n, sigrep=repmat(sig',q,1); 19 | else, sigrep=repmat(sig,q,n); %f all sig's equal 20 | end; 21 | if q==1, mu=y; 22 | else 23 | mume=median(y); %inicial 24 | mu=mume; 25 | for j=1:niter 26 | z=(y-repmat(mu,q,1))./sigrep; 27 | w=bisq(z/kefi); 28 | sw=sum(w); nulos=(sw==0); 29 | if sum(nulos)==0, mu=sum(y.*w)./sw; 30 | else, nonul=~nulos; %to avoid division by 0 31 | mu(nonul)=sum(y(:,nonul).*w(:,nonul))./sw(nonul); 32 | mu(nulos)=mume(nulos); 33 | end 34 | end 35 | end 36 | mu=mu'; 37 | 38 | function w=bisq(z) %bisquare weight function 39 | t=z.^2; w=(t<=1).*(1-t).^2; -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/PeYoRid.m: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/matlab/RidgeCode/PeYoRid.m -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/READ_ME.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/matlab/RidgeCode/READ_ME.m -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/RidSEMM.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/matlab/RidgeCode/RidSEMM.m -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/RobRidge.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/matlab/RidgeCode/RobRidge.m -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/SPC.m: -------------------------------------------------------------------------------- 1 | function [lamda b mu scores]=SPC(x,cent); 2 | %SPC Spherical Principal Components (Locantore et al., 1999) [lamda b mu scores]=SPC(x,cent) 3 | %lamda= Robust "eigenvalues" (increasing); b=Maatrix of eigenvectors 4 | %mu=spatial mediana; scores=projection of x (centered) on eigenvectors 5 | %If cent>0 (default), x is centered 6 | %cent=0 (no centgering) is used for "RobRidge" 7 | if nargin<2, cent=1; end 8 | [n p]=size(x); 9 | [mu,w]=spamed(x,cent); xcen=centrar(x,mu'); 10 | y=xcen.*(w*ones(1,p)); 11 | [a s b]=svdecon(y); %uses "economic" SVD 12 | 
scores=xcen*b; 13 | %lamda=squared robust scales along prinipal directions 14 | lamda=robsq(scores); 15 | [lamda ind]=sortrows(lamda); %sort lamdas increasing 16 | b=b(:,ind); scores=scores(:,ind); 17 | 18 | function [mu,w]= spamed(x,cent); 19 | %Spatial M-median , w=weights 20 | %If cent>0: mu=spatial median; else, mu=0 21 | % w=1/||x-mu|| normalized 22 | [n p]=size(x); del0=1.e-5; 23 | if cent>0 24 | niter=20; tol=1.e-5; mu0=median(x)'; 25 | dife=inf; ite=0; 26 | while itetol*p 27 | ite=ite+1; 28 | xc=centrar(x,mu0'); 29 | w= sqrt(sum(xc'.^2))'; 30 | deldis=del0*median(w); 31 | w=w.*(w>=deldis)+deldis*(w=deldis)+deldis*(wtolcrit | conve>tolres)) 25 | iter=iter+1; 26 | tt=res0/sig0; 27 | w=weights(tt); rw=sqrt(w); 28 | ycen=y-binter; 29 | xw=x.*repmat(rw,1,p); yw=ycen.*rw; 30 | lala=mean(w.*tt.^2)*lam; 31 | xau=[xw; sqrt(lala)*eye(p)]; %augment x 32 | yau=[yw; zeros(p,1)]; 33 | beta =xau\yau; resin=y-x*beta; 34 | binter=sum(resin .*w)/sum(w); 35 | res=resin-binter; 36 | sig=mscale(res,0,delsca); 37 | crit=n*sig^2+lam*beta'*beta; deltold=delta; 38 | delta=1-crit/crit0; 39 | conve=max(abs(res-res0))/sig; %measures convergence of residuals 40 | res0=res; sig0=sig; crit0=crit; 41 | end 42 | beta=[beta; binter]; 43 | hmat=xau*((xau'*xau)\xau'); h=diag(hmat); edf=sum(h(1:n)); 44 | 45 | function w=weights(r) %Bisquare weights 46 | w=(abs(r)<=1).*(1-r.^2).^2; 47 | 48 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/divcol.m: -------------------------------------------------------------------------------- 1 | function Y=divcol(X,sig) 2 | %DIVCOL Divides columns of X by their std (default) o by row vector sig 3 | [n p]=size(X); 4 | if nargin<2; divi=std(X); 5 | else divi=sig; 6 | end 7 | Y=X./repmat(divi,n,1); 8 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/findlam.m: 
-------------------------------------------------------------------------------- 1 | function lamr= findlam(vals,r) 2 | %FINDLAM lamr= findlam(vals,r) column vector 3 | %Finds lamdas which yield edf=r 4 | p=length(vals); nr=length(r); 5 | lamr=zeros(nr,1); 6 | lam1=0; lam2=max(vals)*(p./r-1); 7 | for i=1:nr 8 | lam0=[lam1 lam2(i)+0.5]; %the value 0.5 is for lam=0 9 | lamr(i)=fzero(@(lam) sumrat(lam,vals,r(i)),lam0, optimset('Display','off')); 10 | end 11 | 12 | function susu=sumrat(lam,vals,r) 13 | susu=sum(vals./(vals+lam))-r; 14 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/mscale.m: -------------------------------------------------------------------------------- 1 | function sig=mscale(x,normz,delta,tole) 2 | %MSCALE sig=mscale(x,normz,delta,tole) 3 | %sig=M-scale of x 4 | % sigma= solution of ave{rho(x_i/sigma)}=delta, where rho=bisquare 5 | %delta: optional, default=0.5 6 | %tole optional, error tolerance, default=1.e-5 7 | %normz: optional; if >0 (default), normalize sig for consistency at the normal 8 | 9 | if nargin<2, normz=1; end 10 | if nargin<3, delta=0.5; end 11 | if nargin<4; tole=1.e-5; end; 12 | 13 | n=length(x); y=sort(abs(x)); 14 | n1=floor(n*(1-delta)); n2=ceil(n*(1-delta)/(1-delta/2)); 15 | qq=[y(n1) y(n2)]; 16 | u=rhoinv(delta/2); 17 | sigin=[qq(1) qq(2)/u]; %initial interval 18 | if qq(1)>=1, tolera=tole; %relative or absolute tolerance, for sigma> or < 1 19 | else, tolera=tole*qq(1); 20 | end 21 | if mean(x==0)>1-delta; sig=0; 22 | else 23 | sig=fzero(@(sigma) averho(x,sigma,delta), sigin, optimset('TolX',tolera, 'Display','off')); 24 | end 25 | if normz>0, sig=sig/1.56; %normalize 26 | end 27 | 28 | function r=rhobisq(x) %Bisquare 29 | r= 1-(1-x.^2).^3 .*(abs(x)<=1); 30 | 31 | function aa=averho(x,sig,delta) 32 | aa=mean(rhobisq(x/sig))-delta; 33 | 34 | function x=rhoinv(u) %inverse function of rho 35 | x=sqrt(1-(1-u)^(1/3)); 
-------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/prepara.m: -------------------------------------------------------------------------------- 1 | function [Xnor ycen mux sigx muy sigy]=prepara(X,y,robu) 2 | % [Xnor ycen mux sigx muy sigy]=prepara(X,y, robu) 3 | %Centers y and the columns of X to zero location, and normalizes X to unit scale. 4 | %If robu>0 (default): with robust M-estimates; else, means and SD 5 | %Xnor=centered normalized X; ycen=centered y; mux=location vector of X; 6 | %muy, sigy=location and scale of y 7 | if nargin<3, robu=1; end 8 | [n p]=size(X); 9 | if robu>0, mux=Mloca(X); Xnor=centrar(X,mux'); 10 | sigx=zeros(1,p); muy=Mloca(y); ycen=y-muy; 11 | sigy=mscale(ycen); 12 | for j=1:p, sigx(j)=mscale(Xnor(:,j)); end 13 | else, mux=mean(X)'; Xnor=centrar(X,mux'); 14 | sigx=std(X); muy=mean(y); sigy=std(y); ycen=y-muy; 15 | end 16 | Xnor=divcol(Xnor,sigx); 17 | -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/svdecon.m: -------------------------------------------------------------------------------- 1 | function [a s b]=svdecon(x) 2 | %[a s b]=svdecon(x), x=a*diag(s)*b' 3 | %Economic SVD economica: a=nxq and b=pxq with q=rank(x); 4 | %s=q-vector, decreasing 5 | [n p]=size(x); q=max(n,p); 6 | [a s b]=svd(x,0); 7 | if p>n, s=s(:,1:n); %now s and b are nxn 8 | b=b(:,1:n); 9 | end 10 | epsil=1.e-8; s=diag(s); 11 | sm=q*s(1)*epsil; %omit null singular values 12 | a=a(:,s>sm); b=b(:,s>sm); s=s(s>sm); -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/tauscale.m: -------------------------------------------------------------------------------- 1 | function sigmas=tauscale(x,ktau,delta) 2 | % tau scales (row vector) of x for several constants ktau (row) 3 | %delta= "delta" for initial M-scale, default=0.5 4 | if nargin<3, 
delta=0.5; end 5 | sigmas=[]; s0=mscale(x,0,delta); 6 | %constant for consistency of s0 7 | c0=7.8464-34.6565*delta + 75.2573*delta^2 -62.5880*delta^3; 8 | s0=s0/c0; 9 | for k=ktau 10 | romed=mean(rho(x/(s0*k))); sig=k*s0*sqrt(romed); 11 | sigmas=[sigmas sig]; 12 | end 13 | 14 | function r=rho(x) %Bisquare 15 | r= (1-(1-x.^2).^3 .*(abs(x)<=1))/3; %para que rho''(0)=2 -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/RidgeCode/unitol.m: -------------------------------------------------------------------------------- 1 | function [y ind]=unitol(x,eps) 2 | %y=x(ind) 3 | %Same as function "unique" but with tolerance eps 4 | if nargin<2, eps=1.e-10; end 5 | n=length(x); 6 | [y ind]=sort(x); ydif=diff(y); 7 | z=y(2:n); ii=ind(2:n); 8 | y=[y(1);z(ydif>eps)]; ind=[ind(1); ii(ydif>eps)]; -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/Sridge/matlab/__init__.py -------------------------------------------------------------------------------- /tests/regularizedtau/Sridge/matlab/sridge.m: -------------------------------------------------------------------------------- 1 | function [beta,resid,sigma,edf,lamin]= sridge(X,y,numlam, cualcv,showhist,nkeep,niter) 2 | %sridge Calculates the s-ridge regression estimate 3 | % adapted from Ricardo A. Maronnas matlab code from "Robust ridge regression for high dimensional 4 | % data" (2012) 5 | % 6 | % Inputs: 7 | % X - Regression Matrix [N x P] 8 | % y - respones [N x 1] 9 | % numlam - number of lambda values, default =min(n,p,20) 10 | % cualcv - method for estimating prediction error. 
11 | % cualcv--fold CV ("N_{lambda}"; 12 | % showhist - if >0, print edf and mse for eadh lambda (default=0) 13 | % nkeep - number of candidates to be kept for full iteration in the Pe?a-Yohai procedure (default=5) 14 | % niter - maximum number of iteration steps for the 15 | % s-ridge calculation [1 x 1] 16 | % 17 | % Outputs: 18 | % beta - (p+1)-vector of regression parameters, %beta(1)=intercept 19 | % resid - residual vector 20 | % edf - final equivalent degrees of freedom ("p hat") 21 | % lamin - optimal lambda 22 | 23 | disp(X) 24 | disp(y) 25 | n=size(X,1); 26 | if nargin<3, numlam=min([20 n size(X,2)]); end 27 | if nargin<4, cualcv=1; end 28 | if nargin<5, showhist=0; end 29 | if nargin<6, nkeep=5; end 30 | 31 | % %Normalize and center X and y 32 | % [Xnor,ynor,mux,sigx,muy]=prepara(X,y); 33 | Xnor=X; 34 | ynor=y; 35 | %Spherical Principal COmponents (no centering) 36 | %privar, Beig= vector of robust "eigenvalues" and matrix of eigenvectors 37 | %Xnor is now =PCA scores= "orthonormalized Xnor " 38 | [privar,Beig, ~, Xnor]=SPC(Xnor,0); 39 | [n,p]=size(Xnor); %p is now the "actual" dimension 40 | privar=privar*n; %Makes the robust eigenvalues of the same order as those of classical PCA used for LS 41 | nlam=min([p numlam]); 42 | pmax=min([p n/2]); %edf<=n/2 to keep BDP >=0.25 43 | pp=linspace(1,pmax,nlam); %"candidate edf's" 44 | lamdas=findlam(privar,pp); %find lambdas corresponding to the edf's 45 | deltas=0.5*(1-pp/n); %for the M-escale used with Pe?a-Yohai 46 | msemin=inf; historia=[]; 47 | 48 | %Actual CV, or test sample 49 | [~,orden]=sort(randn(n,1)); %Random permutation 50 | for klam=1:nlam 51 | [klam;nlam] 52 | lam=lamdas(klam); deltaesc=deltas(klam); 53 | mse=CVRidRob(Xnor,ynor,cualcv,orden,lam,pp(klam)); 54 | if mse0, disp(historia), end 60 | %Denormalize beta 61 | betaslo=Beig*beta(1:p); bint=beta(p+1); 62 | % beta=desprepa(betaslo, mux,sigx,muy+bint); 63 | beta=[bint;betaslo]; 64 | 65 | %put intercept to the beging of the vector 66 | % 
import numpy as np


def rho_inverse(u):
    """Return the inverse of the (normalized) bisquare rho function at *u*.

    :param u: scalar in [0, 1] (a value of rho)
    :return: the scalar t >= 0 such that rho(t) == u
    """
    # The exponent must be a float third: the original ``(1 / 3)`` is 0
    # under Python 2 integer division, which made this function return
    # sqrt(1 - 1) == 0 for every input.
    return np.sqrt(1.0 - (1.0 - u) ** (1.0 / 3.0))


def mscale(x, normz=1, b=0.5, tolerance=1e-5):
    """Compute the M-scale of ``x`` (root-finding step still TBD).

    Port of Michael's MATLAB ``mscale`` code.

    :param x: 1-D array of data whose M-scale is wanted
    :param normz: (optional) if > 0, normalize sigma for consistency
    :param b: b in Equation 7 of the paper (breakdown parameter)
    :param tolerance: error tolerance for the root search
    :return: None for now -- only the degenerate all-zeros case is
        detected; the actual root finding is not implemented yet.
    """
    # number of observations
    n = len(x)

    # work with the sorted absolute values (order statistics)
    y = np.sort(np.abs(x))

    # Order-statistic indices bracketing the solution.  ``int(...)`` is
    # required: ``np.floor``/``np.ceil`` return floats, which are not
    # valid array indices.
    n1 = int(np.floor(n * (1 - b)))
    n2 = int(np.ceil(n * (1 - b) / (1 - b / 2)))

    # bounds for sigma taken from the order statistics
    qq = [y[n1 - 1], y[n2 - 1]]

    # rho inverse evaluated at b/2
    binverse = rho_inverse(b / 2)

    # initial interval known to contain sigma
    sigma_initial = [qq[0], qq[1] / binverse]

    # relative or absolute tolerance, for sigma > or < 1
    if qq[0] >= 1:
        tol = tolerance
    else:
        tol = tolerance * qq[0]

    # If the fraction of exact zeros exceeds 1 - b, the M-scale is 0.
    # NOTE(review): the original test ``np.mean(x[x == 0] > (1 - b))``
    # compared the zero values themselves against 1 - b (always False);
    # the proportion of zeros is what the MATLAB code checks -- confirm
    # against mscale.m.
    if np.mean(x == 0) > (1 - b):
        sigma_m = 0
    else:
        print('find roots of a function')

    # TBD: solve mean(rho(x / sigma)) == b for sigma on ``sigma_initial``
    # with tolerance ``tol`` and return it (normalized when normz > 0).


def regularized_s(x, y, nlambdas, cvtype, nkeep, niter, verbose):
    """Calculate the S-ridge regression estimate (translation pending).

    This is the translation of the MATLAB code by Michael's student.
    Dependencies: PeYoRid method, mscale.

    :param x: regression matrix (N x P)
    :param y: measurement vector (N x 1)
    :param nlambdas: number of lambda values we use
    :param cvtype: type of the cross-validation (write options)
    :param nkeep: number of candidates to be kept for full iteration
    :param niter: maximum number of iterations for S-ridge calculation
    :param verbose: verbosity flag
    :return: None -- not implemented yet
    """
    print('here goes the translation of matlab code')


def main():
    """Smoke test: run mscale on random data."""
    fakedata = np.random.rand(10)
    sigma = mscale(fakedata)


if __name__ == '__main__':
    # To launch manually in console:
    # python sridge.py
    main()
https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/figures/final_versions/experiment_three_edited.eps -------------------------------------------------------------------------------- /tests/regularizedtau/figures/final_versions/sensitivitycurve_l1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/figures/final_versions/sensitivitycurve_l1.png -------------------------------------------------------------------------------- /tests/regularizedtau/ls.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I5 15 | tp6 16 | cnumpy 17 | dtype 18 | p7 19 | (S'f8' 20 | p8 21 | I0 22 | I1 23 | tp9 24 | Rp10 25 | (I3 26 | S'<' 27 | p11 28 | NNNI-1 29 | I-1 30 | I0 31 | tp12 32 | bI00 33 | S'\xac#F\xbe=\x99\x92?/\x07\x8e\xce\xc9s\xf8?\xf8ID\x89\xd7m\xf8?/\x07\x8e\xce\xc9s\xf8?/\x07\x8e\xce\xc9s\xf8?' 34 | p13 35 | tp14 36 | b. -------------------------------------------------------------------------------- /tests/regularizedtau/m.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I5 15 | tp6 16 | cnumpy 17 | dtype 18 | p7 19 | (S'f8' 20 | p8 21 | I0 22 | I1 23 | tp9 24 | Rp10 25 | (I3 26 | S'<' 27 | p11 28 | NNNI-1 29 | I-1 30 | I0 31 | tp12 32 | bI00 33 | S'\xb2\x83\xc3\xba\x9a\xcc\x95?/>K\x95\xa4\xbb\x98?\xce\xb4\x90\xd3pR\x9c?\x93\xe1\x94\x02\xcd\xe9\xa0?q\\\xfa\x9b,\x18\xb1?' 34 | p13 35 | tp14 36 | b. 
-------------------------------------------------------------------------------- /tests/regularizedtau/mathematica_data/IFtauL1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/mathematica_data/IFtauL1.mat -------------------------------------------------------------------------------- /tests/regularizedtau/mathematica_data/IFtauL2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/mathematica_data/IFtauL2.mat -------------------------------------------------------------------------------- /tests/regularizedtau/mathematica_data/IFtauNonReg.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/mathematica_data/IFtauNonReg.mat -------------------------------------------------------------------------------- /tests/regularizedtau/matlab_data/experimentalData.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LCAV/linvpy/471fb8b456af3bf22ced5e8fe1ec0fbbad8a08c8/tests/regularizedtau/matlab_data/experimentalData.mat -------------------------------------------------------------------------------- /tests/regularizedtau/mes.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I5 15 | tp6 16 | cnumpy 17 | dtype 18 | p7 19 | (S'f8' 20 | p8 21 | I0 22 | I1 23 | tp9 24 | Rp10 25 | (I3 26 | S'<' 27 | p11 28 | NNNI-1 29 | I-1 30 | I0 31 | tp12 32 | bI00 33 | 
S'\x89\xf1h8\xf0\xfb\x98?/\x07\x8e\xce\xc9s\xf8?/\x07\x8e\xce\xc9s\xf8?/\x07\x8e\xce\xc9s\xf8?/\x07\x8e\xce\xc9s\xf8?' 34 | p13 35 | tp14 36 | b. -------------------------------------------------------------------------------- /tests/regularizedtau/results_data/asv_l1.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'3\xca[\xbd\xff\xc8\xf1?U\x0e\xe2,\xacL\xf4?@Z\xda2\xf1\xd5\x02@\xef1\x14\x9a\xf4\xae\x0e@y\x7f9\x7f\x92/\x13@\xd7B\x12\x9b\x9a~ ?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I10 46 | tp19 47 | g11 48 | I00 49 | S'\x8a;\xc0\xf2w\xfe\xf7?8Wd1j\xae\xf2?h\xb3\xe1\xc4L\x06\xeb?\x7f9\xc3UX\xd5\xdf?J\x99\x95\xe9\xcc\xed\xc4?_\x84\xd5 H\xb1\x02?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I10 61 | tp25 62 | g11 63 | I00 64 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xb9?\x9a\x99\x99\x99\x99\x99\xc9?433333\xd3?\x9a\x99\x99\x99\x99\x99\xd9?\x00\x00\x00\x00\x00\x00\xe0?433333\xe3?gfffff\xe6?\x9a\x99\x99\x99\x99\x99\xe9?\xcd\xcc\xcc\xcc\xcc\xcc\xec?' 65 | p26 66 | tp27 67 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/asv_l2.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'\xd1o\xf2\xe4\xdf@\xf0?1!Yo\xbe\xef\xe8?\xab\x17P\xeb\x82\x90\xe1?\xe5\xac\xca\x94\xf8\xc9\xd4?\x8f\x88\xce\xbf8+\xd1?\x84@\xae\r\x07\xf3\xc8?\xc4\xd0\xdc\xd6[M\xc1?\xd3\x88#\x1f\xa90\xc2?\x14\xa9\t\x8d8\x8a\xbf?\x07I\xc5\x9bd\xad\xbb?' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I10 46 | tp19 47 | g11 48 | I00 49 | S'\x17~;01\xfb\xf7?\x1b\xb9\xe8\x14\x0c\xcc\xec?}\x10\xf6\x0b\xa7\x7f\xe4?\xca\xe3\xffJ\x16\xda\xdf?\r\xb7\xc7"4d\xda?6\xfb\xb9~\xed!\xd6?\xb2\xb3\xbc\xe1\xafD\xd3?\xd9\xc3\xfc\xc2\x0c\r\xd1?iR\x01\xf4\xb1\x18\xce?;?\xcc\x8a6a\xcb?' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I10 61 | tp25 62 | g11 63 | I00 64 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xb9?\x9a\x99\x99\x99\x99\x99\xc9?433333\xd3?\x9a\x99\x99\x99\x99\x99\xd9?\x00\x00\x00\x00\x00\x00\xe0?433333\xe3?gfffff\xe6?\x9a\x99\x99\x99\x99\x99\xe9?\xcd\xcc\xcc\xcc\xcc\xcc\xec?' 65 | p26 66 | tp27 67 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/bs_l1.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'\x00\x8eN\xb0\xa5\x8cn\xbf\xf8K#euG\xc8?\xf0\x9b\x963|\xf3\xd8?\x1d!\xee-\x88\x8e\xe2?\xb2\x03\xdeq\xfeI\xe9?\x9a:\x19\xa9q\xb1\xee?\xa6%\xb6\xc8\xe2\xa5\xf2?\xcc\xc2ay9\x05\xf6?\x00\x00\x00\x00\x00\x00\xf8?\x00\x00\x00\x00\x00\x00\xf8?' 36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I10 47 | tp19 48 | g11 49 | I00 50 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xa9?\x9a\x99\x99\x99\x99\x99\xb9?433333\xc3?\x9a\x99\x99\x99\x99\x99\xc9?\x00\x00\x00\x00\x00\x00\xd0?433333\xd3?gfffff\xd6?\x9a\x99\x99\x99\x99\x99\xd9?\xcd\xcc\xcc\xcc\xcc\xcc\xdc?' 51 | p20 52 | tp21 53 | ba. -------------------------------------------------------------------------------- /tests/regularizedtau/results_data/bs_l2.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'\x00\xa8\x8f\xec\x1a\xafh\xbf\xb4\x04F\xda\xc76\xda?[\x8b\xeb\xe8h\xf1\xe4?\xdbg\xc3\xd9d\xce\xe9?\xd0\x9d8+(O\xed?\x94\xadDVZ\x9f\xef?\xd8\xe6\xbd~y\xe2\xf0?\x8f\xfc\xb8\x85\xad\x9a\xf1?\xa4\x04(\xbaR.\xf2?\x13\x1b\x9d\x98\x9f\xc9\xf2?' 
36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I10 47 | tp19 48 | g11 49 | I00 50 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xa9?\x9a\x99\x99\x99\x99\x99\xb9?433333\xc3?\x9a\x99\x99\x99\x99\x99\xc9?\x00\x00\x00\x00\x00\x00\xd0?433333\xd3?gfffff\xd6?\x9a\x99\x99\x99\x99\x99\xd9?\xcd\xcc\xcc\xcc\xcc\xcc\xdc?' 51 | p20 52 | tp21 53 | ba. -------------------------------------------------------------------------------- /tests/regularizedtau/results_data/experiment_one.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I5 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'.\xee\xe9\xf8\x16\x85\xe6?\t\xd0\x9d\x87W\xbd/@7\xa1\x03\xda\xb9&5@\x91\xfd\x99\x9c\xf5q<@\x8dMT1o\x9d?@' 36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I5 47 | I1 48 | tp19 49 | g11 50 | I00 51 | S'\xa9\x99e\xdc\xce\xed\xe6?\xcf\xa6k\xa4\xb77\xef?\xaaF\xbaQ\x01\x10\xfd?|y[\xbdX\x9e\x13@\xb2\xd9@\x03Th+@' 52 | p20 53 | tp21 54 | bag1 55 | (g2 56 | (I0 57 | tp22 58 | g4 59 | tp23 60 | Rp24 61 | (I1 62 | (I5 63 | I1 64 | tp25 65 | g11 66 | I00 67 | S"\xfc\x99\xea\x1e\x04:\xe7?D0\x08\xbc\xb2&\x17@\xa1_\x0e\xe1=c'@Mkj#\xb5%4@\xc6\xa1\x14\x01\xc1'9@" 68 | p26 69 | tp27 70 | bag1 71 | (g2 72 | (I0 73 | tp28 74 | g4 75 | tp29 76 | Rp30 77 | (I1 78 | (I5 79 | I1 80 | tp31 81 | g11 82 | I00 83 | S'\x01P\xdbZ\x81[\xf3?\xee\xc4\xb1>\x02H\xf4?I\xb6\x9a9\x14\xed\xf4?]+\xc5\xb6\xdb\x14\xf6?\\\xa4F\xea$t!@' 84 | p32 85 | tp33 86 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/experiment_three.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I5 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'\xd98\x91.!)\x91?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I5 46 | tp19 47 | g11 48 | I00 49 | S'\x85/\x86\x14-\x85\x94?\x1cI\x8f\xbc=\xc6\x91?\xc4\xba>\x0f\xa2\x16\x9f?R\xe9\x95ay\xff\xa7?\xe5O\x8a\xe9@\x94\xb2?' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I5 61 | tp25 62 | g11 63 | I00 64 | S'\xd0.\xd6i>\x88\x94?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?1\x07\x8e\xce\xc9s\xf8?' 65 | p26 66 | tp27 67 | bag1 68 | (g2 69 | (I0 70 | tp28 71 | g4 72 | tp29 73 | Rp30 74 | (I1 75 | (I5 76 | tp31 77 | g11 78 | I00 79 | S'\x1ds#\x91{}\xa1?o%Wq\xd7\x04\x9b?\xc3\x115\x89\x18\x80\xa1?\x99\xc4R\x94\xf0H\x95?\xf4{ev\x14I\x99?' 80 | p32 81 | tp33 82 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/experiment_two.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I5 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'\xd3\xa4h6\x92\x80\xd0?E\xb0\x0e\xc4R\xb0\t@\x8cf\x9f!\x19\xb5\t@2\x028mT\xcc\t@\xd7PH\xfdZ\xb7\t@' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I5 46 | tp19 47 | g11 48 | I00 49 | S'RHZ\xc0\x7fe\xcc?\xe6kj\\\xd7\xb0\xd0?\x9f`\xbb\xe6\xe7\x8b\xd2?\xa8\x00\xef\x92\xea<\xdd?\x9cA\xa0\xc4ku\xee?' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I5 61 | tp25 62 | g11 63 | I00 64 | S'-W\xeac\xe5a\xd7?\xae\xaf\xe8Mk\xa4\xfe?39h3-`\x00@\xc4\x93U\x172\xa4\x02@\x17o\xa3\x0c|\x11\x06@' 65 | p26 66 | tp27 67 | bag1 68 | (g2 69 | (I0 70 | tp28 71 | g4 72 | tp29 73 | Rp30 74 | (I1 75 | (I5 76 | tp31 77 | g11 78 | I00 79 | S'\xc0g\x9b\x04o\xec\xd2?V\xff\x0fk\xc3\x1b\xd2?\xed\xbf\xba\xd57\x95\xd5?\xc0d|\xfa\xf5\xc2\xda?\x96\xab^\xb5H-\xde?' 80 | p32 81 | tp33 82 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/asv_l1.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'3\xca[\xbd\xff\xc8\xf1?U\x0e\xe2,\xacL\xf4?@Z\xda2\xf1\xd5\x02@\xef1\x14\x9a\xf4\xae\x0e@y\x7f9\x7f\x92/\x13@\xd7B\x12\x9b\x9a~ ?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I10 46 | tp19 47 | g11 48 | I00 49 | S'\x8a;\xc0\xf2w\xfe\xf7?8Wd1j\xae\xf2?h\xb3\xe1\xc4L\x06\xeb?\x7f9\xc3UX\xd5\xdf?J\x99\x95\xe9\xcc\xed\xc4?_\x84\xd5 H\xb1\x02?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I10 61 | tp25 62 | g11 63 | I00 64 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xb9?\x9a\x99\x99\x99\x99\x99\xc9?433333\xd3?\x9a\x99\x99\x99\x99\x99\xd9?\x00\x00\x00\x00\x00\x00\xe0?433333\xe3?gfffff\xe6?\x9a\x99\x99\x99\x99\x99\xe9?\xcd\xcc\xcc\xcc\xcc\xcc\xec?' 65 | p26 66 | tp27 67 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/asv_l2.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'\xd1o\xf2\xe4\xdf@\xf0?1!Yo\xbe\xef\xe8?\xab\x17P\xeb\x82\x90\xe1?\xe5\xac\xca\x94\xf8\xc9\xd4?\x8f\x88\xce\xbf8+\xd1?\x84@\xae\r\x07\xf3\xc8?\xc4\xd0\xdc\xd6[M\xc1?\xd3\x88#\x1f\xa90\xc2?\x14\xa9\t\x8d8\x8a\xbf?\x07I\xc5\x9bd\xad\xbb?' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I10 46 | tp19 47 | g11 48 | I00 49 | S'\x17~;01\xfb\xf7?\x1b\xb9\xe8\x14\x0c\xcc\xec?}\x10\xf6\x0b\xa7\x7f\xe4?\xca\xe3\xffJ\x16\xda\xdf?\r\xb7\xc7"4d\xda?6\xfb\xb9~\xed!\xd6?\xb2\xb3\xbc\xe1\xafD\xd3?\xd9\xc3\xfc\xc2\x0c\r\xd1?iR\x01\xf4\xb1\x18\xce?;?\xcc\x8a6a\xcb?' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I10 61 | tp25 62 | g11 63 | I00 64 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xb9?\x9a\x99\x99\x99\x99\x99\xc9?433333\xd3?\x9a\x99\x99\x99\x99\x99\xd9?\x00\x00\x00\x00\x00\x00\xe0?433333\xe3?gfffff\xe6?\x9a\x99\x99\x99\x99\x99\xe9?\xcd\xcc\xcc\xcc\xcc\xcc\xec?' 65 | p26 66 | tp27 67 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/bs_l1.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'\x00\x8eN\xb0\xa5\x8cn\xbf\xf8K#euG\xc8?\xf0\x9b\x963|\xf3\xd8?\x1d!\xee-\x88\x8e\xe2?\xb2\x03\xdeq\xfeI\xe9?\x9a:\x19\xa9q\xb1\xee?\xa6%\xb6\xc8\xe2\xa5\xf2?\xcc\xc2ay9\x05\xf6?\x00\x00\x00\x00\x00\x00\xf8?\x00\x00\x00\x00\x00\x00\xf8?' 36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I10 47 | tp19 48 | g11 49 | I00 50 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xa9?\x9a\x99\x99\x99\x99\x99\xb9?433333\xc3?\x9a\x99\x99\x99\x99\x99\xc9?\x00\x00\x00\x00\x00\x00\xd0?433333\xd3?gfffff\xd6?\x9a\x99\x99\x99\x99\x99\xd9?\xcd\xcc\xcc\xcc\xcc\xcc\xdc?' 51 | p20 52 | tp21 53 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/bs_l2.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I10 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'\x00\xa8\x8f\xec\x1a\xafh\xbf\xb4\x04F\xda\xc76\xda?[\x8b\xeb\xe8h\xf1\xe4?\xdbg\xc3\xd9d\xce\xe9?\xd0\x9d8+(O\xed?\x94\xadDVZ\x9f\xef?\xd8\xe6\xbd~y\xe2\xf0?\x8f\xfc\xb8\x85\xad\x9a\xf1?\xa4\x04(\xbaR.\xf2?\x13\x1b\x9d\x98\x9f\xc9\xf2?' 36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I10 47 | tp19 48 | g11 49 | I00 50 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x99\x99\x99\x99\x99\xa9?\x9a\x99\x99\x99\x99\x99\xb9?433333\xc3?\x9a\x99\x99\x99\x99\x99\xc9?\x00\x00\x00\x00\x00\x00\xd0?433333\xd3?gfffff\xd6?\x9a\x99\x99\x99\x99\x99\xd9?\xcd\xcc\xcc\xcc\xcc\xcc\xdc?' 51 | p20 52 | tp21 53 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/experiment_one.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I5 16 | I1 17 | tp7 18 | cnumpy 19 | dtype 20 | p8 21 | (S'f8' 22 | p9 23 | I0 24 | I1 25 | tp10 26 | Rp11 27 | (I3 28 | S'<' 29 | p12 30 | NNNI-1 31 | I-1 32 | I0 33 | tp13 34 | bI00 35 | S'.\xee\xe9\xf8\x16\x85\xe6?\t\xd0\x9d\x87W\xbd/@7\xa1\x03\xda\xb9&5@\x91\xfd\x99\x9c\xf5q<@\x8dMT1o\x9d?@' 36 | p14 37 | tp15 38 | bag1 39 | (g2 40 | (I0 41 | tp16 42 | g4 43 | tp17 44 | Rp18 45 | (I1 46 | (I5 47 | I1 48 | tp19 49 | g11 50 | I00 51 | S'\xa9\x99e\xdc\xce\xed\xe6?\xcf\xa6k\xa4\xb77\xef?\xaaF\xbaQ\x01\x10\xfd?|y[\xbdX\x9e\x13@\xb2\xd9@\x03Th+@' 52 | p20 53 | tp21 54 | bag1 55 | (g2 56 | (I0 57 | tp22 58 | g4 59 | tp23 60 | Rp24 61 | (I1 62 | (I5 63 | I1 64 | tp25 65 | g11 66 | I00 67 | S"\xfc\x99\xea\x1e\x04:\xe7?D0\x08\xbc\xb2&\x17@\xa1_\x0e\xe1=c'@Mkj#\xb5%4@\xc6\xa1\x14\x01\xc1'9@" 68 | p26 69 | tp27 70 | bag1 71 | (g2 72 | (I0 73 | tp28 74 | g4 75 | tp29 76 | Rp30 77 | (I1 78 | (I5 79 | I1 80 | tp31 81 | g11 82 | I00 83 | S'\x01P\xdbZ\x81[\xf3?\xee\xc4\xb1>\x02H\xf4?I\xb6\x9a9\x14\xed\xf4?]+\xc5\xb6\xdb\x14\xf6?\\\xa4F\xea$t!@' 84 | p32 85 | tp33 86 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/experiment_two.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | cnumpy.core.multiarray 3 | _reconstruct 4 | p1 5 | (cnumpy 6 | ndarray 7 | p2 8 | (I0 9 | tp3 10 | S'b' 11 | p4 12 | tp5 13 | Rp6 14 | (I1 15 | (I5 16 | tp7 17 | cnumpy 18 | dtype 19 | p8 20 | (S'f8' 21 | p9 22 | I0 23 | I1 24 | tp10 25 | Rp11 26 | (I3 27 | S'<' 28 | p12 29 | NNNI-1 30 | I-1 31 | I0 32 | tp13 33 | bI00 34 | S'\xd3\xa4h6\x92\x80\xd0?E\xb0\x0e\xc4R\xb0\t@\x8cf\x9f!\x19\xb5\t@2\x028mT\xcc\t@\xd7PH\xfdZ\xb7\t@' 35 | p14 36 | tp15 37 | bag1 38 | (g2 39 | (I0 40 | tp16 41 | g4 42 | tp17 43 | Rp18 44 | (I1 45 | (I5 46 | tp19 47 | g11 48 | I00 49 | S'RHZ\xc0\x7fe\xcc?\xe6kj\\\xd7\xb0\xd0?\x9f`\xbb\xe6\xe7\x8b\xd2?\xa8\x00\xef\x92\xea<\xdd?\x9cA\xa0\xc4ku\xee?' 50 | p20 51 | tp21 52 | bag1 53 | (g2 54 | (I0 55 | tp22 56 | g4 57 | tp23 58 | Rp24 59 | (I1 60 | (I5 61 | tp25 62 | g11 63 | I00 64 | S'-W\xeac\xe5a\xd7?\xae\xaf\xe8Mk\xa4\xfe?39h3-`\x00@\xc4\x93U\x172\xa4\x02@\x17o\xa3\x0c|\x11\x06@' 65 | p26 66 | tp27 67 | bag1 68 | (g2 69 | (I0 70 | tp28 71 | g4 72 | tp29 73 | Rp30 74 | (I1 75 | (I5 76 | tp31 77 | g11 78 | I00 79 | S'\xc0g\x9b\x04o\xec\xd2?V\xff\x0fk\xc3\x1b\xd2?\xed\xbf\xba\xd57\x95\xd5?\xc0d|\xfa\xf5\xc2\xda?\x96\xab^\xb5H-\xde?' 80 | p32 81 | tp33 82 | ba. 
-------------------------------------------------------------------------------- /tests/regularizedtau/results_data/final_versions/sc_l1.pkl: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I21 15 | I21 16 | tp6 17 | cnumpy 18 | dtype 19 | p7 20 | (S'f8' 21 | p8 22 | I0 23 | I1 24 | tp9 25 | Rp10 26 | (I3 27 | S'<' 28 | p11 29 | NNNI-1 30 | I-1 31 | I0 32 | tp12 33 | bI00 34 | S'\xa2\xf93\xee\x1b@f\xdbuAD\xd6#@\x90V \xf6f\x92%c\x0f@\xe6\xa7\x8d\xf8r\xd2\xe7\x1b@\x0f\xeeg\xb9\xc1\x14\x02\xc0\xec\x82\xdeD\x8c\x8d)\xc0\xfe\xc1\xa9N\xb2e\xe1\xbfr\xda\x86\xfb\x0f\xb6h\x07y?\xe1\x12\x006\x7f\xdez?0YIx\xf4%q?V\xdcp\xfd-ub?M\r\xfc\x83\xef\x04J?\x16\xdbU\xf0\xa2\xa3L?\xcep\xa2\x82\xde|U?6&\xcd\xca>\xc0W?\x90\xa2\xcf\xf3\x90\xb5O?\x06\\\x86N\xb7\xce\xfb>\xa6\x020}~F\xa4\xbe<\xa7\x86Q\xe9\x87N>\x1cf\xa2*\xc7\xf8b>\xd7mI\xda\xab\xddX>i[sl\x90\xc9\xe3\xbd\x8b\x9b\xa4\xb9\x1e\xb5\xeb\xbd\xfd\x12\xb8\x9c\xb7_\t\xbe8q`q)#\x0f\xbe\x97?\ty%\xa8\x18\xbet\x88\\\xea\xec\xb7\xca>\x10\xe8\xf6}<\xc6\x05?\xec\xb4\x0f\xca\xf6\x98$?L\x8e\xc1,wv8?\xdd\xe6\xc1\xc3\x85^??\xbci\xdc(}\xcdG?\x19\x81\xdb\xd0\xab\xa1O?\xf1"\xc1\x9d\'\xefP?\x8e5\xc3?\x03\x00N?v\x1d\x0f\x1c\xaf_P?1\x12\x1c\xa9w\xe1N?:\x84C,\xc8=X?\xa4\xa6\xa2&TNf?\x0f\x9f\xca\x0b\x8a\xf3x?61\xcf\x9c\xe7\x0cx? 
\xc8\x0b\xb8\x1d?q?}/\x89\x1c\xc5mc?j7\xc5\x98M\x98X?-\xcf\x00\x00\x00\x00\x00\x00\x00\x00\xb0\x90+\x98\x80}\x88\xbd\x0f.:\x99\xa2\xf3\x1b\xbe\xbc\x1d\x9d\xcb\xdclf\xbe\x8ebg\xa7o\x10\x9b\xbe\xf2\xb9\x96\xab_\xd7\xba\xbe\xc8\x1f\xf8\x88\x8b\x1a\xc7\xbe\x96\xcb\xb5Xg\x9d\xca\xbe\x02\xce~Q\xc6f,\xbf^\xcd;\xbeS\xb1c\xbfb\xbd\xc8\xa82;y?\xc6\x0e\xc4.$w\xc6?\x83\x8b\xbd\xe4CN\xc1?qd#p1J\xbf?X\x18\xaa;\xe6\xdb\xc0?\xba\xcb\xf4\x02\xaa\x95\xb1\xbfu\xd5d\xe3\x80\x01\xd5?FT\xe8\xabs\x04\xfe?\x1b@f\xdbuAD\xd6#@\x90V \xf6f\x92%c\x0f@\xe6\xa7\x8d\xf8r\xd2\xe7\x1b@\x0f\xeeg\xb9\xc1\x14\x02\xc0\xec\x82\xdeD\x8c\x8d)\xc0\xfe\xc1\xa9N\xb2e\xe1\xbfr\xda\x86\xfb\x95\t\xc5\xbfxj`9\x95\t\xc5\xbf\x00\xe8\x8d4\x95\t\xc5\xbf(Z\x98=\x95\t\xc5\xbf\xc0\xad\xa9/\x95\t\xc5\xbf\xc0\x15\x188\x95\t\xc5\xbf\xb8\xf6\x12H\x95\t\xc5\xbf\xd8E\xa50\x95\t\xc5\xbfPO\xf0@\x95\t\xc5\xbf\xd8\xc6\x0c0\x95\t\xc5\xbf\x08\xff\xcc@\x95\t\xc5\xbf\xc0l%5\x95\t\xc5\xbf\xc0\x9b\xa7.\x95\t\xc5\xbfX0\xbaz \xf2(\xc0\xf3!\x83VH\xe8\x00\xc0\xbd\xe0A\xaa~\xdb\x17@3s\x87L\x1a\xad\x1b@\x88\xbei3\x95\t\xc5\xbf\xf8q6S\x95\t\xc5\xbf8!\xbc/\x95\t\xc5\xbfx -F\x95\t\xc5\xbf\xa8010\x95\t\xc5\xbf CY1\x95\t\xc5\xbf8\x97\x04S\x95\t\xc5\xbf\x18\x06\xa6?\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf\x00\x0b\xacL\x95\t\xc5\xbf\xc0\x84f:\x95\t\xc5\xbf\x88\x9a\tA\x95\t\xc5\xbf\xf0;28\x95\t\xc5\xbfP\x04\xd48\x95\t\xc5\xbf 3&/\x95\t\xc5\xbf\x80\x98\xb2A\x95\t\xc5\xbf\xc0\xca\xe43\x95\t\xc5\xbfS2\xf0#l\xe3%\xc0\xfd\x1cD\xa5G\x8c\x1e\xc0W\xfc\x9a\x1e\x9c\xfe\xf2?\x14\x14 
j^\xe2\x1b@\x98\x88\x85\x9e\xbam\xc2\xbfPa76\x95\t\xc5\xbf\x88d_.\x95\t\xc5\xbf\x18\x06\xa6?\x95\t\xc5\xbf\x18VF/\x95\t\xc5\xbf\xe8t\xc4>\x95\t\xc5\xbf0\xdc(3\x95\t\xc5\xbf\xc0\x7f\xb1/\x95\t\xc5\xbf\xb8\xc4\x1d/\x95\t\xc5\xbf8`\xe7B\x95\t\xc5\xbf\xd8\xa3M3\x95\t\xc5\xbf\xd8r\xe5>\x95\t\xc5\xbf\xc0\x7f\xb1/\x95\t\xc5\xbf\x10\xbb\xea:\x95\t\xc5\xbf\xb8\xc4\x1d/\x95\t\xc5\xbf@H\x8a.\x95\t\xc5\xbfH\xd8\x0b;\x95\t\xc5\xbf\x10\x0eC>\x95\t\xc5\xbf\xa8\x9c\x9eE\xbf\xbe*\xc0HF\xf3\xe8\xa6\x93\x10\xc0\x0e\xb3\xe7\xb4\xdb\xeb\n@\xa3\xee8\xc0\x95e\x1b@\xc0\x84f:\x95\t\xc5\xbfx\xdb\xc2.\x95\t\xc5\xbf\xf8\\\xaa6\x95\t\xc5\xbf\xa8\xe8\x9eC\x95\t\xc5\xbf -\xb92\x95\t\xc5\xbf\x80\x9bP.\x95\t\xc5\xbf\x801\xa0:\x95\t\xc5\xbf\xa0\xf5\x911\x95\t\xc5\xbf\xb8\xe0\xe55\x95\t\xc5\xbf\x90\xb0\xecK\x95\t\xc5\xbf\x90\xc7D<\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf\xd8\xc6\x0c0\x95\t\xc5\xbf`\x88\xa41\x95\t\xc5\xbf\xa0\x0b20\x95\t\xc5\xbf\x88\xb8*/\x95\t\xc5\xbf@z\t0\x95\t\xc5\xbf\xe0\xa4\x07ME&\xcc\xbf\x896}\x10\x93)!\xc0\x8a 
\xf7\xe21\xf1\xee\xbf\xe1\x10\x1d\xc0s\x9f\x0f@\xfbv\xcc\xa5\xaco\x04@`<\xe73\x95\t\xc5\xbf@B\x1d2\x95\t\xc5\xbf\xe8\x1c\x898\x95\t\xc5\xbf\xc8\xad\x1a4\x95\t\xc5\xbfh\xdd<8\x95\t\xc5\xbf\x880\xfa1\x95\t\xc5\xbf\xe8ht.\x95\t\xc5\xbf0\xdc(3\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf\xa8\xb6M:\x95\t\xc5\xbfH\xd2\xfa.\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf\x08\\\x8c.\x95\t\xc5\xbfH\xa7\x163\x95\t\xc5\xbfxP\x98T\x95\t\xc5\xbf\xc0\x83\xdfk\x95\t\xc5\xbf\xb8\xc1\xf2.\x95\t\xc5\xbf\xf7\xe9b\xb0\x8f\xf7!\xc0\x1d2p@\xa5\x11\x13\xc0\xb9\xf1~\x04IQ\xf1?k\xf7k~\xd2\x01\x0e@\xc8\x93\n0\x95\t\xc5\xbf(\xd4\x92/\x95\t\xc5\xbf`\x97\x92.\x95\t\xc5\xbf\xb8\xf6\x12H\x95\t\xc5\xbf\xa0A!7\x95\t\xc5\xbf\xd0\xc6V7\x95\t\xc5\xbf@di1\x95\t\xc5\xbf\x08\\\x8c.\x95\t\xc5\xbf@H\x8a.\x95\t\xc5\xbf@A\xaa4\x95\t\xc5\xbf\xc8\xad\x1a4\x95\t\xc5\xbf\x88\x98\xad.\x95\t\xc5\xbf\xd0\xc2\xcf5\x95\t\xc5\xbfp\xf4{D\x95\t\xc5\xbf\xd8Q\xdeD\x95\t\xc5\xbf\xf0F\x0fK\x95\t\xc5\xbf\xb0\xf9\x118\x95\t\xc5\xbf\x08\xc4\x113\x95\t\xc5\xbf\xf1\x01<\x19\x97Q!\xc0[\x90\x8b\x81\xd1c\x01\xc0\xee\xf4\x8d\x9b<\x13\xff?\xe0Z\x11M5=\x02@0nM.\x95\t\xc5\xbf\xa0pG@\x95\t\xc5\xbf\xe8ht.\x95\t\xc5\xbf\xd8\xb2;0\x95\t\xc5\xbf\xb0v\xdb9\x95\t\xc5\xbf\x88\xb2JF\x95\t\xc5\xbf\xd8\xb2;0\x95\t\xc5\xbf\xc8D\xf49\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf8\xa8K7\x95\t\xc5\xbf\xb8G\x0f9\x95\t\xc5\xbf\xb8\xb8\x15>\x95\t\xc5\xbf 
\x11"O\x95\t\xc5\xbf(\xce\xb2F\x95\t\xc5\xbf@z\t0\x95\t\xc5\xbf\x10\x88\xf8;\x95\t\xc5\xbf\xb8\x117.\x95\t\xc5\xbf\x82u\xf075)\xed\xbf*~\x15s%\x9a\x13\xc0\xe0\xc4\x07\xcaKM\xd0\xbf\x7f\xbdD\xbfn#\xf7?\x80\x1eZq\x98\x9b\xb9\xbfXx\xe9.\x95\t\xc5\xbfxA\x900\x95\t\xc5\xbf\xd8\xc6\x0c0\x95\t\xc5\xbf\xf0:\xed2\x95\t\xc5\xbf\x88\x87\x94B\x95\t\xc5\xbfx\xbfp?\x95\t\xc5\xbf\xf8!\xaa4\x95\t\xc5\xbf\x90&\x1bH\x95\t\xc5\xbf`\xce\xf42\x95\t\xc5\xbfp\xd8\x0f.\x95\t\xc5\xbf\xf0\xac8=\x95\t\xc5\xbf\xe0\x90w1\x95\t\xc5\xbf\xa8E^1\x95\t\xc5\xbf\xf8\xbf\x1e@\x95\t\xc5\xbf\xa0\xa6\x927\x95\t\xc5\xbf\xc0\x8a\x01/\x95\t\xc5\xbf\xd8\xdd\xc3;\x95\t\xc5\xbf\x11\xef\xbc\xd0\xcb\x03\x18\xc0\xb0\xb5\xbc2\x01\xf9\x01\xc0\xa862\xe5S\xc2\xde?\x80\xb0\xe1\x9a\x9d@\x8b?\x102^<\x95\t\xc5\xbfpz93\x95\t\xc5\xbf\xb8\x15\x907\x95\t\xc5\xbf`\x8eBQ\x95\t\xc5\xbf\xd8\x03iD\x95\t\xc5\xbf8!\xbc/\x95\t\xc5\xbf\xf0\xd1\xc68\x95\t\xc5\xbf\x80%\x0b6\x95\t\xc5\xbfX\x12\xf1_\x95\t\xc5\xbf\x08\x04\xde;\x95\t\xc5\xbf \xec\xc1K\x95\t\xc5\xbf\xf8?o2\x95\t\xc5\xbf\x10x\n.\x95\t\xc5\xbf\x90\xc7D<\x95\t\xc5\xbf\x90\xe6e;\x95\t\xc5\xbfx\xf8\x14/\x95\t\xc5\xbf\xc0\xad\xa9/\x95\t\xc5\xbf\xa8yPA\x95\t\xc5\xbf`\xf7o`\xe2\xa1\x10\xc0\xfeg\x8aC\x94\xaa\xe3\xbf\x80\x82\xb0\x9c\x05\x99\x87?\xf6\xf0-\xe5\x95]\x01\xc0\x88\x9a\tA\x95\t\xc5\xbf\xb0\x98U1\x95\t\xc5\xbf(\x89H/\x95\t\xc5\xbf\x98\x97\x92Q\x95\t\xc5\xbf\x90\x0eM/\x95\t\xc5\xbf\xe0\xf0\x05/\x95\t\xc5\xbf\xd8\xa3M3\x95\t\xc5\xbf\xa8\x05\xf1C\x95\t\xc5\xbf\xd8\xb2;0\x95\t\xc5\xbf\x88\x98\xad.\x95\t\xc5\xbf\xe8\x1c\x898\x95\t\xc5\xbf\x90&\x1bH\x95\t\xc5\xbf\xb8\xc4\x1d/\x95\t\xc5\xbf\x90\x86\xdah\x95\t\xc5\xbf\xa0\xa7MT\x95\t\xc5\xbfP\x95\xb3.\x95\t\xc5\xbf\xd8l\xa6:\x95\t\xc5\xbf\xe8\xa2^\xd7\x8ac\xf2\xbf)\xaa\xe8\xfaVu\xf8\xbf\x00\x96V\x84\xb0\xcd\x8a\xbfRF\xa3\xfaVu\xf8\xbfx@L\xdf\x8ac\xf2\xbfX\x9a\xd9=\x95\t\xc5\xbf\xd8\xf9\xb9:\x95\t\xc5\xbf\x00z\x9b3\x95\t\xc5\xbfx\xc7\xda2\x95\t\xc5\xbf\x88\x98\xad.\x95\t\xc5\xbf\x10x\n.\x95\t\xc5\xbf\xd8*"/\x95\t\xc5\xbf\xe0]\x852\x95
\t\xc5\xbf\x90\xca\xb40\x95\t\xc5\xbf\xd8\x9eS4\x95\t\xc5\xbf\xd0\x804.\x95\t\xc5\xbf\xf0\xc7\xe96\x95\t\xc5\xbfX\xc5\xeb1\x95\t\xc5\xbfh\xdeSE\x95\t\xc5\xbf\xf8RqD\x95\t\xc5\xbf\x183Y:\x95\t\xc5\xbf@+!2\x95\t\xc5\xbf*\xe3\xe1\xc7\x95]\x01\xc0\x80i\r@\x05\x99\x87?\xc4\x9en4\x94\xaa\xe3\xbf\xbb\x95Da\xe2\xa1\x10\xc0\x90\xeb\x03J\x95\t\xc5\xbf@z\t0\x95\t\xc5\xbf\xb8x29\x95\t\xc5\xbf@=\x0fb\x95\t\xc5\xbf\x08\x14\xfaA\x95\t\xc5\xbf \n\x112\x95\t\xc5\xbf\x10x\n.\x95\t\xc5\xbf\xf0\x85N/\x95\t\xc5\xbf\xf0F\x0fK\x95\t\xc5\xbf\x98\x8dm0\x95\t\xc5\xbf\x08Be.\x95\t\xc5\xbf\x80|//\x95\t\xc5\xbf\x98-\x9a>\x95\t\xc5\xbf\x08\\\x8c.\x95\t\xc5\xbf \n\x112\x95\t\xc5\xbf\xe8W\xb72\x95\t\xc5\xbfh\x98H8\x95\t\xc5\xbf`Os.\x95\t\xc5\xbf\x00\xe7\x96\xe3\xa2@\x8b?`[v\x18S\xc2\xde?\xd5*\x18.\x01\xf9\x01\xc0M\xe9o\xd1\xcb\x03\x18\xc0\xf8\xa3\xfaH\x95\t\xc5\xbfX\x0c\xaf0\x95\t\xc5\xbf@\x90d:\x95\t\xc5\xbf\xd8\xb2;0\x95\t\xc5\xbf\x08\x04\xde;\x95\t\xc5\xbf\x88\x9a\tA\x95\t\xc5\xbf\x10\x04\x944\x95\t\xc5\xbfh@$.\x95\t\xc5\xbf\x10\xbb\xea:\x95\t\xc5\xbf\xd8\xb2;0\x95\t\xc5\xbf\xc0\x15\x188\x95\t\xc5\xbf\xc0\xca\xe43\x95\t\xc5\xbfpe#.\x95\t\xc5\xbf\xc0\x17\x15/\x95\t\xc5\xbf\x10\x9b\xb2.\x95\t\xc5\xbf\xb0T\xd4.\x95\t\xc5\xbf(\xf9\t/\x95\t\xc5\xbfP\xdd`\x7f\x98\x9b\xb9\xbf\xe4\x17I\xbfn#\xf7?pI\xf4\xb5LM\xd0\xbf`\xe9\xf9a%\x9a\x13\xc0L\xbb\x9395)\xed\xbf\xc8m 3\x95\t\xc5\xbfxW0/\x95\t\xc5\xbf(O\xd5Q\x95\t\xc5\xbf\xf0\xd1\xc68\x95\t\xc5\xbfP\x0b\x86:\x95\t\xc5\xbfP\xc7\x1b4\x95\t\xc5\xbf@\xcf\x196\x95\t\xc5\xbfx\xa7wY\x95\t\xc5\xbf \x11"O\x95\t\xc5\xbf\x18s\x813\x95\t\xc5\xbf\xb8X\xb58\x95\t\xc5\xbf\xd8\x9eS4\x95\t\xc5\xbf\xd8*"/\x95\t\xc5\xbf\x88\x87\x94B\x95\t\xc5\xbf Y\xfcZ\x95\t\xc5\xbf\xd8\xc5\xb0.\x95\t\xc5\xbf(\xca+E\x95\t\xc5\xbf\x9c\xcd\xaak5=\x02@ \xdc\x84a<\x13\xff?\xfch\x94c\xd1c\x01\xc0oX> \x97Q!\xc00\xbfcF\x95\t\xc5\xbf\xc0\xad\xa9/\x95\t\xc5\xbf\x10\x15\xc7G\x95\t\xc5\xbf\xe8t\xc4>\x95\t\xc5\xbf 
\n\x112\x95\t\xc5\xbfx\xe2t0\x95\t\xc5\xbf\x880\xfa1\x95\t\xc5\xbf\xa0\x0b20\x95\t\xc5\xbf\xe0\xc4\xc51\x95\t\xc5\xbf\x00&\xa2:\x95\t\xc5\xbfx -F\x95\t\xc5\xbf\xc0\xca\xe43\x95\t\xc5\xbf\xd8E\xa50\x95\t\xc5\xbf\xd85D6\x95\t\xc5\xbf\xd8\x9eS4\x95\t\xc5\xbf\x98\xc3\x8a/\x95\t\xc5\xbf@A\xaa4\x95\t\xc5\xbfP\x19F.\x95\t\xc5\xbf\xfa\x91\x93\x9d\xd2\x01\x0e@<\x91u\x06IQ\xf1?\x1a\xbaVA\xa5\x11\x13\xc0\x9c\x83\\\xb0\x8f\xf7!\xc0Pa76\x95\t\xc5\xbf\xc8M\xa32\x95\t\xc5\xbf\xe0\xe6\x111\x95\t\xc5\xbfx\xfc\x9b0\x95\t\xc5\xbfHv\n/\x95\t\xc5\xbf`\xd2\xf1K\x95\t\xc5\xbf\x10\xbb\xea:\x95\t\xc5\xbf\xe8ht.\x95\t\xc5\xbf\x00&\xa2:\x95\t\xc5\xbf \x11"O\x95\t\xc5\xbf\x80V\\.\x95\t\xc5\xbf\xd0\xed\xb31\x95\t\xc5\xbf\x88gE:\x95\t\xc5\xbfh!KN\x95\t\xc5\xbfHB\xa52\x95\t\xc5\xbf\xa8\x8fL0\x95\t\xc5\xbf\x90\xd8/0\x95\t\xc5\xbf\x93>\xe1\xa4\xaco\x04@\xc2T\x8f\xc0s\x9f\x0f@JK{]1\xf1\xee\xbf9ZH\t\x93)!\xc0\xb8\xb9\x0f\x82C&\xcc\xbf\xe8-\xeaC\x95\t\xc5\xbf\xd8\xf9\xb9:\x95\t\xc5\xbf\xe0\x90w1\x95\t\xc5\xbfX\x0c\xaf0\x95\t\xc5\xbf@\xcf\x196\x95\t\xc5\xbf\xe0\xd9\x96B\x95\t\xc5\xbf\xe0]\x852\x95\t\xc5\xbfH\x06I/\x95\t\xc5\xbf0_\x1a=\x95\t\xc5\xbfX\x9a\xd9=\x95\t\xc5\xbf 
\xdb\x04P\x95\t\xc5\xbf\xb8\xcb\\D\x95\t\xc5\xbf\x88^:Q\x95\t\xc5\xbf\xd8\x1c\xa7/\x95\t\xc5\xbfHTy;\x95\t\xc5\xbf\xd8\xca\x931\x95\t\xc5\xbf\xd8\xffT/\x95\t\xc5\xbft\xde\xcc\xc0\x95e\x1b@\xfe\xed\x1f\xd5\xdb\xeb\n@K\xe8\x14\xf9\xa6\x93\x10\xc0\x11`\x1a=\xbf\xbe*\xc0\xc0E\xf62\x95\t\xc5\xbf\x10\x89\x991\x95\t\xc5\xbf\xb8\xc4\x1d/\x95\t\xc5\xbfP\xc7\x1b4\x95\t\xc5\xbf\xf0\xd1\xc68\x95\t\xc5\xbf@B\x1d2\x95\t\xc5\xbf\x18\x06\xa6?\x95\t\xc5\xbf\xd8\x8fe7\x95\t\xc5\xbf\xc0\x7f\xb1/\x95\t\xc5\xbf\x88\x98\xad.\x95\t\xc5\xbfp\xd8\x0f.\x95\t\xc5\xbfhVh<\x95\t\xc5\xbf\xe8ht.\x95\t\xc5\xbf\xc0\x7f\xb1/\x95\t\xc5\xbf\xf0\xd1\xc68\x95\t\xc5\xbf\xd0$\xbaE\x95\t\xc5\xbfp\xd8\x0f.\x95\t\xc5\xbf\x00\x1c\x05\x9e\xbam\xc2\xbf\xb9?\x1dj^\xe2\x1b@\xe7mv!\x9c\xfe\xf2?\xd8f\xb0\xa4G\x8c\x1e\xc0\xf0\xcb\xdc#l\xe3%\xc0\xe8t\t3\x95\t\xc5\xbf\xf0\x1e\xe07\x95\t\xc5\xbf\x88\x9a\tA\x95\t\xc5\xbf\xf8?o2\x95\t\xc5\xbfX>.2\x95\t\xc5\xbf\xc0\xad\xa9/\x95\t\xc5\xbfH\xd2\xfa.\x95\t\xc5\xbf\x00\xe6\xa79\x95\t\xc5\xbf\xb8\x06\x8b>\x95\t\xc5\xbf\x98\xd8[@\x95\t\xc5\xbfH>\xd9<\x95\t\xc5\xbf\xd0\x89\xb5.\x95\t\xc5\xbf\xd8r\xe5>\x95\t\xc5\xbfp\xd7W<\x95\t\xc5\xbf\xf8\xa4\xb2:\x95\t\xc5\xbf\x00\t~2\x95\t\xc5\xbf0\x94 /\x95\t\xc5\xbf7\xac\x1dK\x1a\xad\x1b@@\x14\x15\xaa~\xdb\x17@\xa0\xf4cRH\xe8\x00\xc0\x89Qzz \xf2(\xc0X\x0c\xaf0\x95\t\xc5\xbf\xe0]\x852\x95\t\xc5\xbfP\x15\x94_\x95\t\xc5\xbf\xc8\xef\xe33\x95\t\xc5\xbf\x00cq;\x95\t\xc5\xbf\xc8\xad\x1a4\x95\t\xc5\xbf0\xdc(3\x95\t\xc5\xbf\x88\xd9bL\x95\t\xc5\xbf\x08\x04\xde;\x95\t\xc5\xbf\x18\xe6\x84/\x95\t\xc5\xbf\xc8\x8c-a\x95\t\xc5\xbf\x80\xebf5\x95\t\xc5\xbf\x90\xeb\x8d2\x95\t\xc5\xbf\xc8D\xf49\x95\t\xc5\xbf 3&/\x95\t\xc5\xbf\xc8;\\=\x95\t\xc5\xbf\xe8t\xc4>\x95\t\xc5\xbf\x10\xc9\xaa.\x95\t\xc5\xbf\xa3\x0ed\x99p\x96%@\xbb\xfe\xfd\xa6g\xf4\x10@\n\xe9!b\xa8\xcf\x19\xc0:\x00|\xaa\xe8\x192\xc0\x98o\xa82\x95\t\xc5\xbfH>\xd9<\x95\t\xc5\xbf' 35 | p13 36 | tp14 37 | b. 
-------------------------------------------------------------------------------- /tests/regularizedtau/sourcethree.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I20 15 | I1 16 | tp6 17 | cnumpy 18 | dtype 19 | p7 20 | (S'f8' 21 | p8 22 | I0 23 | I1 24 | tp9 25 | Rp10 26 | (I3 27 | S'<' 28 | p11 29 | NNNI-1 30 | I-1 31 | I0 32 | tp12 33 | bI00 34 | S'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dK\x7f\xdc\x1fU\xd9\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99S\x16<\xda\x1a\xe6\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00H\x95\xba\x87I\xdd\xf4\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf3\x98\xa7\x98\xa9H\xa2\xbf' 35 | p13 36 | tp14 37 | b. 
-------------------------------------------------------------------------------- /tests/regularizedtau/sourcetwo.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I20 15 | I1 16 | tp6 17 | cnumpy 18 | dtype 19 | p7 20 | (S'f8' 21 | p8 22 | I0 23 | I1 24 | tp9 25 | Rp10 26 | (I3 27 | S'<' 28 | p11 29 | NNNI-1 30 | I-1 31 | I0 32 | tp12 33 | bI00 34 | S"u\xbf\xb3\xe6\xa3\x0b\xe3\xbf+\xce\x06&\x93`\xc6?\x1e?\xf2\xb5\x9d\x1e\xb4?G\xa5\xc7\xe8[\xa5\xea\xbf\xb4\xd6{\x85&\xd0\xd9\xbf(\xe8\xcb\xb1L\x88\xe8?R\x8e\xac\xc8\xe2\x1c\xfb\xbf\x85\xf1.\xba\x15\x1e\xd4?\x97^\xd1*\xc9!\xe1\xbf\xe5?\xab\xd8\x7f#\xee?3\x08\xe4\xc3\xdb\xc6\xea?\xd6\x15\xf7<\xa8\xb6\xd2?\xd2\xcfH\xd6H#\xd6?\xbb\xe6\xb5)\xcb\xd4\xcb\xbfAl\xc1l\xe1|\xe3?\x13=\xf7ts\xb2\xe5\xbf\\yU\x7f\xbbB\xcb\xbf[\xf6aq'~\xf2\xbf+\xb4#\xb5\x88\x9b\xee?\xa6\x88ML|t\xec\xbf" 35 | p13 36 | tp14 37 | b. -------------------------------------------------------------------------------- /tests/regularizedtau/tau.p: -------------------------------------------------------------------------------- 1 | cnumpy.core.multiarray 2 | _reconstruct 3 | p0 4 | (cnumpy 5 | ndarray 6 | p1 7 | (I0 8 | tp2 9 | S'b' 10 | p3 11 | tp4 12 | Rp5 13 | (I1 14 | (I5 15 | tp6 16 | cnumpy 17 | dtype 18 | p7 19 | (S'f8' 20 | p8 21 | I0 22 | I1 23 | tp9 24 | Rp10 25 | (I3 26 | S'<' 27 | p11 28 | NNNI-1 29 | I-1 30 | I0 31 | tp12 32 | bI00 33 | S':\xeb\x96T\x84\xe3\x97?\xc43\x1f\x0e\xf9\xee\xa3?F\xcaAz\x007\xa2?\x86\x8b\xce\t\x07\xce\xaa?\x99w\xa6\xc7\x9e\xcb\xa2?' 34 | p13 35 | tp14 36 | b. 
-------------------------------------------------------------------------------- /tests/regularizedtau/tauscale.m: -------------------------------------------------------------------------------- 1 | function sigmas=tauscale(x,ktau,delta) 2 | % tau scales (row vector) of x for several constants ktau (row) 3 | %delta= "delta" for initial M-scale, default=0.5 4 | if nargin<3, delta=0.5; end 5 | sigmas=[]; s0=mscale(x,0,delta); 6 | %constant for consistency of s0 7 | c0=7.8464-34.6565*delta + 75.2573*delta^2 -62.5880*delta^3; 8 | s0=s0/c0; 9 | for k=ktau 10 | romed=mean(rho(x/(s0*k))); sig=k*s0*sqrt(romed); 11 | sigmas=[sigmas sig]; 12 | end 13 | 14 | function r=rho(x) %Bisquare 15 | r= (1-(1-x.^2).^3 .*(abs(x)<=1))/3; %para que rho''(0)=2 -------------------------------------------------------------------------------- /tests/regularizedtau/toolboxutilities.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================================================================== 3 | UTILITIES 4 | This toolbox contains many useful functions, which are not related 5 | to solving the inverse problem itself. 
def getsource(sourcetype, sourcesize, k=1):
    """
    Generate a synthetic source vector of shape (sourcesize, 1).

    :param sourcetype: 'random' (iid Gaussian), 'sparse' (random support),
        or 'constant' (piecewise step equal to 1 on indices 7..14)
    :param sourcesize: length of the source vector
    :param k: fraction of nonzero entries for the 'sparse' type
        (e.g. k=0.2 gives 20 % sparsity); default 1 keeps every entry
    :return: numpy array of shape (sourcesize, 1)
    """
    import numpy as np
    import sys  # to be able to exit
    if sourcetype == 'random':  # Gaussian iid source, mu = 0, sigma = 1
        x = np.random.randn(sourcesize, 1)  # source vector
    elif sourcetype == 'sparse':
        # number of nonzero entries; int() is required because a
        # fractional k makes k * sourcesize a float, and floats cannot
        # slice p[0:sparsity] or size randn() in Python 3
        sparsity = int(k * sourcesize)
        x = np.zeros((sourcesize, 1))  # initialization with a zero source
        p = np.random.permutation(sourcesize)
        nonz = p[0:sparsity]  # getting random indexes for the nonzero values
        # get randomly the value for the non zero elements
        x[nonz] = np.random.randn(sparsity, 1)
    elif sourcetype == 'constant':
        x = np.zeros((sourcesize, 1))  # initialization with a zero source
        x[7:15] = 1  # making a piecewise source
    else:  # unknown type of source
        sys.exit('unknown source type %s' % sourcetype)  # die gracefully
    x = np.asarray(x)
    return x  # what we were asked to deliver
def getmeasurements(a, x, noisetype, var=1, outlierproportion=0):
    """
    Produce measurements y = A x + n for a chosen noise model.

    :param a: sensing matrix, shape (m, n)
    :param x: source vector, shape (n, 1)
    :param noisetype: 'none', 'gaussian' or 'outliers'
    :param var: scale factor of the additive Gaussian noise
    :param outlierproportion: fraction of measurements replaced by
        large-variance outliers (only for noisetype == 'outliers')
    :return: measurement vector of shape (m, 1)
    """
    import numpy as np
    import sys  # to be able to exit
    measurementsize = a.shape[0]  # number of measurements
    y = np.dot(a, x)  # noiseless measurements
    if noisetype == 'none':  # noiseless case
        n = np.zeros((measurementsize, 1))  # zero noise
    elif noisetype == 'gaussian':  # gaussian noise
        n = var * np.random.randn(measurementsize, 1)
    elif noisetype == 'outliers':
        # additive Gaussian noise
        n = var * np.random.randn(measurementsize, 1)
        p = np.random.permutation(measurementsize)
        # int() is required: np.round returns a float, which cannot be
        # used to slice p or to size randn() in Python 3
        noutliers = int(np.round(outlierproportion * measurementsize))
        outindex = p[0:noutliers]  # getting random indexes for the outliers
        # the outliers have a variance ten times larger than clean data
        n[outindex] = np.var(y) * 10 * np.random.randn(noutliers, 1)
    else:  # unknown type of additive noise
        sys.exit('unknown noise type %s' % noisetype)  # die gracefully
    yn = y + n  # get the measurements
    yn = np.asarray(yn)
    return yn  # what we were asked to deliver
def rhooptimal(u, clipping):
    """
    Optimal loss function (rho).

    The Fast-Tau Estimator for Regression, Matias SALIBIAN-BARRERA,
    Gert WILLEMS, and Ruben ZAMAR
    www.tandfonline.com/doi/pdf/10.1198/106186008X343785

    The equation is found p. 611. To get the exact formula, it is
    necessary to use 3*c instead of c.

    :param u: residuals (array-like; coerced to ndarray)
    :param clipping: clipping parameter c of the loss
    :return: rho evaluated elementwise, same shape as u
    """
    import numpy as np
    # coerce to ndarray so plain-list inputs work; this also makes the
    # routine consistent with its twin in toolboxutilities_latest.py
    u = np.array(u)
    y = np.abs(u / clipping)
    r = np.ones(u.shape)  # constant part beyond |u| = 3c
    i = y <= 2.  # middle part of the curve
    r[i] = y[i] ** 2 / 2. / 3.25
    i = np.logical_and(y > 2, y <= 3)  # intermediate part of the curve
    f = lambda z: (1.792 - 0.972 * z ** 2 + 0.432 * z ** 4
                   - 0.052 * z ** 6 + 0.002 * z ** 8) / 3.25
    r[i] = f(y[i])
    return r
def weights(u, kind, lossfunction, clipping, nmeasurements):
    """
    IRLS weights w(u) = psi(u) / (2 u) for the M- and tau-estimators.

    :param u: residual vector
    :param kind: 'M' or 'tau'
    :param lossfunction: 'huber', 'squared' or 'optimal' (M-estimator only)
    :param clipping: clipping constant(s) forwarded to the score function
    :param nmeasurements: number of measurements (tau normalisation)
    :return: weight array; entries where u == 0 stay at zero
    """
    import sys  # to be able to exit
    import numpy as np

    if kind == 'M':
        # every supported loss maps to the score function of the same name
        if lossfunction not in ('huber', 'squared', 'optimal'):
            sys.exit('unknown type of loss function %s' %
                     lossfunction)  # die gracefully
        psi = scorefunction(u, lossfunction, clipping)
        w = np.zeros(u.shape)
        nz = np.nonzero(u)  # avoid dividing by zero residuals
        w[nz] = psi[nz] / (2 * u[nz])
    elif kind == 'tau':
        # scorefunction implements our psi for the tau estimator
        psi = scorefunction(u, 'tau', clipping)
        w = np.zeros(u.shape)
        nz = np.nonzero(u)  # only the non-zero u elements
        w[nz] = psi[nz] / (2 * nmeasurements * u[nz])
    else:
        sys.exit('unknown type of weights %s' % kind)  # die gracefully

    return w
def mscaleestimator(u, tolerance, b, clipping, kind):
    """
    Iteratively compute the M-scale estimate of the residuals u.

    Starts from the MAD (median absolute deviation, normalised by .6745)
    and refines s until mean(rho(u / s)) is within `tolerance` of b,
    halving the Newton step when it would not decrease the objective.

    :param u: residual vector
    :param tolerance: convergence tolerance on |mean(rho(u/s)) - b|
    :param b: target value of mean(rho(u/s)) (consistency constant)
    :param clipping: clipping parameter passed to rho / score functions
    :param kind: loss-function name forwarded to rhofunction/scorefunction
    :return: the (non-negative) scale estimate |s|
    """
    import numpy as np
    maxiter = 100
    s = np.median(np.abs(u)) / .6745  # initial MAD estimation of the scale
    # guard: if all residuals have zero median deviation, fall back to 1
    if (s==0):
        s=1.0
    # residual of the scale equation at the current s
    rho_old = np.mean(rhofunction(u / s, kind, clipping)) - b
    k = 0
    while np.abs(rho_old) > tolerance and k < maxiter:
        #TODO : I added this test to avoid division by zero
        if (s == 0):
            s=1.0
        # Newton-style step for the scale equation
        delta = rho_old / \
            np.mean(scorefunction(u / s, kind, clipping) * u / s) / s
        isqu = 1
        ok = 0
        # backtracking: halve delta (up to 30 times) until the step
        # actually decreases |rho|
        while isqu < 30 and ok != 1:
            rho_new = np.mean(rhofunction(u / (s + delta), kind, clipping)) - b
            if np.abs(rho_new) < np.abs(rho_old):
                s = s + delta
                ok = 1
            else:
                delta /= 2
                isqu += 1
        if isqu == 30:
            # we tell it to stop, but we keep the iter for info
            maxiter = k
        rho_old = rho_new
        k += 1
    return np.abs(s)
contains many useful functions, which are not related 5 | to solving the inverse problem itself. 6 | Marta Martinez-Camara, EPFL 7 | =================================================================== 8 | """ 9 | 10 | # take the division operator from future versions 11 | from __future__ import division 12 | 13 | 14 | # ------------------------------------------------------------------- 15 | # Getting the source vector 16 | # ------------------------------------------------------------------- 17 | def getsource(sourcetype, sourcesize, k=1): 18 | import numpy as np 19 | import pickle 20 | import sys # to be able to exit 21 | if sourcetype == 'random': # Gaussian iid source, mu = 0, sigma = 1 22 | x = np.random.randn(sourcesize, 1) # source vector 23 | elif sourcetype == 'sparse': 24 | sparsity = k * sourcesize # 20 % sparsity for the source 25 | x = np.zeros((sourcesize, 1)) # initialization with a zero source 26 | p = np.random.permutation(sourcesize) 27 | nonz = p[0:sparsity] # getting random indexes for the nonzero values 28 | # get randomly the value for the non zero elements 29 | x[nonz] = np.random.randn(sparsity, 1) 30 | elif sourcetype == 'constant': 31 | x = np.zeros((sourcesize, 1)) # initialization with a zero source 32 | x[7:15] = 1 # making a piecewise source 33 | 34 | else: # unknown type of source 35 | sys.exit('unknown source type %s' % sourcetype) # die gracefully 36 | x = np.asarray(x) 37 | return x # what we were asked to deliver 38 | 39 | 40 | # ------------------------------------------------------------------- 41 | # Getting the sensing matrix 42 | # ------------------------------------------------------------------- 43 | def getmatrix(sourcesize, matrixtype, measurementsize, conditionnumber=1): 44 | import numpy as np 45 | import pickle 46 | import sys # to be able to exit 47 | if matrixtype == 'random': # Gaussian iid matrix, mu = 0, sigma = 1 48 | a = np.random.randn(measurementsize, sourcesize) # sensing matrix 49 | elif matrixtype == 
'illposed': 50 | # random well conditioned matrix 51 | a = np.random.randn(measurementsize, sourcesize) 52 | u, s, v = np.linalg.svd(a) # get the svd decomposition 53 | nsv = min(sourcesize, measurementsize) # number of sv 54 | # modify the sv to make cond(A) = conditionnumber 55 | s[np.nonzero(s)] = np.linspace(conditionnumber, 1, nsv) 56 | sm = np.zeros((measurementsize, sourcesize)) 57 | sm[:sourcesize, :sourcesize] = np.diag(s) 58 | a = np.dot(u, np.dot(sm, v)) # putting everything together 59 | else: # unknown type of matrix 60 | sys.exit('unknown matrix type %s' % matrixtype) # die gracefully 61 | a = np.asarray(a) 62 | return a # what we were asked to deliver 63 | 64 | 65 | # ------------------------------------------------------------------- 66 | # Getting the measurements 67 | # ------------------------------------------------------------------- 68 | def getmeasurements(a, x, noisetype, var=1, outlierproportion=0): 69 | import numpy as np 70 | import pickle 71 | import sys # to be able to exit 72 | import matplotlib.pyplot as plt 73 | # import statistics as st 74 | measurementsize = a.shape[0] # number of measurements 75 | y = np.dot(a, x) # noiseless measurements 76 | if noisetype == 'none': # noiseless case 77 | n = np.zeros((measurementsize, 1)) # zero noise 78 | elif noisetype == 'gaussian': # gaussian noise 79 | n = var * np.random.randn(measurementsize, 1) 80 | elif noisetype == 'outliers': # gaussian noise 81 | # additive Gaussian noise 82 | n = var * np.random.randn(measurementsize, 1) 83 | p = np.random.permutation(measurementsize) 84 | # how many measurements are outliers 85 | noutliers = np.round(outlierproportion * measurementsize) 86 | outindex = p[0:noutliers] # getting random indexes for the outliers 87 | # the outliers have a variance ten times larger than clean data 88 | n[outindex] = np.var(y) * 10 * np.random.randn(noutliers, 1) 89 | 90 | else: # unknown type of additive noise 91 | sys.exit('unknown noise type %s' % noisetype) # die 
def huber(u, clipping):
    """
    Huber score function: identity on [-clipping, clipping], saturating
    at +/- clipping outside that interval.

    :param u: residuals (array-like; coerced to ndarray)
    :param clipping: positive clipping threshold
    :return: float array of the same shape as u
    """
    import numpy as np
    residuals = np.array(u)  # accept lists as well as arrays
    out = np.zeros(residuals.shape)
    magnitude = np.abs(residuals)

    # linear central section: pass the residual through unchanged
    central = magnitude <= clipping
    out[central] = residuals[central]

    # saturated tails: clamp to +/- clipping, keeping the sign
    saturated = magnitude > clipping
    out[saturated] = np.sign(residuals[saturated]) * clipping
    return out
def scoreoptimal(u, clipping):
    """
    Score (psi) function of the 'optimal' loss.

    Linear for |u| <= 2*clipping, a 7th-degree odd-polynomial taper on
    2*clipping < |u| <= 3*clipping, and identically zero beyond.

    :param u: residuals (array-like; coerced to ndarray)
    :param clipping: clipping parameter c
    :return: float array of the same shape as u
    """
    import numpy as np
    res = np.array(u)
    psi = np.zeros(res.shape)  # zero outside |u| = 3c by construction
    mag = np.abs(res)

    # central linear section
    linear = mag <= 2 * clipping
    psi[linear] = res[linear] / clipping ** 2 / 3.25

    # polynomial taper between 2c and 3c
    taper = np.logical_and(mag > 2 * clipping, mag <= 3 * clipping)

    def _taper(z):
        return (-1.944 * z / clipping ** 2 + 1.728 * z ** 3 / clipping ** 4
                - 0.312 * z ** 5 / clipping ** 6
                + 0.016 * z ** 7 / clipping ** 8) / 3.25

    psi[taper] = _taper(res[taper])
    return psi
/ 3.25 190 | i = np.logical_and(y > 2, y <= 3) # intermediate part of the curve 191 | f = lambda z: (1.792 - 0.972 * z ** 2 + 0.432 * z ** 192 | 4 - 0.052 * z ** 6 + 0.002 * z ** 8) / 3.25 193 | r[i] = f(y[i]) 194 | return r 195 | 196 | 197 | # ------------------------------------------------------------------- 198 | # Weight for the score in the tau 199 | # ------------------------------------------------------------------- 200 | def tauweights(u, lossfunction, clipping): 201 | """ 202 | This routine computes the 'weighttau', necessary to build the psi_tau function 203 | :param u: vector with all arguments we pass to the weights. so we just need to compute to compute this value once 204 | to find the psi_tau 205 | :param lossfunction: huber, bisquare, optimal, etc 206 | :param clipping: the two values of the clipping parameters corresponding to rho_1, rho_2 207 | :return: 208 | """ 209 | 210 | import numpy as np 211 | import sys 212 | if np.sum(scoreoptimal(u, clipping[0]) * u) == 0: 213 | # return np.zeros(u.shape) 214 | return np.ones(u.shape) 215 | if lossfunction == 'optimal': # weights for the rho tau. 216 | w = (np.sum(2. 
* rhooptimal(u, clipping[1]) - scoreoptimal(u, clipping[1]) * u) 217 | ) / np.sum(scoreoptimal(u, clipping[0]) * u) 218 | else: 219 | sys.exit('unknown type of loss function %s' % 220 | lossfunction) # die gracefully 221 | return w 222 | 223 | 224 | # ------------------------------------------------------------------- 225 | # Weight functions for the IRLS 226 | # ------------------------------------------------------------------- 227 | def weights(u, kind, lossfunction, clipping, nmeasurements): 228 | import sys # to be able to exit 229 | import numpy as np 230 | if kind == 'M': # if M-estimator 231 | if lossfunction == 'huber': # with Huber loss function 232 | # call the huber score function 233 | z = scorefunction(u, 'huber', clipping) 234 | w = np.zeros(u.shape) 235 | i = np.nonzero(u) 236 | # definition of the weights for M-estimator 237 | w[i] = z[i] / (2 * u[i]) 238 | elif lossfunction == 'squared': # with square function 239 | # call the ls score function 240 | z = scorefunction(u, 'squared', clipping) 241 | w = np.zeros(u.shape) 242 | i = np.nonzero(u) 243 | w[i] = z[i] / (2 * u[i]) 244 | elif lossfunction == 'optimal': 245 | z = scorefunction(u, 'optimal', clipping) 246 | w = np.zeros(u.shape) 247 | i = np.nonzero(u) 248 | w[i] = z[i] / (2 * u[i]) 249 | else: # unknown loss function 250 | sys.exit('unknown type of loss function %s' % 251 | lossfunction) # die gracefully 252 | elif kind == 'tau': # if tau estimator 253 | # I called scorefunction to our psi function 254 | z = scorefunction(u, 'tau', clipping) 255 | # if r = zero, weights are equal to one 256 | w = np.ones(u.shape) 257 | 258 | # only for the non zero u elements 259 | i = np.nonzero(u) 260 | w[i] = z[i] / (2 * nmeasurements * u[i]) 261 | else: 262 | # unknown method 263 | sys.exit('unknown type of weights %s' % kind) # die gracefully 264 | 265 | return w 266 | 267 | 268 | # ------------------------------------------------------------------- 269 | # M - scale estimator function 270 | # 
# -------------------------------------------------------------------
# M - scale estimator function
# -------------------------------------------------------------------
def mscaleestimator(u, tolerance, b, clipping, kind):
    """
    Compute the M-scale of u, i.e. the solution s of
    mean(rho(u / s)) = b, using damped Newton-type iterations started
    from the MAD.

    :param u: residual vector
    :param tolerance: stop when |mean(rho(u / s)) - b| <= tolerance
    :param b: right-hand side of the M-scale equation (e.g. 0.5)
    :param clipping: clipping parameter of the rho function
    :param kind: name of the loss function ('optimal', ...)
    :return: the (non-negative) M-scale estimate
    """
    import numpy as np
    maxiter = 100

    u = np.array(u)

    s = np.median(np.abs(u)) / .6745  # initial MAD estimation of the scale

    # Guard: a zero MAD (e.g. u identically zero) would make every u / s
    # below divide by zero.  The scale of such a sample is simply zero,
    # which is consistent with tauscale() already special-casing a zero
    # M-scale.
    if s == 0:
        return 0.0

    rho_old = np.mean(rhofunction(u / s, kind, clipping)) - b
    k = 0
    while np.abs(rho_old) > tolerance and k < maxiter:
        # Newton-type step for the scale
        delta = rho_old / \
            np.mean(scorefunction(u / s, kind, clipping) * u / s) / s
        isqu = 1
        ok = 0
        # damp the step (halve delta) until the objective improves
        while isqu < 30 and ok != 1:
            rho_new = np.mean(rhofunction(u / (s + delta), kind, clipping)) - b
            if np.abs(rho_new) < np.abs(rho_old):
                s = s + delta
                ok = 1
            else:
                delta /= 2
                isqu += 1
            if isqu == 30:
                # damping failed 30 times: tell the outer loop to stop,
                # but keep the iteration counter for info
                maxiter = k
        rho_old = rho_new
        k += 1
    return np.abs(s)


# -------------------------------------------------------------------
# Looking for initial solutions
# -------------------------------------------------------------------
def getinitialsolution(y, a):
    """
    Draw random row subsamples of (a, y) until a full-rank n x n system is
    found, then solve it by least squares to produce an initial estimate.

    :param y: measurement vector with m entries
    :param a: model matrix of shape (m, n)
    :return: least-squares solution of a random well-conditioned subsample
    """
    import numpy as np
    import toolboxinverse_latest as inv
    import sys

    m = a.shape[0]  # getting dimensions
    n = a.shape[1]  # getting dimensions
    k = 0  # counting iterations
    while k < 100:
        perm = np.random.permutation(m)
        subsample = perm[0:n]  # random subsample
        ysubsample = y[subsample]  # random measurements
        asubsample = a[subsample, :]  # getting the rows
        r = np.linalg.matrix_rank(asubsample)

        # we assume that in these cases asubsample is well conditioned
        if r == n:
            # use it to generate a solution
            initialx = inv.leastsquares(ysubsample, asubsample)
            return initialx
        else:
            k += 1
            if k == 100:
                # die gracefully
                sys.exit('I could not find initial solutions!')


# -------------------------------------------------------------------
# tau - scale ** 2
# -------------------------------------------------------------------
def tauscale(u, lossfunction, clipping, b, tolerance=1e-5):
    """
    Compute the squared tau-scale of u.

    :param u: residual vector; its first axis is the number of measurements
    :param lossfunction: name of the loss function
    :param clipping: pair (c1, c2) of clipping parameters; c1 is used for
        the M-scale, c2 for the tau-scale sum
    :param b: right-hand side of the M-scale equation
    :param tolerance: tolerance forwarded to the M-scale iteration
    :return: (tau scale) ** 2
    """
    import numpy as np

    m = u.shape[0]  # number of measurements

    mscale = mscaleestimator(
        u, tolerance, b, clipping[0], lossfunction)  # M scale

    # if mscale is zero, tauscale is zero as well
    if mscale == 0:
        tscale = 0
    else:
        # (tau scale) ** 2.  Use 1.0 / m so the average is correct even
        # under Python 2 integer division (the original's 1 / m truncates
        # to 0 for m > 1 when true division is not in effect).
        tscale = mscale ** 2 * (1.0 / m) * np.sum(
            rhofunction(u / mscale, lossfunction, clipping[1]))
    return tscale


# --- tests/test.py ---------------------------------------------------
__author__ = 'GuillaumeBeaud'
'=========================================== LIMIT =====================================' 5 | 6 | 7 | # ============================================== ABOVE IS OK ===================================== 8 | # ============================================== DEMO ===================================== 9 | 10 | import numpy as np 11 | import linvpy as lp 12 | 13 | a = np.matrix([[1, 2], [3, 4], [5, 6]]) 14 | y = np.array([1, 2, 3]) 15 | 16 | # Define your own loss function 17 | class CustomLoss(lp.LossFunction): 18 | 19 | # Set your custom clipping 20 | def __init__(self, clipping=1.5): 21 | lp.LossFunction.__init__(self, clipping) 22 | if clipping is None: 23 | self.clipping = 0.7 24 | 25 | # Define your rho function : you can copy paste this and just change what's 26 | # inside the unit_rho 27 | def rho(self, array): 28 | # rho function of your loss function on ONE single element 29 | def unit_rho(element): 30 | # Simply return clipping * element for example 31 | return element + self.clipping 32 | # Vectorize the function 33 | vfunc = np.vectorize(unit_rho) 34 | return vfunc(array) 35 | 36 | # Define your psi function as the derivative of the rho function : you can 37 | # copy paste this and just change what's inside the unit_rho 38 | def psi(self, array): 39 | # rho function of your loss function on ONE single element 40 | def unit_psi(element): 41 | # Simply return the clipping for example 42 | return 1 43 | # Vectorize the function 44 | vfunc = np.vectorize(unit_psi) 45 | return vfunc(array) 46 | 47 | custom_tau = lp.TauEstimator(loss_function=CustomLoss) 48 | print custom_tau.estimate(a,y) 49 | 50 | 51 | # Define your own regularization 52 | class CustomRegularization(lp.Regularization): 53 | pass 54 | # Define your regularization function here 55 | def regularize(self, a, y, lamb=0): 56 | return np.ones(a.shape[1]) 57 | 58 | # Create your custom tau estimator with custom loss and regularization functions 59 | # Pay attenation to pass the loss function as a 
REFERENCE (without the "()" 60 | # after the name, and the regularization as an OBJECT, i.e. with the "()"). 61 | custom_tau = lp.TauEstimator(regularization=CustomRegularization()) 62 | print custom_tau.estimate(a,y) -------------------------------------------------------------------------------- /tests/test_final.py: -------------------------------------------------------------------------------- 1 | __author__ = 'GuillaumeBeaud' 2 | 3 | import linvpy as lp 4 | import numpy as np 5 | import random 6 | from random import randint 7 | from tests import generate_random as gen 8 | from regularizedtau import toolboxutilities as util 9 | from regularizedtau import toolboxutilities_latest as util_l 10 | from regularizedtau import linvpy_latest as lp_l 11 | 12 | # ===================================== DEFINITIONS =================================== 13 | 14 | TESTING_ITERATIONS = 20 15 | 16 | LOSS_FUNCTIONS = [lp.Huber, lp.Bisquare, lp.Cauchy, lp.Optimal] # references to loss classes, not instances 17 | 18 | 19 | # ===================================== TESTS ==================================== 20 | 21 | # sets the print precision to 20 decimals 22 | np.set_printoptions(precision=10) 23 | 24 | def plot_loss_functions(interval): 25 | for loss in LOSS_FUNCTIONS: 26 | loss = loss() # instanciates the loss functions 27 | loss.plot(interval) 28 | 29 | 30 | def test_MEstimator(): 31 | for loss in LOSS_FUNCTIONS: 32 | m_estimator = lp.MEstimator(loss_function=loss) # creates an m-estimator with each of the loss functions 33 | for i in range(2, TESTING_ITERATIONS): 34 | # random (A,y) tuple with i rows and A has a random number of columns between i and i+100 35 | m_estimator.estimate( 36 | np.random.rand(i, i + randint(0, 100)), 37 | np.random.rand(i).reshape(-1) 38 | ) 39 | 40 | 41 | def test_M_weights(): 42 | toolbox_losses = ['huber', 'optimal'] 43 | lp_losses = [lp.Huber, lp.Optimal] 44 | 45 | for i in range(0, 2): 46 | A = np.random.rand(randint(1, 10), randint(1, 10)) 47 | 
clipping = np.random.uniform(0.1, 5) 48 | 49 | # creates an instance of the loss function with the current clipping 50 | my_loss = lp_losses[i](clipping=clipping) 51 | 52 | uw = util.weights(A, 'M', toolbox_losses[i], clipping, None) 53 | lw = my_loss.m_weights(A) 54 | 55 | np.testing.assert_allclose(uw, lw) 56 | 57 | 58 | def test_MEstimator_ill_conditioned(): 59 | for loss in LOSS_FUNCTIONS: 60 | m_estimator = lp.MEstimator(loss_function=loss) # creates an m-estimator with each of the loss functions 61 | for i in range(2, TESTING_ITERATIONS): 62 | # random (A,y) ill conditioned tuple with i rows 63 | m_estimator.estimate( 64 | gen.generate_random_ill_conditioned(i)[0], 65 | gen.generate_random_ill_conditioned(i)[1].reshape(-1) 66 | ) 67 | 68 | 69 | # this is a crash test that checks that the function never crashes, not a value test 70 | def test_tikhonov(): 71 | tiko = lp.Tikhonov() 72 | for i in range(2, TESTING_ITERATIONS): 73 | # random (A,y) ill conditioned tuple with i rows 74 | tiko.regularize( 75 | gen.generate_random_ill_conditioned(i)[0], 76 | gen.generate_random_ill_conditioned(i)[1].reshape(-1), 77 | lamb=randint(0, 20) 78 | ) 79 | 80 | 81 | # tests the rho_optimal and psi_optimal of LinvPy VS rhooptimal and scoreoptimal of toolbox 82 | def test_Optimal(): 83 | for i in range(2, TESTING_ITERATIONS): 84 | # random clipping between 0.1 and 5 85 | CLIPPING = np.random.uniform(0.1, 5) 86 | 87 | # creates an instance of lp.Optimal 88 | opt = lp.Optimal(clipping=CLIPPING) 89 | 90 | # generates a random vector of size between 0 and 100 91 | y = np.random.rand(randint(1, 100)) 92 | 93 | # optimal rho function of toolbox and optimal rho function of LinvPy 94 | rho_util = util.rhooptimal(np.asarray(y), CLIPPING) 95 | rho_lp = opt.rho(y) 96 | 97 | # optimal psi function of toolbox and optimal psi function of LinvPy 98 | psi_util = util.scoreoptimal(np.asarray(y), CLIPPING) 99 | psi_lp = opt.psi(y) 100 | 101 | # returns an error if the toolbox's rhooptimal and 
lp.Optimal.rho() are not equal 102 | np.testing.assert_allclose(rho_lp, rho_util) 103 | 104 | # returns an error if the toolbox's scoreoptimal and lp.Optimal.psi() are not equal 105 | np.testing.assert_allclose(psi_lp, psi_util) 106 | 107 | 108 | # tests the scorefunction of LinvPy VS scorefunction of toolbox 109 | def test_scorefunction(): 110 | for i in range(2, TESTING_ITERATIONS): 111 | # CLIPPINGS = two random numbers between 0.1 and 5 112 | CLIPPINGS = (np.random.uniform(0.1, 5), np.random.uniform(0.1, 5)) 113 | 114 | # creates an instance of tau estimator with the two random clippings 115 | tau = lp.TauEstimator(clipping_1=CLIPPINGS[0], clipping_2=CLIPPINGS[1], loss_function=lp.Optimal) 116 | 117 | # y = random vector of size between 0 and 100 118 | y = np.random.rand(randint(1, 100)) 119 | 120 | # toolbox's scorefunction 121 | score_util = util.scorefunction(np.asarray(y), 'tau', CLIPPINGS) 122 | 123 | # linvpy's scorefunction 124 | score_lp = tau.score_function(y) 125 | 126 | # returns an error if the toolbox's scorefunction and lp's scorefunction are not equal 127 | np.testing.assert_allclose(score_lp, score_util) 128 | 129 | 130 | # tests linvpy's mscale VS toolbox mscale 131 | def test_mscale(): 132 | for i in range(2, TESTING_ITERATIONS): 133 | # generates a random clipping between 0.1 and 5 134 | CLIPPING = np.random.uniform(0.1, 5) 135 | 136 | # creates an instance of TauEstimator 137 | tau = lp.TauEstimator(clipping_1=CLIPPING, clipping_2=CLIPPING, loss_function=lp.Optimal) 138 | 139 | # generates a random vector of size between 0 and 100 140 | y = np.random.rand(randint(1, 100)) 141 | 142 | # computes the mscale for linvpy and toolbox 143 | linvpy_scale = tau.m_scale(y) 144 | toolbox_scale = util.mscaleestimator(u=y, tolerance=1e-5, b=0.5, clipping=CLIPPING, kind='optimal') 145 | 146 | # verifies that both results are the same 147 | assert toolbox_scale == linvpy_scale 148 | 149 | 150 | def test_tau_scale(): 151 | for i in range(2, 
TESTING_ITERATIONS): 152 | # generates random clipping between 0.1 and 5 153 | clipping_1 = np.random.uniform(0.1, 5) 154 | clipping_2 = np.random.uniform(0.1, 5) 155 | 156 | # generates a random vector of size between 0 and 100 157 | x = np.random.rand(randint(1, 100)) 158 | 159 | my_tau = lp.TauEstimator(loss_function=lp.Optimal, clipping_1=clipping_1, clipping_2=clipping_2) 160 | 161 | linvpy_t = my_tau.tau_scale(x) 162 | util_t = util_l.tauscale(x, lossfunction='optimal', b=0.5, clipping=(clipping_1, clipping_2)) 163 | 164 | np.testing.assert_allclose(linvpy_t, util_t) 165 | 166 | 167 | def test_M_estimator_VS_Marta(): 168 | for i in range(3, TESTING_ITERATIONS): 169 | NOISE = np.random.uniform(0, 1.0) 170 | # NOISE = 0 171 | # lamb = np.random.uniform(0,1.0) 172 | lamb = 0 173 | clipping = np.random.uniform(0.1, 5) 174 | 175 | A, x, y, initial_vector, initial_scale = gen.gen_noise(i, i, NOISE) 176 | 177 | xhat_marta = lp_l.irls( 178 | matrix_a=A, 179 | vector_y=y, 180 | loss_function='huber', 181 | kind='M', 182 | regularization=lp_l.tikhonov_regularization, 183 | lamb=lamb, 184 | initial_x=initial_vector.reshape(-1, 1), 185 | scale=initial_scale, 186 | clipping=clipping) 187 | 188 | my_m = lp.MEstimator(clipping=clipping, 189 | loss_function=lp.Huber, 190 | scale=initial_scale, 191 | lamb=lamb) 192 | 193 | xhat_linvpy = my_m.estimate(A, y, initial_x=initial_vector) 194 | 195 | # print 'xhat marta = ', xhat_marta 196 | # print 'xhat linvpy = ', xhat_linvpy 197 | # print 'real x = ', x 198 | # very robust test; passes sometimes and sometimes not (a difference of 0.0000001 makes it fail) 199 | np.testing.assert_allclose(xhat_linvpy, xhat_marta) 200 | # print '==========================================' 201 | 202 | 203 | # This test checks LinvPy2.0's tau estimator on all possible inputs and verify there's no error. 
# This test checks LinvPy2.0's tau estimator on all possible inputs and
# verifies there's no error.
# NB: this DOES NOT test the mathematical correctness of outputs, it only
# tests that TauEstimator() can handle any types of inputs without crashing.
# For mathematical correctness, see test_TauEstimator_VS_Marta()
def test_TauEstimator_alone():
    for i in range(2, TESTING_ITERATIONS):

        # tests all regularizations
        for reg in (lp.Tikhonov(), lp.Lasso()):
            # tests all loss functions
            for loss in LOSS_FUNCTIONS:

                # initiates random inputs
                lamb = randint(0, 20)
                c1 = np.random.uniform(0.1, 5)
                c2 = np.random.uniform(0.1, 5)

                # clippings are randomly either a number or None, with a
                # 3:1 predominance for a number
                clipping_1 = random.choice([c1, c1, c1, None])
                clipping_2 = random.choice([c2, c2, c2, None])

                # creates a tau-estimator for each loss/regularization combo
                tau_estimator = lp.TauEstimator(
                    loss_function=loss,
                    regularization=reg,
                    lamb=lamb,
                    clipping_1=clipping_1,
                    clipping_2=clipping_2)

                # random (A, y) tuple: i rows, i..i+100 columns
                tau_estimator.estimate(
                    a=np.random.rand(i, i + randint(0, 100)),
                    y=np.random.rand(i).reshape(-1)
                )


def test_score_function_is_odd():
    # the score function must have the same sign as its argument
    for loss in LOSS_FUNCTIONS:

        my_tau = lp.TauEstimator(loss_function=loss)

        for _ in range(2, TESTING_ITERATIONS):

            # random vector with both negative and positive values
            y = np.random.randn(100)

            score = my_tau.score_function(y)

            # use len() instead of score.__len__(), and a dedicated index
            # so the outer loop variable is no longer shadowed
            for j in range(len(score)):
                assert np.sign(score[j]) == np.sign(y[j])


def test_TauEstimator_VS_Marta():
    for i in range(2, TESTING_ITERATIONS):
        # generates random clippings between 0.1 and 5
        clipping_1 = np.random.uniform(0.1, 5)
        clipping_2 = np.random.uniform(0.1, 5)

        # a single initial solution for the reference implementation
        # (fixed, not random, despite the original comment)
        n_initial_x = 1

        # generates a random matrix of size i x (i + random(0, 10))
        A = np.random.rand(i, i + randint(0, 10))

        # generates a random vector of size i
        y = np.random.rand(i)

        my_tau = lp.TauEstimator(loss_function=lp.Optimal,
                                 clipping_1=clipping_1,
                                 clipping_2=clipping_2)

        linvpy_output = my_tau.estimate(a=A, y=y)

        marta_t = lp_l.basictau(
            a=A,
            y=np.matrix(y),
            loss_function='optimal',
            b=0.5,
            clipping=(clipping_1, clipping_2),
            ninitialx=n_initial_x
        )

        # asserts the xhat estimates are the same
        np.testing.assert_allclose(linvpy_output[0].reshape(-1, 1), marta_t[0])


# test that simply covers the fast_tau for code coverage purpose
def cover_fast_tau():
    my_tau = lp.TauEstimator()
    A = np.matrix([[2, 2], [3, 4], [7, 6]])
    y = np.array([1, 4, 3])
    my_tau.fast_estimate(A, y)


# ===================================== MAIN ==================================


# plot_loss_functions(15)

test_TauEstimator_alone()

test_TauEstimator_VS_Marta()

test_M_weights()

test_MEstimator()

test_MEstimator_ill_conditioned()

test_tikhonov()

test_Optimal()

test_scorefunction()

test_mscale()

test_tau_scale()

test_M_estimator_VS_Marta()

test_score_function_is_odd()

cover_fast_tau()
--------------------------------------------------------------------------------