├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.md │ ├── enhancement.md │ └── feature_request.md ├── PULL_REQUEST_TEMPLATE │ └── pull_request_template.md └── workflows │ ├── main.yml │ └── publish-to-pypi.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── .tools ├── create_algo_selection_code.py ├── envs │ ├── testenv-linux.yml │ ├── testenv-numpy.yml │ ├── testenv-others.yml │ └── testenv-pandas.yml ├── test_create_algo_selection_code.py ├── update_algo_selection_hook.py └── update_envs.py ├── .yamllint.yml ├── CHANGES.md ├── CITATION ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── codecov.yml ├── docs ├── Makefile ├── make.bat ├── rtd_environment.yml └── source │ ├── _static │ ├── css │ │ ├── custom.css │ │ ├── termynal.css │ │ └── termynal_custom.css │ ├── images │ │ ├── aai-institute-logo.svg │ │ ├── aida.jpg │ │ ├── annica.jpeg │ │ ├── autocomplete_1.png │ │ ├── autocomplete_2.png │ │ ├── bahar.jpg │ │ ├── book.svg │ │ ├── books.svg │ │ ├── bullseye.svg │ │ ├── coding.svg │ │ ├── dashboard.gif │ │ ├── differentiation.svg │ │ ├── estimagic_icon.svg │ │ ├── estimagic_icon_dark_mode.svg │ │ ├── estimagic_logo.svg │ │ ├── estimagic_logo_dark_mode.svg │ │ ├── history_cobyla.gif │ │ ├── history_l-bfgs-b.gif │ │ ├── history_nelder-mead.gif │ │ ├── history_trust-ncg.gif │ │ ├── hmg.jpg │ │ ├── hoover_logo.png │ │ ├── installation.svg │ │ ├── janos.jpg │ │ ├── ken.jpeg │ │ ├── klara.jpg │ │ ├── light-bulb.svg │ │ ├── list.svg │ │ ├── logo.svg │ │ ├── mariam.jpg │ │ ├── miscellaneous.svg │ │ ├── numfocus_logo.png │ │ ├── optimagic_logo.svg │ │ ├── optimagic_logo_dark_mode.svg │ │ ├── optimization.svg │ │ ├── sebi.jpg │ │ ├── stylized_direct_search.gif │ │ ├── stylized_gradient_based_trust_region.gif │ │ ├── stylized_gradient_free_trust_region.gif │ │ ├── stylized_line_search.gif │ │ ├── tim.jpeg │ │ ├── tobi.png │ │ ├── tra_logo.png │ │ ├── transferlab-logo.svg │ │ └── video.svg │ └── js │ │ ├── custom.js │ │ └── termynal.js │ ├── algorithms.md 
│ ├── conf.py │ ├── development │ ├── changes.md │ ├── code_of_conduct.md │ ├── credits.md │ ├── enhancement_proposals.md │ ├── ep-00-governance-model.md │ ├── ep-01-pytrees.md │ ├── ep-02-typing.md │ ├── ep-03-alignment.md │ ├── how_to_contribute.md │ ├── index.md │ └── styleguide.md │ ├── estimagic │ ├── explanation │ │ ├── bootstrap_ci.md │ │ ├── bootstrap_montecarlo_comparison.ipynb │ │ ├── cluster_robust_likelihood_inference.md │ │ └── index.md │ ├── index.md │ ├── reference │ │ └── index.md │ └── tutorials │ │ ├── bootstrap_overview.ipynb │ │ ├── estimation_tables_overview.ipynb │ │ ├── example_estimation_table_tex.pdf │ │ ├── index.md │ │ ├── likelihood_overview.ipynb │ │ └── msm_overview.ipynb │ ├── explanation │ ├── explanation_of_numerical_optimizers.md │ ├── implementation_of_constraints.md │ ├── index.md │ ├── internal_optimizers.md │ ├── numdiff_background.md │ ├── tests_for_supported_optimizers.md │ └── why_optimization_is_hard.ipynb │ ├── how_to │ ├── how_to_add_optimizers.ipynb │ ├── how_to_algorithm_selection.ipynb │ ├── how_to_benchmarking.ipynb │ ├── how_to_bounds.ipynb │ ├── how_to_constraints.md │ ├── how_to_criterion_function.ipynb │ ├── how_to_derivatives.ipynb │ ├── how_to_errors_during_optimization.ipynb │ ├── how_to_globalization.ipynb │ ├── how_to_logging.ipynb │ ├── how_to_multistart.ipynb │ ├── how_to_scaling.md │ ├── how_to_slice_plot.ipynb │ ├── how_to_specify_algorithm_and_algo_options.md │ ├── how_to_start_parameters.md │ ├── how_to_visualize_histories.ipynb │ └── index.md │ ├── index.md │ ├── installation.md │ ├── reference │ ├── algo_options.md │ ├── batch_evaluators.md │ ├── index.md │ └── utilities.md │ ├── refs.bib │ ├── tutorials │ ├── index.md │ ├── numdiff_overview.ipynb │ └── optimization_overview.ipynb │ └── videos.md ├── environment.yml ├── pyproject.toml ├── src ├── estimagic │ ├── __init__.py │ ├── batch_evaluators.py │ ├── bootstrap.py │ ├── bootstrap_ci.py │ ├── bootstrap_helpers.py │ ├── bootstrap_outcomes.py │ ├── 
bootstrap_samples.py │ ├── config.py │ ├── estimate_ml.py │ ├── estimate_msm.py │ ├── estimation_summaries.py │ ├── estimation_table.py │ ├── examples │ │ ├── __init__.py │ │ ├── diabetes.csv │ │ ├── exam_points.csv │ │ ├── logit.py │ │ └── sensitivity_probit_example_data.csv │ ├── lollipop_plot.py │ ├── ml_covs.py │ ├── msm_covs.py │ ├── msm_sensitivity.py │ ├── msm_weighting.py │ ├── py.typed │ ├── shared_covs.py │ └── utilities.py └── optimagic │ ├── __init__.py │ ├── algorithms.py │ ├── batch_evaluators.py │ ├── benchmarking │ ├── __init__.py │ ├── benchmark_reports.py │ ├── cartis_roberts.py │ ├── get_benchmark_problems.py │ ├── more_wild.py │ ├── noise_distributions.py │ ├── process_benchmark_results.py │ └── run_benchmark.py │ ├── config.py │ ├── constraints.py │ ├── decorators.py │ ├── deprecations.py │ ├── differentiation │ ├── __init__.py │ ├── derivatives.py │ ├── finite_differences.py │ ├── generate_steps.py │ ├── numdiff_options.py │ └── richardson_extrapolation.py │ ├── examples │ ├── __init__.py │ ├── criterion_functions.py │ └── numdiff_functions.py │ ├── exceptions.py │ ├── logging │ ├── __init__.py │ ├── base.py │ ├── logger.py │ ├── read_log.py │ ├── sqlalchemy.py │ └── types.py │ ├── mark.py │ ├── optimization │ ├── __init__.py │ ├── algo_options.py │ ├── algorithm.py │ ├── convergence_report.py │ ├── create_optimization_problem.py │ ├── error_penalty.py │ ├── fun_value.py │ ├── history.py │ ├── internal_optimization_problem.py │ ├── multistart.py │ ├── multistart_options.py │ ├── optimization_logging.py │ ├── optimize.py │ ├── optimize_result.py │ ├── process_results.py │ └── scipy_aliases.py │ ├── optimizers │ ├── __init__.py │ ├── _pounders │ │ ├── __init__.py │ │ ├── _conjugate_gradient.py │ │ ├── _steihaug_toint.py │ │ ├── _trsbox.py │ │ ├── bntr.py │ │ ├── gqtpar.py │ │ ├── linear_subsolvers.py │ │ ├── pounders_auxiliary.py │ │ └── pounders_history.py │ ├── bhhh.py │ ├── fides.py │ ├── iminuit_migrad.py │ ├── ipopt.py │ ├── 
nag_optimizers.py │ ├── neldermead.py │ ├── nevergrad_optimizers.py │ ├── nlopt_optimizers.py │ ├── pounders.py │ ├── pygmo_optimizers.py │ ├── scipy_optimizers.py │ ├── tao_optimizers.py │ └── tranquilo.py │ ├── parameters │ ├── __init__.py │ ├── block_trees.py │ ├── bounds.py │ ├── check_constraints.py │ ├── consolidate_constraints.py │ ├── constraint_tools.py │ ├── conversion.py │ ├── kernel_transformations.py │ ├── nonlinear_constraints.py │ ├── process_constraints.py │ ├── process_selectors.py │ ├── scale_conversion.py │ ├── scaling.py │ ├── space_conversion.py │ ├── tree_conversion.py │ └── tree_registry.py │ ├── py.typed │ ├── shared │ ├── __init__.py │ ├── check_option_dicts.py │ ├── compat.py │ └── process_user_function.py │ ├── timing.py │ ├── type_conversion.py │ ├── typing.py │ ├── utilities.py │ └── visualization │ ├── __init__.py │ ├── convergence_plot.py │ ├── deviation_plot.py │ ├── history_plots.py │ ├── plotting_utilities.py │ ├── profile_plot.py │ └── slice_plot.py └── tests ├── __init__.py ├── conftest.py ├── estimagic ├── __init__.py ├── examples │ └── test_logit.py ├── pickled_statsmodels_ml_covs │ ├── logit_hessian.pickle │ ├── logit_hessian_matrix.pickle │ ├── logit_jacobian.pickle │ ├── logit_jacobian_matrix.pickle │ ├── logit_sandwich.pickle │ ├── probit_hessian.pickle │ ├── probit_hessian_matrix.pickle │ ├── probit_jacobian.pickle │ ├── probit_jacobian_matrix.pickle │ └── probit_sandwich.pickle ├── test_bootstrap.py ├── test_bootstrap_ci.py ├── test_bootstrap_outcomes.py ├── test_bootstrap_samples.py ├── test_estimate_ml.py ├── test_estimate_msm.py ├── test_estimate_msm_dict_params_and_moments.py ├── test_estimation_table.py ├── test_lollipop_plot.py ├── test_ml_covs.py ├── test_msm_covs.py ├── test_msm_sensitivity.py ├── test_msm_sensitivity_via_estimate_msm.py ├── test_msm_weighting.py └── test_shared.py └── optimagic ├── __init__.py ├── benchmarking ├── __init__.py ├── test_benchmark_reports.py ├── test_cartis_roberts.py ├── 
test_get_benchmark_problems.py ├── test_more_wild.py ├── test_noise_distributions.py └── test_run_benchmark.py ├── differentiation ├── binary_choice_inputs.pickle ├── test_compare_derivatives_with_jax.py ├── test_derivatives.py ├── test_finite_differences.py ├── test_generate_steps.py └── test_numdiff_options.py ├── examples └── test_criterion_functions.py ├── logging ├── test_base.py ├── test_logger.py ├── test_sqlalchemy.py └── test_types.py ├── optimization ├── test_algorithm.py ├── test_convergence_report.py ├── test_create_optimization_problem.py ├── test_error_penalty.py ├── test_fun_value.py ├── test_function_formats_ls.py ├── test_function_formats_scalar.py ├── test_history.py ├── test_history_collection.py ├── test_internal_optimization_problem.py ├── test_invalid_jacobian_value.py ├── test_jax_derivatives.py ├── test_many_algorithms.py ├── test_multistart.py ├── test_multistart_options.py ├── test_optimize.py ├── test_optimize_result.py ├── test_params_versions.py ├── test_process_result.py ├── test_pygmo_optimizers.py ├── test_scipy_aliases.py ├── test_useful_exceptions.py ├── test_with_advanced_constraints.py ├── test_with_bounds.py ├── test_with_constraints.py ├── test_with_logging.py ├── test_with_multistart.py ├── test_with_nonlinear_constraints.py └── test_with_scaling.py ├── optimizers ├── __init__.py ├── _pounders │ ├── __init__.py │ ├── fixtures │ │ ├── add_points_until_main_model_fully_linear_i.yaml │ │ ├── add_points_until_main_model_fully_linear_ii.yaml │ │ ├── find_affine_points_nonzero_i.yaml │ │ ├── find_affine_points_nonzero_ii.yaml │ │ ├── find_affine_points_nonzero_iii.yaml │ │ ├── find_affine_points_zero_i.yaml │ │ ├── find_affine_points_zero_ii.yaml │ │ ├── find_affine_points_zero_iii.yaml │ │ ├── find_affine_points_zero_iv.yaml │ │ ├── get_coefficients_residual_model.yaml │ │ ├── get_interpolation_matrices_residual_model.yaml │ │ ├── interpolate_f_iter_4.yaml │ │ ├── interpolate_f_iter_7.yaml │ │ ├── pounders_example_data.csv │ │ ├── 
scalar_model.pkl │ │ ├── update_initial_residual_model.yaml │ │ ├── update_intial_residual_model.yaml │ │ ├── update_main_from_residual_model.yaml │ │ ├── update_main_with_new_accepted_x.yaml │ │ ├── update_residual_model.yaml │ │ └── update_residual_model_with_new_accepted_x.yaml │ ├── test_linear_subsolvers.py │ ├── test_pounders_history.py │ ├── test_pounders_unit.py │ └── test_quadratic_subsolvers.py ├── test_bhhh.py ├── test_fides_options.py ├── test_iminuit_migrad.py ├── test_ipopt_options.py ├── test_nag_optimizers.py ├── test_neldermead.py ├── test_pounders_integration.py └── test_tao_optimizers.py ├── parameters ├── test_block_trees.py ├── test_bounds.py ├── test_check_constraints.py ├── test_constraint_tools.py ├── test_conversion.py ├── test_kernel_transformations.py ├── test_nonlinear_constraints.py ├── test_process_constraints.py ├── test_process_selectors.py ├── test_scale_conversion.py ├── test_scaling.py ├── test_space_conversion.py ├── test_tree_conversion.py └── test_tree_registry.py ├── shared ├── __init__.py └── test_process_user_functions.py ├── test_algo_selection.py ├── test_batch_evaluators.py ├── test_constraints.py ├── test_decorators.py ├── test_deprecations.py ├── test_mark.py ├── test_timing.py ├── test_type_conversion.py ├── test_typed_dicts_consistency.py ├── test_utilities.py └── visualization ├── test_convergence_plot.py ├── test_deviation_plot.py ├── test_history_plots.py ├── test_profile_plot.py └── test_slice_plot.py /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Bug description 11 | 12 | A clear and concise description of what the bug is. 13 | 14 | ### To Reproduce 15 | 16 | Ideally, provide a minimal code example. If that's not possible, describe steps to reproduce the bug. 
17 | 18 | ### Expected behavior 19 | 20 | A clear and concise description of what you expected to happen. 21 | 22 | ### Screenshots/Error messages 23 | 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | ### System 27 | 28 | - OS: [e.g. Ubuntu 18.04] 29 | - Version [e.g. 0.0.1] 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/enhancement.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Enhancement 3 | about: Enhance an existing component. 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | * optimagic version used, if any: 11 | * Python version, if any: 12 | * Operating System: 13 | 14 | ### What would you like to enhance and why? Is it related to an issue/problem? 15 | 16 | A clear and concise description of the current implementation and its limitations. 17 | 18 | ### Describe the solution you'd like 19 | 20 | A clear and concise description of what you want to happen. 21 | 22 | ### Describe alternatives you've considered 23 | 24 | A clear and concise description of any alternative solutions or features you've 25 | considered and why you have discarded them. 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: feature-request 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Current situation 11 | 12 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]; Currently there is no way of [...] 13 | 14 | ### Desired Situation 15 | 16 | What functionality should become possible or easier? 17 | 18 | ### Proposed implementation 19 | 20 | How would you implement the new feature? 
Did you consider alternative implementations? 21 | You can start by describing interface changes like a new argument or a new function. There is no need to get too detailed here. 22 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ### What problem do you want to solve? 2 | 3 | Reference the issue or discussion, if there is any. Provide a description of your 4 | proposed solution. 5 | 6 | ### Todo 7 | 8 | - [ ] Target the right branch and pick an appropriate title. 9 | - [ ] Put `Closes #XXXX` in the first PR comment to auto-close the relevant issue once 10 | the PR is accepted. This is not applicable if there is no corresponding issue. 11 | - [ ] Any steps that still need to be done. 12 | -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: PyPI 3 | on: push 4 | jobs: 5 | build-n-publish: 6 | name: Build and publish optimagic Python 🐍 distributions 📦 to PyPI 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | - name: Set up Python 3.10 11 | uses: actions/setup-python@v5 12 | with: 13 | python-version: '3.10' 14 | - name: Install pypa/build 15 | run: >- 16 | python -m 17 | pip install 18 | build 19 | --user 20 | - name: Build a binary wheel and a source tarball 21 | run: >- 22 | python -m 23 | build 24 | --sdist 25 | --wheel 26 | --outdir dist/ 27 | - name: Publish distribution 📦 to PyPI 28 | if: startsWith(github.ref, 'refs/tags') 29 | uses: pypa/gh-action-pypi-publish@release/v1 30 | with: 31 | password: ${{ secrets.PYPI_API_TOKEN_OPTIMAGIC }} 32 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # MacOS specific service store 7 | .DS_Store 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | *build/ 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | *.sublime-workspace 38 | *.sublime-project 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | docs/build/ 75 | docs/source/_build/ 76 | docs/source/**/*.db 77 | docs/source/**/*.db-shm 78 | docs/source/**/*.db-wal 79 | docs/source/refs.bib.bak 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # celery beat schedule file 91 | celerybeat-schedule 92 | 93 | # SageMath parsed files 94 | *.sage.py 95 | 96 | # Environments 97 | .env 98 | .venv 99 | env/ 100 | venv/ 101 | ENV/ 102 | env.bak/ 103 | venv.bak/ 104 | 105 | # Spyder project settings 106 | .spyderproject 107 | .spyproject 108 | 109 | # VSCode project settings 110 | .vscode 111 | 112 | # Rope 
project settings 113 | .ropeproject 114 | 115 | # mkdocs documentation 116 | /site 117 | 118 | # mypy 119 | .mypy_cache/ 120 | 121 | *notes/ 122 | 123 | .idea/ 124 | 125 | *.bak 126 | 127 | 128 | *.db 129 | 130 | 131 | .pytask.sqlite3 132 | 133 | 134 | src/estimagic/_version.py 135 | src/optimagic/_version.py 136 | 137 | *.~lock.* 138 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: mambaforge-4.10 7 | conda: 8 | environment: docs/rtd_environment.yml 9 | sphinx: 10 | builder: html 11 | configuration: docs/source/conf.py 12 | fail_on_warning: false 13 | -------------------------------------------------------------------------------- /.tools/envs/testenv-linux.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - petsc4py 8 | - jax 9 | - cyipopt>=1.4.0 # dev, tests 10 | - pygmo>=2.19.0 # dev, tests, docs 11 | - nlopt # dev, tests, docs 12 | - pip # dev, tests, docs 13 | - pytest # dev, tests 14 | - pytest-cov # tests 15 | - pytest-xdist # dev, tests 16 | - statsmodels # dev, tests 17 | - cloudpickle # run, tests 18 | - joblib # run, tests 19 | - numpy >= 2 # run, tests 20 | - pandas # run, tests 21 | - plotly<6.0.0 # run, tests 22 | - pybaum>=0.1.2 # run, tests 23 | - scipy>=1.2.1 # run, tests 24 | - sqlalchemy # run, tests 25 | - seaborn # dev, tests 26 | - mypy=1.14.1 # dev, tests 27 | - pyyaml # dev, tests 28 | - jinja2 # dev, tests 29 | - annotated-types # dev, tests 30 | - iminuit # dev, tests 31 | - pip: # dev, tests, docs 32 | - nevergrad # dev, tests 33 | - DFO-LS>=1.5.3 # dev, tests 34 | - Py-BOBYQA # dev, tests 35 | - fides==0.7.4 # dev, tests 36 | - kaleido # dev, tests 37 | - pandas-stubs # dev, tests 38 | - 
types-cffi # dev, tests 39 | - types-openpyxl # dev, tests 40 | - types-jinja2 # dev, tests 41 | - sqlalchemy-stubs # dev, tests 42 | - sphinxcontrib-mermaid # dev, tests, docs 43 | - -e ../../ 44 | -------------------------------------------------------------------------------- /.tools/envs/testenv-numpy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - pandas>=2 8 | - numpy<2 9 | - cyipopt>=1.4.0 # dev, tests 10 | - pygmo>=2.19.0 # dev, tests, docs 11 | - nlopt # dev, tests, docs 12 | - pip # dev, tests, docs 13 | - pytest # dev, tests 14 | - pytest-cov # tests 15 | - pytest-xdist # dev, tests 16 | - statsmodels # dev, tests 17 | - cloudpickle # run, tests 18 | - joblib # run, tests 19 | - plotly<6.0.0 # run, tests 20 | - pybaum>=0.1.2 # run, tests 21 | - scipy>=1.2.1 # run, tests 22 | - sqlalchemy # run, tests 23 | - seaborn # dev, tests 24 | - mypy=1.14.1 # dev, tests 25 | - pyyaml # dev, tests 26 | - jinja2 # dev, tests 27 | - annotated-types # dev, tests 28 | - iminuit # dev, tests 29 | - pip: # dev, tests, docs 30 | - nevergrad # dev, tests 31 | - DFO-LS>=1.5.3 # dev, tests 32 | - Py-BOBYQA # dev, tests 33 | - fides==0.7.4 # dev, tests 34 | - kaleido # dev, tests 35 | - types-cffi # dev, tests 36 | - types-openpyxl # dev, tests 37 | - types-jinja2 # dev, tests 38 | - sqlalchemy-stubs # dev, tests 39 | - sphinxcontrib-mermaid # dev, tests, docs 40 | - -e ../../ 41 | -------------------------------------------------------------------------------- /.tools/envs/testenv-others.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - cyipopt>=1.4.0 # dev, tests 8 | - pygmo>=2.19.0 # dev, tests, docs 9 | - nlopt # dev, tests, docs 10 | - pip # dev, tests, docs 11 | - pytest # dev, tests 12 | - 
pytest-cov # tests 13 | - pytest-xdist # dev, tests 14 | - statsmodels # dev, tests 15 | - cloudpickle # run, tests 16 | - joblib # run, tests 17 | - numpy >= 2 # run, tests 18 | - pandas # run, tests 19 | - plotly<6.0.0 # run, tests 20 | - pybaum>=0.1.2 # run, tests 21 | - scipy>=1.2.1 # run, tests 22 | - sqlalchemy # run, tests 23 | - seaborn # dev, tests 24 | - mypy=1.14.1 # dev, tests 25 | - pyyaml # dev, tests 26 | - jinja2 # dev, tests 27 | - annotated-types # dev, tests 28 | - iminuit # dev, tests 29 | - pip: # dev, tests, docs 30 | - nevergrad # dev, tests 31 | - DFO-LS>=1.5.3 # dev, tests 32 | - Py-BOBYQA # dev, tests 33 | - fides==0.7.4 # dev, tests 34 | - kaleido # dev, tests 35 | - pandas-stubs # dev, tests 36 | - types-cffi # dev, tests 37 | - types-openpyxl # dev, tests 38 | - types-jinja2 # dev, tests 39 | - sqlalchemy-stubs # dev, tests 40 | - sphinxcontrib-mermaid # dev, tests, docs 41 | - -e ../../ 42 | -------------------------------------------------------------------------------- /.tools/envs/testenv-pandas.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - pandas<2 8 | - numpy<2 9 | - cyipopt>=1.4.0 # dev, tests 10 | - pygmo>=2.19.0 # dev, tests, docs 11 | - nlopt # dev, tests, docs 12 | - pip # dev, tests, docs 13 | - pytest # dev, tests 14 | - pytest-cov # tests 15 | - pytest-xdist # dev, tests 16 | - statsmodels # dev, tests 17 | - cloudpickle # run, tests 18 | - joblib # run, tests 19 | - plotly<6.0.0 # run, tests 20 | - pybaum>=0.1.2 # run, tests 21 | - scipy>=1.2.1 # run, tests 22 | - sqlalchemy # run, tests 23 | - seaborn # dev, tests 24 | - mypy=1.14.1 # dev, tests 25 | - pyyaml # dev, tests 26 | - jinja2 # dev, tests 27 | - annotated-types # dev, tests 28 | - iminuit # dev, tests 29 | - pip: # dev, tests, docs 30 | - nevergrad # dev, tests 31 | - DFO-LS>=1.5.3 # dev, tests 32 | - Py-BOBYQA # dev, 
from create_algo_selection_code import _generate_category_combinations


def test_generate_category_combinations() -> None:
    """Non-empty category subsets come back largest-first, in stable order."""
    result = _generate_category_combinations(["a", "b", "c"])
    want = [
        ("a", "b", "c"),
        ("a", "b"),
        ("a", "c"),
        ("b", "c"),
        ("a",),
        ("b",),
        ("c",),
    ]
    assert result == want
# "-m" lets us invoke runnable modules (e.g. pip) through the same interpreter
PYTHON_MINUS_M = [*PYTHON, "-m"]


def run(cmd: list[str], **kwargs: Any) -> None:
    """Run *cmd* from the repository root, raising on a non-zero exit code."""
    subprocess.check_call(cmd, cwd=ROOT, **kwargs)


def ensure_optimagic_is_locally_installed() -> None:
    """Install optimagic in editable mode unless pip already knows about it."""
    try:
        run([*PYTHON_MINUS_M, "pip", "show", "optimagic"], stdout=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        run([*PYTHON_MINUS_M, "pip", "install", "-e", "."])


def main() -> int:
    """Regenerate src/optimagic/algorithms.py and normalize its formatting."""
    ensure_optimagic_is_locally_installed()
    run([*PYTHON, ".tools/create_algo_selection_code.py"])

    shared_ruff_args = [
        "--silent",
        "--config",
        "pyproject.toml",
        "src/optimagic/algorithms.py",
    ]
    # Format first, then autofix lint findings, on the generated module only.
    for ruff_cmd in (["ruff", "format"], ["ruff", "check", "--fix"]):
        run([*ruff_cmd, *shared_ruff_args])
    return 0  # explicit success code


if __name__ == "__main__":
    sys.exit(main())
def main() -> None:
    """Derive the CI test environments in .tools/envs/ from environment.yml.

    Reads the top-level environment.yml, keeps only the lines whose trailing
    comment carries the "tests" flag (see _keep_line), and writes one
    specialised copy per CI job: linux, others, pandas<2, and numpy<2.
    """
    lines = Path("environment.yml").read_text().splitlines()

    # create standard testing environments

    test_env = [line for line in lines if _keep_line(line, "tests")]
    test_env.append(" - -e ../../")  # add local installation

    # find index to insert additional dependencies
    _insert_idx = [i for i, line in enumerate(lines) if "dependencies:" in line][0] + 1

    ## linux
    # Both inserts land at the same index, so petsc4py ends up before jax.
    test_env_linux = deepcopy(test_env)
    test_env_linux.insert(_insert_idx, " - jax")
    test_env_linux.insert(_insert_idx, " - petsc4py")

    ## test environment others
    test_env_others = deepcopy(test_env)

    ## test environment for pandas version < 2 (requires numpy < 2)
    # Drop any existing numpy/pandas pins, then pin each below major version 2.
    test_env_pandas = deepcopy(test_env)
    for pkg in ["numpy", "pandas"]:
        test_env_pandas = [line for line in test_env_pandas if pkg not in line]
        test_env_pandas.insert(_insert_idx, f" - {pkg}<2")

    ## test environment for numpy version < 2 (with pandas >= 2)
    test_env_numpy = deepcopy(test_env)
    for pkg in ["numpy", "pandas"]:
        test_env_numpy = [line for line in test_env_numpy if pkg not in line]
    test_env_numpy.insert(_insert_idx, " - numpy<2")
    test_env_numpy.insert(_insert_idx, " - pandas>=2")

    # test environment for documentation
    # NOTE(review): docs_env is built but never written below — confirm whether
    # that is intentional or a follow-up writes it elsewhere.
    docs_env = [line for line in lines if _keep_line(line, "docs")]
    docs_env.append(" - -e ../../")  # add local installation

    # write environments
    for name, env in zip(
        ["linux", "others", "pandas", "numpy"],
        [test_env_linux, test_env_others, test_env_pandas, test_env_numpy],
        strict=False,
    ):
        # Specify newline to avoid wrong line endings on Windows.
        # See: https://stackoverflow.com/a/69869641
        Path(f".tools/envs/testenv-{name}.yml").write_text(
            "\n".join(env) + "\n", newline="\n"
        )


if __name__ == "__main__":
    main()
20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2019-2021 Janos Gabler 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so, subject to the following 8 | conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all copies or 11 | substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR 15 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 16 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT 17 | OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 18 | OTHER DEALINGS IN THE SOFTWARE. 
19 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | codecov: 3 | notify: 4 | require_ci_to_pass: true 5 | coverage: 6 | precision: 2 7 | round: down 8 | range: 50...100 9 | status: 10 | patch: 11 | default: 12 | target: 80% 13 | project: 14 | default: 15 | target: 90% 16 | ignore: 17 | - setup.py 18 | # Uses numba 19 | - src/optimagic/benchmarking/cartis_roberts.py 20 | # not installed on CI 21 | - src/optimagic/optimizers/tranquilo.py 22 | - tests/**/* 23 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = optimagic 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=optimagic 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/rtd_environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic-docs 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - python=3.11 8 | - typing-extensions 9 | - pip 10 | - setuptools_scm 11 | - toml 12 | - sphinx 13 | - sphinxcontrib-bibtex 14 | - sphinx-copybutton 15 | - sphinx-design 16 | - sphinx-panels 17 | - ipython 18 | - ipython_genutils 19 | - myst-nb 20 | - furo 21 | - pybaum 22 | - matplotlib 23 | - seaborn 24 | - numpy 25 | - pandas 26 | - scipy 27 | - patsy 28 | - joblib 29 | - plotly 30 | - nlopt 31 | - annotated-types 32 | - pygmo>=2.19.0 33 | - pip: 34 | - ../ 35 | - kaleido 36 | - Py-BOBYQA 37 | - DFO-LS 38 | - pandas-stubs # dev, tests 39 | - types-cffi # dev, tests 40 | - types-openpyxl # dev, tests 41 | - types-jinja2 # dev, tests 42 | - sqlalchemy-stubs # dev, tests 43 | - sphinxcontrib-mermaid # dev, tests, docs 44 | - intersphinx-registry # docs 45 | - fides==0.7.4 # dev, tests 46 | -------------------------------------------------------------------------------- /docs/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | /* Remove execution count for notebook cells. 
*/ 2 | div.prompt { 3 | display: none; 4 | } 5 | 6 | 7 | /* Classes for the index page. */ 8 | .index-card-image { 9 | padding-top: 1rem; 10 | height: 68px; 11 | text-align: center; 12 | } 13 | 14 | .index-card-link { 15 | color: var(--sd-color-card-text); 16 | font-weight: bold; 17 | } 18 | 19 | pre { 20 | padding-left: 20px 21 | } 22 | 23 | li pre { 24 | padding-left: 20px 25 | } 26 | 27 | .highlight { 28 | background: #f5f5f5 29 | } 30 | 31 | .highlight button.copybtn{ 32 | background-color: #f5f5f5; 33 | } 34 | 35 | .highlight button.copybtn:hover { 36 | background-color: #f5f5f5; 37 | } 38 | -------------------------------------------------------------------------------- /docs/source/_static/css/termynal.css: -------------------------------------------------------------------------------- 1 | /** 2 | * termynal.js 3 | * 4 | * @author Ines Montani 5 | * @version 0.0.1 6 | * @license MIT 7 | */ 8 | 9 | :root { 10 | --color-bg: #0c0c0c; 11 | --color-text: #f2f2f2; 12 | --color-text-subtle: #a2a2a2; 13 | } 14 | 15 | [data-termynal] { 16 | width: 750px; 17 | max-width: 100%; 18 | background: var(--color-bg); 19 | color: var(--color-text); 20 | /* font-size: 18px; */ 21 | font-size: 15px; 22 | /* font-family: 'Fira Mono', Consolas, Menlo, Monaco, 'Courier New', Courier, monospace; */ 23 | font-family: 'Roboto Mono', 'Fira Mono', Consolas, Menlo, Monaco, 'Courier New', Courier, monospace; 24 | border-radius: 4px; 25 | padding: 75px 45px 35px; 26 | position: relative; 27 | -webkit-box-sizing: border-box; 28 | box-sizing: border-box; 29 | line-height: 1.2; 30 | } 31 | 32 | [data-termynal]:before { 33 | content: ''; 34 | position: absolute; 35 | top: 15px; 36 | left: 15px; 37 | display: inline-block; 38 | width: 15px; 39 | height: 15px; 40 | border-radius: 50%; 41 | /* A little hack to display the window buttons in one pseudo element. 
*/ 42 | background: #d9515d; 43 | -webkit-box-shadow: 25px 0 0 #f4c025, 50px 0 0 #3ec930; 44 | box-shadow: 25px 0 0 #f4c025, 50px 0 0 #3ec930; 45 | } 46 | 47 | [data-termynal]:after { 48 | content: 'bash'; 49 | position: absolute; 50 | color: var(--color-text-subtle); 51 | top: 5px; 52 | left: 0; 53 | width: 100%; 54 | text-align: center; 55 | } 56 | 57 | a[data-terminal-control] { 58 | text-align: right; 59 | display: block; 60 | color: #aebbff; 61 | } 62 | 63 | [data-ty] { 64 | display: block; 65 | line-height: 2; 66 | } 67 | 68 | [data-ty]:before { 69 | /* Set up defaults and ensure empty lines are displayed. */ 70 | content: ''; 71 | display: inline-block; 72 | vertical-align: middle; 73 | } 74 | 75 | [data-ty="input"]:before, 76 | [data-ty-prompt]:before { 77 | margin-right: 0.75em; 78 | color: var(--color-text-subtle); 79 | } 80 | 81 | [data-ty="input"]:before { 82 | content: '$'; 83 | } 84 | 85 | [data-ty][data-ty-prompt]:before { 86 | content: attr(data-ty-prompt); 87 | } 88 | 89 | [data-ty-cursor]:after { 90 | content: attr(data-ty-cursor); 91 | font-family: monospace; 92 | margin-left: 0.5em; 93 | -webkit-animation: blink 1s infinite; 94 | animation: blink 1s infinite; 95 | } 96 | 97 | 98 | /* Cursor animation */ 99 | 100 | @-webkit-keyframes blink { 101 | 50% { 102 | opacity: 0; 103 | } 104 | } 105 | 106 | @keyframes blink { 107 | 50% { 108 | opacity: 0; 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /docs/source/_static/css/termynal_custom.css: -------------------------------------------------------------------------------- 1 | .termynal-comment { 2 | color: #4a968f; 3 | font-style: italic; 4 | display: block; 5 | } 6 | 7 | .termy [data-termynal] { 8 | white-space: pre-wrap; 9 | } 10 | 11 | a.external-link::after { 12 | /* \00A0 is a non-breaking space 13 | to make the mark be on the same line as the link 14 | */ 15 | content: "\00A0[↪]"; 16 | } 17 | 18 | a.internal-link::after { 19 | /* \00A0 is a 
non-breaking space 20 | to make the mark be on the same line as the link 21 | */ 22 | content: "\00A0↪"; 23 | } 24 | 25 | :root { 26 | --termynal-green: #137C39; 27 | --termynal-red: #BF2D2D; 28 | --termynal-yellow: #F4C041; 29 | --termynal-white: #f2f2f2; 30 | --termynal-black: #0c0c0c; 31 | --termynal-blue: #11a8cd; 32 | --termynal-grey: #7f7f7f; 33 | } 34 | 35 | .termynal-failed { 36 | color: var(--termynal-red); 37 | } 38 | 39 | .termynal-failed-textonly { 40 | color: var(--termynal-white); 41 | background: var(--termynal-red); 42 | font-weight: bold; 43 | } 44 | 45 | .termynal-success { 46 | color: var(--termynal-green); 47 | } 48 | 49 | .termynal-success-textonly { 50 | color: var(--termynal-white); 51 | background: var(--termynal-green); 52 | font-weight: bold; 53 | } 54 | 55 | .termynal-skipped { 56 | color: var(--termynal-yellow); 57 | } 58 | 59 | .termynal-skipped-textonly { 60 | color: var(--termynal-black); 61 | background: var(--termynal-yellow); 62 | font-weight: bold; 63 | } 64 | 65 | .termynal-warning { 66 | color: var(--termynal-yellow); 67 | } 68 | 69 | .termynal-command { 70 | color: var(--termynal-green); 71 | font-weight: bold; 72 | } 73 | 74 | .termynal-option { 75 | color: var(--termynal-yellow); 76 | font-weight: bold; 77 | } 78 | 79 | .termynal-switch { 80 | color: var(--termynal-red); 81 | font-weight: bold; 82 | } 83 | 84 | .termynal-metavar { 85 | color: yellow; 86 | font-weight: bold; 87 | } 88 | 89 | .termynal-dim { 90 | color: var(--termynal-grey); 91 | } 92 | 93 | .termynal-number { 94 | color: var(--termynal-blue); 95 | } 96 | -------------------------------------------------------------------------------- /docs/source/_static/images/aida.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/aida.jpg 
-------------------------------------------------------------------------------- /docs/source/_static/images/annica.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/annica.jpeg -------------------------------------------------------------------------------- /docs/source/_static/images/autocomplete_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/autocomplete_1.png -------------------------------------------------------------------------------- /docs/source/_static/images/autocomplete_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/autocomplete_2.png -------------------------------------------------------------------------------- /docs/source/_static/images/bahar.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/bahar.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/book.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /docs/source/_static/images/books.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /docs/source/_static/images/bullseye.svg: 
-------------------------------------------------------------------------------- 1 | 2 | bullseye-line 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /docs/source/_static/images/coding.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/source/_static/images/dashboard.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/dashboard.gif -------------------------------------------------------------------------------- /docs/source/_static/images/history_cobyla.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/history_cobyla.gif -------------------------------------------------------------------------------- /docs/source/_static/images/history_l-bfgs-b.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/history_l-bfgs-b.gif -------------------------------------------------------------------------------- /docs/source/_static/images/history_nelder-mead.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/history_nelder-mead.gif -------------------------------------------------------------------------------- /docs/source/_static/images/history_trust-ncg.gif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/history_trust-ncg.gif -------------------------------------------------------------------------------- /docs/source/_static/images/hmg.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/hmg.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/hoover_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/hoover_logo.png -------------------------------------------------------------------------------- /docs/source/_static/images/installation.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /docs/source/_static/images/janos.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/janos.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/ken.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/ken.jpeg 
-------------------------------------------------------------------------------- /docs/source/_static/images/klara.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/klara.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/light-bulb.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /docs/source/_static/images/list.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /docs/source/_static/images/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 17 | 19 | 25 | 31 | 32 | 52 | 54 | 55 | 57 | image/svg+xml 58 | 60 | 61 | 62 | 63 | 64 | 69 | 73 | estimagic 84 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /docs/source/_static/images/mariam.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/mariam.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/miscellaneous.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 
-------------------------------------------------------------------------------- /docs/source/_static/images/numfocus_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/numfocus_logo.png -------------------------------------------------------------------------------- /docs/source/_static/images/sebi.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/sebi.jpg -------------------------------------------------------------------------------- /docs/source/_static/images/stylized_direct_search.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/stylized_direct_search.gif -------------------------------------------------------------------------------- /docs/source/_static/images/stylized_gradient_based_trust_region.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/stylized_gradient_based_trust_region.gif -------------------------------------------------------------------------------- /docs/source/_static/images/stylized_gradient_free_trust_region.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/stylized_gradient_free_trust_region.gif -------------------------------------------------------------------------------- /docs/source/_static/images/stylized_line_search.gif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/stylized_line_search.gif -------------------------------------------------------------------------------- /docs/source/_static/images/tim.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/tim.jpeg -------------------------------------------------------------------------------- /docs/source/_static/images/tobi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/tobi.png -------------------------------------------------------------------------------- /docs/source/_static/images/tra_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/_static/images/tra_logo.png -------------------------------------------------------------------------------- /docs/source/_static/images/video.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /docs/source/development/changes.md: -------------------------------------------------------------------------------- 1 | (changes)= 2 | 3 | ```{include} ../../../CHANGES.md 4 | ``` 5 | -------------------------------------------------------------------------------- /docs/source/development/code_of_conduct.md: -------------------------------------------------------------------------------- 1 | (coc)= 2 | 3 | ```{include} ../../../CODE_OF_CONDUCT.md 
4 | ``` 5 | -------------------------------------------------------------------------------- /docs/source/development/enhancement_proposals.md: -------------------------------------------------------------------------------- 1 | # Enhancement Proposals 2 | 3 | optimagic Enhancement Proposals (EPs) can be used to discuss and design large changes. 4 | EP-00 details the EP process, the optimagic governance model and the optimagic Code of 5 | Conduct. It is the only EP that gets continuously updated. 6 | 7 | These EPs are currently in place: 8 | 9 | ```{toctree} 10 | --- 11 | maxdepth: 1 12 | --- 13 | ep-00-governance-model.md 14 | ep-01-pytrees.md 15 | ep-02-typing.md 16 | ep-03-alignment.md 17 | ``` 18 | -------------------------------------------------------------------------------- /docs/source/development/index.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | ```{toctree} 4 | --- 5 | maxdepth: 1 6 | --- 7 | code_of_conduct 8 | how_to_contribute 9 | styleguide 10 | enhancement_proposals 11 | credits 12 | changes 13 | ``` 14 | -------------------------------------------------------------------------------- /docs/source/estimagic/explanation/cluster_robust_likelihood_inference.md: -------------------------------------------------------------------------------- 1 | (robust_likelihood_inference)= 2 | 3 | # Robust Likelihood inference 4 | 5 | (to be written.) 6 | 7 | In case of an urgent request for this guide, feel free to open an issue 8 | \[here\](). 
9 | -------------------------------------------------------------------------------- /docs/source/estimagic/explanation/index.md: -------------------------------------------------------------------------------- 1 | # Explanation 2 | 3 | ```{toctree} 4 | --- 5 | maxdepth: 1 6 | --- 7 | bootstrap_ci 8 | bootstrap_montecarlo_comparison 9 | cluster_robust_likelihood_inference 10 | ``` 11 | -------------------------------------------------------------------------------- /docs/source/estimagic/index.md: -------------------------------------------------------------------------------- 1 | (estimagic)= 2 | 3 | # Estimagic 4 | 5 | *estimagic* is a subpackage of *optimagic* that helps you to fit nonlinear statistical 6 | models to data and perform inference on the estimated parameters. 7 | 8 | As a user, you need to code up the objective function that defines the estimator. This 9 | is either a likelihood (ML) function or a Method of Simulated Moments (MSM) objective 10 | function. Everything else is done by *estimagic*. 11 | 12 | Everything else means: 13 | 14 | - Optimize your objective function 15 | - Calculate asymptotic or bootstrapped standard errors and confidence intervals 16 | - Create publication quality tables 17 | - Perform sensitivity analysis on MSM models 18 | 19 | `````{grid} 1 2 2 2 20 | --- 21 | gutter: 3 22 | --- 23 | ````{grid-item-card} 24 | :text-align: center 25 | :img-top: ../_static/images/light-bulb.svg 26 | :class-img-top: index-card-image 27 | :shadow: md 28 | 29 | ```{button-link} tutorials/index.html 30 | --- 31 | click-parent: 32 | ref-type: ref 33 | class: stretched-link index-card-link sd-text-primary 34 | --- 35 | Tutorials 36 | ``` 37 | 38 | New users of estimagic should read this first. 
39 | 40 | ```` 41 | 42 | 43 | 44 | ````{grid-item-card} 45 | :text-align: center 46 | :img-top: ../_static/images/books.svg 47 | :class-img-top: index-card-image 48 | :shadow: md 49 | 50 | ```{button-link} explanation/index.html 51 | --- 52 | click-parent: 53 | ref-type: ref 54 | class: stretched-link index-card-link sd-text-primary 55 | --- 56 | Explanations 57 | ``` 58 | 59 | Background information on key topics central to the package. 60 | 61 | ```` 62 | 63 | ````{grid-item-card} 64 | :text-align: center 65 | :columns: 12 66 | :img-top: ../_static/images/coding.svg 67 | :class-img-top: index-card-image 68 | :shadow: md 69 | 70 | ```{button-link} reference/index.html 71 | --- 72 | click-parent: 73 | ref-type: ref 74 | class: stretched-link index-card-link sd-text-primary 75 | --- 76 | API Reference 77 | ``` 78 | 79 | Detailed description of the estimagic API. 80 | 81 | ```` 82 | 83 | 84 | 85 | ````` 86 | 87 | ```{toctree} 88 | --- 89 | hidden: true 90 | maxdepth: 1 91 | --- 92 | tutorials/index 93 | explanation/index 94 | reference/index 95 | ``` 96 | -------------------------------------------------------------------------------- /docs/source/estimagic/reference/index.md: -------------------------------------------------------------------------------- 1 | # estimagic API 2 | 3 | ```{eval-rst} 4 | .. currentmodule:: estimagic 5 | ``` 6 | 7 | (estimation)= 8 | 9 | ## Estimation 10 | 11 | ```{eval-rst} 12 | .. dropdown:: estimate_ml 13 | 14 | .. autofunction:: estimate_ml 15 | 16 | ``` 17 | 18 | ```{eval-rst} 19 | .. dropdown:: estimate_msm 20 | 21 | .. autofunction:: estimate_msm 22 | 23 | ``` 24 | 25 | ```{eval-rst} 26 | .. dropdown:: get_moments_cov 27 | 28 | .. autofunction:: get_moments_cov 29 | 30 | ``` 31 | 32 | ```{eval-rst} 33 | .. dropdown:: lollipop_plot 34 | 35 | .. autofunction:: lollipop_plot 36 | 37 | ``` 38 | 39 | ```{eval-rst} 40 | .. dropdown:: estimation_table 41 | 42 | .. 
autofunction:: estimation_table 43 | 44 | ``` 45 | 46 | ```{eval-rst} 47 | .. dropdown:: render_html 48 | 49 | .. autofunction:: render_html 50 | 51 | ``` 52 | 53 | ```{eval-rst} 54 | .. dropdown:: render_latex 55 | 56 | .. autofunction:: render_latex 57 | 58 | ``` 59 | 60 | ```{eval-rst} 61 | .. dropdown:: LikelihoodResult 62 | 63 | .. autoclass:: LikelihoodResult 64 | :members: 65 | 66 | ``` 67 | 68 | ```{eval-rst} 69 | .. dropdown:: MomentsResult 70 | 71 | .. autoclass:: MomentsResult 72 | :members: 73 | 74 | 75 | 76 | ``` 77 | 78 | (bootstrap)= 79 | 80 | ## Bootstrap 81 | 82 | ```{eval-rst} 83 | .. dropdown:: bootstrap 84 | 85 | .. autofunction:: bootstrap 86 | ``` 87 | 88 | ```{eval-rst} 89 | .. dropdown:: BootstrapResult 90 | 91 | .. autoclass:: BootstrapResult 92 | :members: 93 | 94 | 95 | ``` 96 | -------------------------------------------------------------------------------- /docs/source/estimagic/tutorials/example_estimation_table_tex.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/docs/source/estimagic/tutorials/example_estimation_table_tex.pdf -------------------------------------------------------------------------------- /docs/source/estimagic/tutorials/index.md: -------------------------------------------------------------------------------- 1 | # Estimagic Tutorials 2 | 3 | Estimagic has functions to estimate the parameters of maximum likelihood or simulation 4 | models. You provide a likelihood or moment simulation function. Estimagic produces 5 | parameter estimates and standard errors in a format that can be easily used to create 6 | publication quality latex or html tables. 
7 | 8 | ```{toctree} 9 | --- 10 | maxdepth: 1 11 | --- 12 | likelihood_overview 13 | msm_overview 14 | bootstrap_overview 15 | estimation_tables_overview 16 | ``` 17 | -------------------------------------------------------------------------------- /docs/source/explanation/index.md: -------------------------------------------------------------------------------- 1 | # Explanation 2 | 3 | This section provides background information on numerical topics and details of 4 | optimagic. It is completely optional and not necessary if you are just starting out. 5 | 6 | ```{toctree} 7 | --- 8 | maxdepth: 1 9 | --- 10 | implementation_of_constraints 11 | internal_optimizers 12 | why_optimization_is_hard.ipynb 13 | explanation_of_numerical_optimizers 14 | tests_for_supported_optimizers 15 | numdiff_background 16 | ``` 17 | -------------------------------------------------------------------------------- /docs/source/explanation/numdiff_background.md: -------------------------------------------------------------------------------- 1 | # Numerical differentiation: methods 2 | 3 | In this section we explain the mathematical background of forward, backward and central 4 | differences. The main ideas in this chapter are taken from {cite}`Dennis1996`. x is used 5 | for the pandas DataFrame with parameters. We index the entries of x as a n-dimensional 6 | vector, where n is the number of variables in params_sr. 
The forward difference for the 7 | gradient is given by: 8 | 9 | $$ 10 | \nabla f(x) = \begin{pmatrix}\frac{f(x + e_0 * h_0) - f(x)}{h_0}\\ 11 | \frac{f(x + e_1 * h_1) - f(x)}{h_1}\\.\\.\\.\\ \frac{f(x + e_n * h_n) 12 | - f(x)}{h_n} \end{pmatrix} 13 | $$ 14 | 15 | The backward difference for the gradient is given by: 16 | 17 | $$ 18 | \nabla f(x) = \begin{pmatrix}\frac{f(x) - f(x - e_0 * h_0)}{h_0}\\ \frac{f(x) - 19 | f(x - e_1 * h_1)}{h_1}\\.\\.\\.\\ \frac{f(x) - f(x - e_n * h_n)}{h_n} 20 | \end{pmatrix} 21 | $$ 22 | 23 | The central difference for the gradient is given by: 24 | 25 | $$ 26 | \nabla f(x) = 27 | \begin{pmatrix}\frac{f(x + e_0 * h_0) - f(x - e_0 * h_0)}{2 h_0}\\ 28 | \frac{f(x + e_1 * h_1) - f(x - e_1 * h_1)}{2 h_1}\\.\\.\\.\\ \frac{f(x + e_n * h_n) 29 | - f(x - e_n * h_n)}{2 h_n} \end{pmatrix} 30 | $$ 31 | 32 | For the optimal stepsize h the following rule of thumb is applied: 33 | 34 | $$ 35 | h_i = (1 + |x[i]|) * \sqrt\epsilon 36 | $$ 37 | 38 | With the above in mind it is easy to calculate the Jacobian matrix. The calculation of 39 | the finite difference w.r.t. each variable of params_sr yields a vector, which is the 40 | corresponding column of the Jacobian matrix. The optimal stepsize remains the same. 41 | 42 | For the Hessian matrix, we repeatedly call the finite differences functions. 
As we allow 43 | for central finite differences in the second order derivative only, the deductions for 44 | forward and backward, are left to the interested reader: 45 | 46 | $$ 47 | f_{i,j}(x) 48 | = &\frac{f_i(x + e_j * h_j) - f_i(x - e_j * h_j)}{h_j} \\ 49 | = &\frac{\frac{f(x + e_j * h_j + e_i * h_i) - f(x + e_j * h_j - e_i * h_i)}{h_i} 50 | - \frac{ 51 | f(x - e_j * h_j + e_i * h_i) - f(x - e_j * h_j - e_i * h_i) 52 | }{h_i}}{h_j} \\ 53 | = &\frac{ 54 | f(x + e_j * h_j + e_i * h_i) - f(x + e_j * h_j - e_i * h_i) 55 | }{h_j * h_i} \\ 56 | &+ \frac{ 57 | - f(x - e_j * h_j + e_i * h_i) + f(x - e_j * h_j - e_i * h_i) 58 | }{h_j * h_i} 59 | $$ 60 | 61 | For the optimal stepsize a different rule is used: 62 | 63 | $$ 64 | h_i = (1 + |x[i]|) * \sqrt[3]\epsilon 65 | $$ 66 | 67 | Similar deviations lead to the elements of the Hessian matrix calculated by backward and 68 | central differences. 69 | 70 | **References:** 71 | 72 | ```{eval-rst} 73 | .. bibliography:: ../refs.bib 74 | :filter: docname in docnames 75 | ``` 76 | -------------------------------------------------------------------------------- /docs/source/how_to/how_to_globalization.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# How to choose a strategy for global optimization\n", 8 | "\n", 9 | "(to be written)" 10 | ] 11 | } 12 | ], 13 | "metadata": { 14 | "language_info": { 15 | "name": "python" 16 | } 17 | }, 18 | "nbformat": 4, 19 | "nbformat_minor": 2 20 | } 21 | -------------------------------------------------------------------------------- /docs/source/how_to/index.md: -------------------------------------------------------------------------------- 1 | (how-to)= 2 | 3 | # How-to Guides 4 | 5 | How-to Guides show how to achieve specific tasks. In many cases they show you how to use 6 | advanced options. 
For a more basic introduction, check out the [tutorials](tutorials). 7 | 8 | ```{toctree} 9 | --- 10 | maxdepth: 1 11 | --- 12 | how_to_criterion_function 13 | how_to_start_parameters 14 | how_to_derivatives 15 | how_to_specify_algorithm_and_algo_options 16 | how_to_algorithm_selection 17 | how_to_bounds 18 | how_to_constraints 19 | how_to_globalization 20 | how_to_multistart 21 | how_to_visualize_histories 22 | how_to_scaling 23 | how_to_logging 24 | how_to_errors_during_optimization 25 | how_to_slice_plot 26 | how_to_benchmarking 27 | how_to_add_optimizers 28 | ``` 29 | -------------------------------------------------------------------------------- /docs/source/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | ## Basic installation 4 | 5 | The preferred way to install optimagic is via `conda` or `mamba`. To do so, open a 6 | terminal and type: 7 | 8 | ``` 9 | conda install -c conda-forge optimagic 10 | ``` 11 | 12 | Alternatively, you can install optimagic via pip: 13 | 14 | ``` 15 | pip install optimagic 16 | ``` 17 | 18 | In both cases, you get optimagic and all of its mandatory dependencies. 19 | 20 | ## Installing optional dependencies 21 | 22 | Only `scipy` is a mandatory dependency of optimagic. Other algorithms become available 23 | if you install more packages. We make this optional because you will rarely need all of 24 | them in the same project. 25 | 26 | For an overview of all optimizers and the packages you need to install to enable them, 27 | see {ref}`list_of_algorithms`. 28 | 29 | To enable all algorithms at once, do the following: 30 | 31 | ``` 32 | conda install -c conda-forge nlopt 33 | ``` 34 | 35 | ``` 36 | pip install Py-BOBYQA 37 | ``` 38 | 39 | ``` 40 | pip install DFO-LS 41 | ``` 42 | 43 | *Note*: We recommend installing `DFO-LS` version 1.5.3 or higher. 
Versions of 1.5.0 or 44 | lower also work but the versions `1.5.1` and `1.5.2` contain bugs that can lead to 45 | errors being raised. 46 | 47 | ``` 48 | conda install -c conda-forge petsc4py 49 | ``` 50 | 51 | *Note*: `` `petsc4py` `` is not available on Windows. 52 | 53 | ``` 54 | conda install -c conda-forge cyipopt 55 | ``` 56 | 57 | *Note*: Make sure you have at least `cyipopt` 1.4. 58 | 59 | ``` 60 | conda install -c conda-forge pygmo 61 | ``` 62 | 63 | ``` 64 | pip install fides>=0.7.4 65 | ``` 66 | 67 | *Note*: Make sure you have at least `fides` 0.7.4. 68 | -------------------------------------------------------------------------------- /docs/source/reference/algo_options.md: -------------------------------------------------------------------------------- 1 | (algo_options_docs)= 2 | 3 | # The default algorithm options 4 | 5 | ```{eval-rst} 6 | .. automodule:: optimagic.optimization.algo_options 7 | :members: 8 | ``` 9 | -------------------------------------------------------------------------------- /docs/source/reference/batch_evaluators.md: -------------------------------------------------------------------------------- 1 | (batch_evaluators)= 2 | 3 | # Batch evaluators 4 | 5 | ```{eval-rst} 6 | .. automodule:: optimagic.batch_evaluators 7 | :members: 8 | ``` 9 | -------------------------------------------------------------------------------- /docs/source/reference/utilities.md: -------------------------------------------------------------------------------- 1 | (utilities)= 2 | 3 | # Utility functions 4 | 5 | ```{eval-rst} 6 | .. automodule:: optimagic.utilities 7 | :members: 8 | ``` 9 | -------------------------------------------------------------------------------- /docs/source/tutorials/index.md: -------------------------------------------------------------------------------- 1 | (tutorials)= 2 | 3 | # Tutorials 4 | 5 | This section provides an overview of optimagic. It's a good starting point if you are 6 | new to optimagic. 
For more in-depth examples using advanced options, check out the 7 | [how-to guides](how-to). 8 | 9 | `````{grid} 1 2 2 2 10 | --- 11 | gutter: 3 12 | --- 13 | ````{grid-item-card} 14 | :text-align: center 15 | :img-top: ../_static/images/optimization.svg 16 | :class-img-top: index-card-image 17 | :shadow: md 18 | 19 | ```{button-link} optimization_overview.html 20 | --- 21 | click-parent: 22 | ref-type: ref 23 | class: stretched-link index-card-link sd-text-primary 24 | --- 25 | Optimization 26 | ``` 27 | 28 | Learn numerical optimization with optimagic. 29 | 30 | ```` 31 | 32 | ````{grid-item-card} 33 | :text-align: center 34 | :img-top: ../_static/images/differentiation.svg 35 | :class-img-top: index-card-image 36 | :shadow: md 37 | 38 | ```{button-link} numdiff_overview.html 39 | --- 40 | click-parent: 41 | ref-type: ref 42 | class: stretched-link index-card-link sd-text-primary 43 | --- 44 | Differentiation 45 | ``` 46 | 47 | Learn numerical differentiation with optimagic. 48 | 49 | ```` 50 | 51 | ````` 52 | 53 | ```{toctree} 54 | --- 55 | hidden: true 56 | maxdepth: 1 57 | --- 58 | optimization_overview 59 | numdiff_overview 60 | ``` 61 | -------------------------------------------------------------------------------- /docs/source/videos.md: -------------------------------------------------------------------------------- 1 | (list_of_videos)= 2 | 3 | # Videos 4 | 5 | Check out our tutorials, talks and screencasts about optimagic. 
6 | 7 | ## Talks and tutorials 8 | 9 | ### EuroSciPy 2023 (Talk) 10 | 11 | ```{raw} html 12 | 17 | ``` 18 | 19 | ### EuroSciPy 2023 (Tutorial) 20 | 21 | ```{raw} html 22 | 27 | ``` 28 | 29 | ### SciPy 2022 (Tutorial) 30 | 31 | ```{raw} html 32 | 37 | ``` 38 | 39 | ## Screencasts 40 | 41 | The screencasts are part of the course _Effective Programming Practices for Economists_, 42 | taught at the University of Bonn by 43 | [Hans-Martin von Gaudecker](https://www.wiwi.uni-bonn.de/gaudecker/), and previously 44 | also [Janoś Gabler](https://github.com/janosg). You can find all screencasts of the 45 | course on the 46 | [course webite](https://effective-programming-practices.vercel.app/landing-page.html). 47 | Here, we show the screencasts about numerical optimization and optimagic. 48 | 49 | ### Introduction to numerical optimization 50 | 51 | ```{raw} html 52 | 57 | ``` 58 | 59 | ### Using optimagic’s minimize and maximize 60 | 61 | ```{raw} html 62 | 67 | ``` 68 | 69 | ### Visualizing optimizer histories 70 | 71 | ```{raw} html 72 | 77 | ``` 78 | 79 | ### Choosing optimization algorithms 80 | 81 | ```{raw} html 82 | 87 | ``` 88 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: optimagic 3 | channels: 4 | - conda-forge 5 | - nodefaults 6 | dependencies: 7 | - python=3.10 # dev 8 | - cyipopt>=1.4.0 # dev, tests 9 | - pygmo>=2.19.0 # dev, tests, docs 10 | - jupyterlab # dev, docs 11 | - nlopt # dev, tests, docs 12 | - pip # dev, tests, docs 13 | - pytest # dev, tests 14 | - pytest-cov # tests 15 | - pytest-xdist # dev, tests 16 | - setuptools_scm # dev 17 | - statsmodels # dev, tests 18 | - toml # dev 19 | - cloudpickle # run, tests 20 | - joblib # run, tests 21 | - numpy >= 2 # run, tests 22 | - pandas # run, tests 23 | - plotly<6.0.0 # run, tests 24 | - pybaum>=0.1.2 # run, tests 25 | - scipy>=1.2.1 # run, tests 26 | - 
sqlalchemy # run, tests 27 | - myst-nb # docs 28 | - sphinx # docs 29 | - sphinx-copybutton # docs 30 | - sphinx-design # docs 31 | - sphinx-panels # docs 32 | - sphinxcontrib-bibtex # docs 33 | - intersphinx-registry # docs 34 | - seaborn # dev, tests 35 | - mypy=1.14.1 # dev, tests 36 | - pyyaml # dev, tests 37 | - jinja2 # dev, tests 38 | - furo # dev, docs 39 | - annotated-types # dev, tests 40 | - iminuit # dev, tests 41 | - pip: # dev, tests, docs 42 | - nevergrad # dev, tests 43 | - DFO-LS>=1.5.3 # dev, tests 44 | - Py-BOBYQA # dev, tests 45 | - fides==0.7.4 # dev, tests 46 | - kaleido # dev, tests 47 | - pre-commit>=4 # dev 48 | - -e . # dev 49 | # type stubs 50 | - pandas-stubs # dev, tests 51 | - types-cffi # dev, tests 52 | - types-openpyxl # dev, tests 53 | - types-jinja2 # dev, tests 54 | - sqlalchemy-stubs # dev, tests 55 | - sphinxcontrib-mermaid # dev, tests, docs 56 | - pdbp # dev 57 | -------------------------------------------------------------------------------- /src/estimagic/batch_evaluators.py: -------------------------------------------------------------------------------- 1 | from optimagic.batch_evaluators import joblib_batch_evaluator as _joblib_batch_evaluator 2 | from optimagic.batch_evaluators import ( 3 | pathos_mp_batch_evaluator as _pathos_mp_batch_evaluator, 4 | ) 5 | from optimagic.batch_evaluators import ( 6 | process_batch_evaluator as _process_batch_evaluator, 7 | ) 8 | from optimagic.decorators import deprecated 9 | 10 | MSG = ( 11 | "estimagic.batch_evaluators.{name} has been deprecated in version 0.5.0. Use " 12 | "optimagic.batch_evaluators.{name} instead. This function will be removed in " 13 | "version 0.6.0." 
14 | ) 15 | 16 | 17 | pathos_mp_batch_evaluator = deprecated( 18 | _pathos_mp_batch_evaluator, MSG.format(name="pathos_mp_batch_evaluator") 19 | ) 20 | 21 | joblib_batch_evaluator = deprecated( 22 | _joblib_batch_evaluator, MSG.format(name="joblib_batch_evaluator") 23 | ) 24 | 25 | process_batch_evaluator = deprecated( 26 | _process_batch_evaluator, MSG.format(name="process_batch_evaluator") 27 | ) 28 | -------------------------------------------------------------------------------- /src/estimagic/bootstrap_helpers.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | 4 | def check_inputs( 5 | data=None, 6 | weight_by=None, 7 | cluster_by=None, 8 | ci_method="percentile", 9 | ci_level=0.95, 10 | skipdata=False, 11 | ): 12 | """Check validity of inputs. 13 | 14 | Args: 15 | data (pd.DataFrame): Dataset. 16 | weight_by (str): Column name of variable with weights. 17 | cluster_by (str): Column name of variable to cluster by. 18 | ci_method (str): Method of choice for computing confidence intervals. 19 | The default is "percentile". 20 | ci_level (float): Confidence level for the calculation of confidence 21 | intervals. The default is 0.95. 22 | skipdata (bool): Whether to skip all checks on the data argument. 23 | 24 | """ 25 | ci_method_list = ["percentile", "bc", "t", "normal", "basic"] 26 | 27 | if not skipdata: 28 | if not isinstance(data, pd.DataFrame) and not isinstance(data, pd.Series): 29 | raise TypeError("Data must be a pandas.DataFrame or pandas.Series.") 30 | elif (weight_by is not None) and (weight_by not in data.columns.tolist()): 31 | raise ValueError( 32 | "Input 'weight_by' must be None or a column name of 'data'." 33 | ) 34 | elif (cluster_by is not None) and (cluster_by not in data.columns.tolist()): 35 | raise ValueError( 36 | "Input 'cluster_by' must be None or a column name of 'data'." 
37 | ) 38 | 39 | if ci_method not in ci_method_list: 40 | msg = ( 41 | "ci_method must be 'percentile', 'bc', 't', 'basic' or 'normal', " 42 | f"'{ci_method}' was supplied" 43 | ) 44 | raise ValueError(msg) 45 | if ci_level > 1 or ci_level < 0: 46 | raise ValueError("Input 'ci_level' must be in [0,1].") 47 | -------------------------------------------------------------------------------- /src/estimagic/config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | EXAMPLE_DIR = Path(__file__).parent / "examples" 4 | -------------------------------------------------------------------------------- /src/estimagic/estimation_summaries.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/estimagic/estimation_summaries.py -------------------------------------------------------------------------------- /src/estimagic/examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/estimagic/examples/__init__.py -------------------------------------------------------------------------------- /src/estimagic/examples/exam_points.csv: -------------------------------------------------------------------------------- 1 | points 2 | 275.5 3 | 351.5 4 | 346.25 5 | 228.25 6 | 108.25 7 | 380.75 8 | 346.25 9 | 360.75 10 | 196 11 | 414.75 12 | 370.5 13 | 371.75 14 | 143.75 15 | 333.5 16 | 397.5 17 | 405.75 18 | 154.75 19 | 321 20 | 279 21 | 326.5 22 | 49.5 23 | 402.75 24 | 389.75 25 | 382.25 26 | 337.75 27 | 311 28 | 105.5 29 | 380.5 30 | 236 31 | 326.5 32 | 343.75 33 | 328.75 34 | 316.25 35 | 348.25 36 | 338.75 37 | 375.75 38 | 410 39 | 17 40 | 414.25 41 | 21.25 42 | 369.625 43 | 318.875 44 | 336.125 45 | 429.875 46 | 407.5 47 | 415.75 48 | 332.375 
49 | 397 50 | 375.875 51 | 419.125 52 | 270.125 53 | 299.25 54 | 384.125 55 | 335 56 | 408.5 57 | 414.25 58 | 253.5 59 | 339.25 60 | 338.75 61 | 355.375 62 | 326.375 63 | 240.375 64 | 385 65 | 435 66 | 317.25 67 | 365.625 68 | 372.75 69 | 365.125 70 | 349.625 71 | 366.75 72 | 386.5 73 | 391.75 74 | 403 75 | 258.5 76 | 386 77 | 411 78 | 350.25 79 | 402.25 80 | 294.625 81 | 291.125 82 | 378.125 83 | 442.0 84 | 428.1 85 | 347.3 86 | 431.8 87 | 430.4 88 | 426.0 89 | 433.5 90 | 331.1 91 | 405.7 92 | 415.5 93 | 406.4 94 | 418.6 95 | 400.7 96 | 408.8 97 | 404.8 98 | 409.4 99 | 410.8 100 | 402.5 101 | 401.0 102 | 415.3 103 | 390.8 104 | 394.6 105 | 399.0 106 | 380.0 107 | 397.5 108 | 368.7 109 | 394.7 110 | 304.3 111 | 391.1 112 | 388.4 113 | 370.3 114 | 384.6 115 | 383.5 116 | 305.6 117 | 286.5 118 | 367.9 119 | 329.8 120 | 288.2 121 | 338.5 122 | 333.6 123 | 268.6 124 | 335.2 125 | 296.3 126 | 269.1 127 | 243.2 128 | 159.4 129 | 448.4 130 | 449.8 131 | 435.9 132 | 429.4 133 | 428.3 134 | 427.5 135 | 422.5 136 | 409.8 137 | 415.8 138 | 413.4 139 | 416.8 140 | 406.7 141 | 383.9 142 | 389.0 143 | 387.2 144 | 368.6 145 | 399.5 146 | 382.6 147 | 355.9 148 | 389.9 149 | 342.5 150 | 365.2 151 | 320.3 152 | 341.5 153 | 248.1 154 | 305.0 155 | 279.2 156 | 275.7 157 | 204.5 158 | 235.0 159 | 102.2 160 | 112.3 161 | 130.6 162 | 60.2 163 | -------------------------------------------------------------------------------- /src/estimagic/examples/logit.py: -------------------------------------------------------------------------------- 1 | """Likelihood functions and derivatives of a logit model.""" 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | from optimagic import mark 7 | 8 | 9 | def logit_loglike_and_derivative(params, y, x): 10 | return logit_loglike(params, y, x), logit_jac(params, y, x) 11 | 12 | 13 | @mark.scalar 14 | def scalar_logit_fun_and_jac(params, y, x): 15 | return logit_loglike(params, y, x).sum(), logit_grad(params, y, x) 16 | 17 | 18 | @mark.likelihood 19 
| def logit_loglike(params, y, x): 20 | """Log-likelihood function of a logit model. 21 | 22 | Args: 23 | params (pd.DataFrame): The index consists of the parameter names, 24 | the "value" column are the parameter values. 25 | y (np.array): 1d numpy array with the dependent variable 26 | x (np.array): 2d numpy array with the independent variables 27 | 28 | Returns: 29 | loglike (np.array): 1d numpy array with likelihood contribution per individual 30 | 31 | """ 32 | if isinstance(params, pd.DataFrame): 33 | p = params["value"].to_numpy() 34 | else: 35 | p = params 36 | q = 2 * y - 1 37 | contribs = np.log(1 / (1 + np.exp(-(q * np.dot(x, p))))) 38 | 39 | return contribs 40 | 41 | 42 | @mark.scalar 43 | def logit_grad(params, y, x): 44 | return logit_jac(params, y, x).sum(axis=0) 45 | 46 | 47 | def logit_jac(params, y, x): 48 | """Derivative of the log-likelihood for each observation of a logit model. 49 | 50 | Args: 51 | params (pd.DataFrame): The index consists of the parmater names, 52 | the "value" column are the parameter values. 53 | y (np.array): 1d numpy array with the dependent variable 54 | x (np.array): 2d numpy array with the independent variables 55 | 56 | Returns: 57 | jac : array-like 58 | The derivative of the loglikelihood for each observation evaluated 59 | at `params`. 60 | 61 | """ 62 | if isinstance(params, pd.DataFrame): 63 | p = params["value"].to_numpy() 64 | else: 65 | p = params 66 | y = y.to_numpy() 67 | c = 1 / (1 + np.exp(-(np.dot(x, p)))) 68 | jac = (y - c)[:, None] * x 69 | return jac 70 | 71 | 72 | def logit_hess(params, y, x): # noqa: ARG001 73 | """Hessian matrix of the log-likelihood. 74 | 75 | Args: 76 | params (pd.DataFrame): The index consists of the parmater names, 77 | the "value" column are the parameter values. 
78 | y (np.array): 1d numpy array with the dependent variable 79 | x (np.array): 2d numpy array with the independent variables 80 | 81 | Returns: 82 | hessian (np.array) : 2d numpy array with the hessian of the 83 | logl-ikelihood function evaluated at `params` 84 | 85 | """ 86 | if isinstance(params, pd.DataFrame): 87 | p = params["value"].to_numpy() 88 | else: 89 | p = params 90 | c = 1 / (1 + np.exp(-(np.dot(x, p)))) 91 | return -np.dot(c * (1 - c) * x.T, x) 92 | -------------------------------------------------------------------------------- /src/estimagic/msm_covs.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from estimagic.shared_covs import process_pandas_arguments 4 | from optimagic.exceptions import INVALID_INFERENCE_MSG 5 | from optimagic.utilities import robust_inverse 6 | 7 | 8 | def cov_robust(jac, weights, moments_cov): 9 | """Calculate the cov of msm estimates with asymptotically non-efficient weights. 10 | 11 | Note that asymptotically non-efficient weights are typically preferrable because 12 | they lead to less finite sample bias. 13 | 14 | Args: 15 | jac (np.ndarray or pandas.DataFrame): Numpy array or DataFrame with the jacobian 16 | of simulate_moments with respect to params. The derivative needs to be taken 17 | at the estimated parameters. Has shape n_moments, n_params. 18 | weights (np.ndarray): The weighting matrix for msm estimation. 19 | moments_cov (np.ndarray): The covariance matrix of the empirical moments. 20 | 21 | Returns: 22 | numpy.ndarray: numpy array with covariance matrix. 
23 | 24 | """ 25 | _jac, _weights, _moments_cov, names = process_pandas_arguments( 26 | jac=jac, weights=weights, moments_cov=moments_cov 27 | ) 28 | 29 | bread = robust_inverse( 30 | _jac.T @ _weights @ _jac, 31 | msg=INVALID_INFERENCE_MSG, 32 | ) 33 | 34 | butter = _jac.T @ _weights @ _moments_cov @ _weights @ _jac 35 | 36 | cov = bread @ butter @ bread 37 | 38 | if names: 39 | cov = pd.DataFrame(cov, columns=names.get("params"), index=names.get("params")) 40 | 41 | return cov 42 | 43 | 44 | def cov_optimal(jac, weights): 45 | """Calculate the cov of msm estimates with asymptotically efficient weights. 46 | 47 | Note that asymptotically efficient weights have substantial finite sample 48 | bias and are typically not a good choice. 49 | 50 | Args: 51 | jac (np.ndarray or pandas.DataFrame): Numpy array or DataFrame with the jacobian 52 | of simulate_moments with respect to params. The derivative needs to be taken 53 | at the estimated parameters. Has shape n_moments, n_params. 54 | weights (np.ndarray): The weighting matrix for msm estimation. 55 | moments_cov (np.ndarray): The covariance matrix of the empirical moments. 56 | 57 | Returns: 58 | numpy.ndarray: numpy array with covariance matrix. 
59 | 60 | """ 61 | _jac, _weights, names = process_pandas_arguments(jac=jac, weights=weights) 62 | 63 | cov = robust_inverse(_jac.T @ _weights @ _jac, msg=INVALID_INFERENCE_MSG) 64 | 65 | if names: 66 | cov = pd.DataFrame(cov, columns=names.get("params"), index=names.get("params")) 67 | 68 | return cov 69 | -------------------------------------------------------------------------------- /src/estimagic/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/estimagic/py.typed -------------------------------------------------------------------------------- /src/optimagic/benchmarking/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/optimagic/benchmarking/__init__.py -------------------------------------------------------------------------------- /src/optimagic/benchmarking/noise_distributions.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def _standard_logistic(size, rng): 5 | scale = np.sqrt(3) / np.pi 6 | return rng.logistic(loc=0, scale=scale, size=size) 7 | 8 | 9 | def _standard_uniform(size, rng): 10 | ub = np.sqrt(3) 11 | lb = -ub 12 | return rng.uniform(lb, ub, size=size) 13 | 14 | 15 | def _standard_normal(size, rng): 16 | return rng.normal(size=size) 17 | 18 | 19 | def _standard_gumbel(size, rng): 20 | gamma = 0.577215664901532 21 | scale = np.sqrt(6) / np.pi 22 | loc = -scale * gamma 23 | return rng.gumbel(loc=loc, scale=scale, size=size) 24 | 25 | 26 | def _standard_laplace(size, rng): 27 | return rng.laplace(scale=np.sqrt(0.5), size=size) 28 | 29 | 30 | NOISE_DISTRIBUTIONS = { 31 | "normal": _standard_normal, 32 | "gumbel": _standard_gumbel, 33 | "logistic": _standard_logistic, 34 | "uniform": 
_standard_uniform, 35 | "laplace": _standard_laplace, 36 | } 37 | -------------------------------------------------------------------------------- /src/optimagic/config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pandas as pd 4 | import plotly.express as px 5 | from packaging import version 6 | 7 | DOCS_DIR = Path(__file__).parent.parent / "docs" 8 | OPTIMAGIC_ROOT = Path(__file__).parent 9 | 10 | PLOTLY_TEMPLATE = "simple_white" 11 | PLOTLY_PALETTE = px.colors.qualitative.Set2 12 | 13 | DEFAULT_N_CORES = 1 14 | 15 | CRITERION_PENALTY_SLOPE = 0.1 16 | CRITERION_PENALTY_CONSTANT = 100 17 | 18 | # ====================================================================================== 19 | # Check Available Packages 20 | # ====================================================================================== 21 | 22 | try: 23 | from petsc4py import PETSc # noqa: F401 24 | except ImportError: 25 | IS_PETSC4PY_INSTALLED = False 26 | else: 27 | IS_PETSC4PY_INSTALLED = True 28 | 29 | try: 30 | import nlopt # noqa: F401 31 | except ImportError: 32 | IS_NLOPT_INSTALLED = False 33 | else: 34 | IS_NLOPT_INSTALLED = True 35 | 36 | try: 37 | import pybobyqa # noqa: F401 38 | except ImportError: 39 | IS_PYBOBYQA_INSTALLED = False 40 | else: 41 | IS_PYBOBYQA_INSTALLED = True 42 | 43 | try: 44 | import dfols # noqa: F401 45 | except ImportError: 46 | IS_DFOLS_INSTALLED = False 47 | else: 48 | IS_DFOLS_INSTALLED = True 49 | 50 | try: 51 | import pygmo # noqa: F401 52 | except ImportError: 53 | IS_PYGMO_INSTALLED = False 54 | else: 55 | IS_PYGMO_INSTALLED = True 56 | 57 | try: 58 | import cyipopt # noqa: F401 59 | except ImportError: 60 | IS_CYIPOPT_INSTALLED = False 61 | else: 62 | IS_CYIPOPT_INSTALLED = True 63 | 64 | try: 65 | import fides # noqa: F401 66 | except ImportError: 67 | IS_FIDES_INSTALLED = False 68 | else: 69 | IS_FIDES_INSTALLED = True 70 | 71 | try: 72 | import jax # noqa: F401 73 | 
except ImportError: 74 | IS_JAX_INSTALLED = False 75 | else: 76 | IS_JAX_INSTALLED = True 77 | 78 | 79 | try: 80 | import tranquilo # noqa: F401 81 | except ImportError: 82 | IS_TRANQUILO_INSTALLED = False 83 | else: 84 | IS_TRANQUILO_INSTALLED = True 85 | 86 | 87 | try: 88 | import numba # noqa: F401 89 | except ImportError: 90 | IS_NUMBA_INSTALLED = False 91 | else: 92 | IS_NUMBA_INSTALLED = True 93 | 94 | 95 | try: 96 | import iminuit # noqa: F401 97 | except ImportError: 98 | IS_IMINUIT_INSTALLED = False 99 | else: 100 | IS_IMINUIT_INSTALLED = True 101 | 102 | 103 | try: 104 | import nevergrad # noqa: F401 105 | except ImportError: 106 | IS_NEVERGRAD_INSTALLED = False 107 | else: 108 | IS_NEVERGRAD_INSTALLED = True 109 | 110 | 111 | # ====================================================================================== 112 | # Check if pandas version is newer or equal to version 2.1.0 113 | # ====================================================================================== 114 | 115 | IS_PANDAS_VERSION_NEWER_OR_EQUAL_TO_2_1_0 = version.parse( 116 | pd.__version__ 117 | ) >= version.parse("2.1.0") 118 | -------------------------------------------------------------------------------- /src/optimagic/differentiation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/optimagic/differentiation/__init__.py -------------------------------------------------------------------------------- /src/optimagic/examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/src/optimagic/examples/__init__.py -------------------------------------------------------------------------------- /src/optimagic/examples/numdiff_functions.py: 
-------------------------------------------------------------------------------- 1 | """Functions with known gradients, jacobians or hessians. 2 | 3 | All functions take a numpy array with parameters as their first argument. 4 | 5 | Example inputs for the binary choice functions are in binary_choice_inputs.pickle. They 6 | come from the statsmodels documentation: 7 | https://tinyurl.com/y4x67vwl 8 | We pickled them so we don't need statsmodels as a dependency. 9 | 10 | """ 11 | 12 | import numpy as np 13 | from scipy.stats import norm 14 | 15 | FLOAT_EPS = np.finfo(float).eps 16 | 17 | # ====================================================================================== 18 | # Logit 19 | # ====================================================================================== 20 | 21 | 22 | def logit_loglike(params, y, x): 23 | return logit_loglikeobs(params, y, x).sum() 24 | 25 | 26 | def logit_loglikeobs(params, y, x): 27 | q = 2 * y - 1 28 | return np.log(1 / (1 + np.exp(-(q * np.dot(x, params))))) 29 | 30 | 31 | def logit_loglike_gradient(params, y, x): 32 | c = 1 / (1 + np.exp(-(np.dot(x, params)))) 33 | return np.dot(y - c, x) 34 | 35 | 36 | def logit_loglikeobs_jacobian(params, y, x): 37 | c = 1 / (1 + np.exp(-(np.dot(x, params)))) 38 | return (y - c).reshape(-1, 1) * x 39 | 40 | 41 | def logit_loglike_hessian(params, y, x): # noqa: ARG001 42 | c = 1 / (1 + np.exp(-(np.dot(x, params)))) 43 | return -np.dot(c * (1 - c) * x.T, x) 44 | 45 | 46 | # ====================================================================================== 47 | # Probit 48 | # ====================================================================================== 49 | 50 | 51 | def probit_loglike(params, y, x): 52 | return probit_loglikeobs(params, y, x).sum() 53 | 54 | 55 | def probit_loglikeobs(params, y, x): 56 | q = 2 * y - 1 57 | return np.log(np.clip(norm.cdf(q * np.dot(x, params)), FLOAT_EPS, 1)) 58 | 59 | 60 | def probit_loglike_gradient(params, y, x): 61 | xb = np.dot(x, 
params) 62 | q = 2 * y - 1 63 | c = q * norm.pdf(q * xb) / np.clip(norm.cdf(q * xb), FLOAT_EPS, 1 - FLOAT_EPS) 64 | return np.dot(c, x) 65 | 66 | 67 | def probit_loglikeobs_jacobian(params, y, x): 68 | xb = np.dot(x, params) 69 | q = 2 * y - 1 70 | c = q * norm.pdf(q * xb) / np.clip(norm.cdf(q * xb), FLOAT_EPS, 1 - FLOAT_EPS) 71 | return c.reshape(-1, 1) * x 72 | 73 | 74 | def probit_loglike_hessian(params, y, x): 75 | xb = np.dot(x, params) 76 | q = 2 * y - 1 77 | c = q * norm.pdf(q * xb) / norm.cdf(q * xb) 78 | return np.dot(-c * (c + xb) * x.T, x) 79 | -------------------------------------------------------------------------------- /src/optimagic/exceptions.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from traceback import format_exception 3 | 4 | 5 | class OptimagicError(Exception): 6 | """Base exception for optimagic which should be inherited by all exceptions.""" 7 | 8 | 9 | class TableExistsError(OptimagicError): 10 | """Exception for database tables that should not exist but do.""" 11 | 12 | 13 | class InvalidFunctionError(OptimagicError): 14 | """Exception for invalid user provided functions. 15 | 16 | This includes user functions that do not comply with interfaces, raise errors or 17 | produce NaNs. 
18 | 19 | """ 20 | 21 | 22 | class UserFunctionRuntimeError(OptimagicError): 23 | """Exception that is raised when user provided functions raise errors.""" 24 | 25 | 26 | class MissingInputError(OptimagicError): 27 | """Exception for missing user provided input.""" 28 | 29 | 30 | class AliasError(OptimagicError): 31 | """Exception for aliasing errors.""" 32 | 33 | 34 | class InvalidKwargsError(OptimagicError): 35 | """Exception for invalid user provided keyword arguments.""" 36 | 37 | 38 | class InvalidParamsError(OptimagicError): 39 | """Exception for invalid user provided parameters.""" 40 | 41 | 42 | class InvalidConstraintError(OptimagicError): 43 | """Exception for invalid user provided constraints.""" 44 | 45 | 46 | class InvalidBoundsError(OptimagicError): 47 | """Exception for invalid user provided bounds.""" 48 | 49 | 50 | class InvalidScalingError(OptimagicError): 51 | """Exception for invalid user provided scaling.""" 52 | 53 | 54 | class InvalidMultistartError(OptimagicError): 55 | """Exception for invalid user provided multistart options.""" 56 | 57 | 58 | class InvalidNumdiffOptionsError(OptimagicError): 59 | """Exception for invalid user provided numdiff options.""" 60 | 61 | 62 | class NotInstalledError(OptimagicError): 63 | """Exception when optional dependencies are needed but not installed.""" 64 | 65 | 66 | class NotAvailableError(OptimagicError): 67 | """Exception when something is not available, e.g. 
because a calculation failed.""" 68 | 69 | 70 | class InvalidAlgoOptionError(OptimagicError): 71 | """Exception for invalid user provided algorithm options.""" 72 | 73 | 74 | class InvalidAlgoInfoError(OptimagicError): 75 | """Exception for invalid user provided algorithm information.""" 76 | 77 | 78 | class StopOptimizationError(OptimagicError): 79 | def __init__(self, message, current_status): 80 | super().__init__(message) 81 | self.message = message 82 | self.current_status = current_status 83 | 84 | def __reduce__(self): 85 | """Taken from here: https://tinyurl.com/y6eeys2f.""" 86 | return (StopOptimizationError, (self.message, self.current_status)) 87 | 88 | 89 | def get_traceback(): 90 | tb = format_exception(*sys.exc_info()) 91 | if isinstance(tb, list): 92 | tb = "".join(tb) 93 | return tb 94 | 95 | 96 | INVALID_INFERENCE_MSG = ( 97 | "Taking the inverse of the information matrix failed. Only ever use this " 98 | "covariance matrix or standard errors based on it for diagnostic purposes, not for " 99 | "drawing conclusions." 100 | ) 101 | 102 | 103 | INVALID_SENSITIVITY_MSG = ( 104 | "Taking inverse failed during the calculation of sensitvity measures. Interpret " 105 | "them with caution." 106 | ) 107 | -------------------------------------------------------------------------------- /src/optimagic/logging/__init__.py: -------------------------------------------------------------------------------- 1 | from .logger import ( 2 | SQLiteLogOptions as SQLiteLogOptions, 3 | ) 4 | from .logger import ( 5 | SQLiteLogReader as SQLiteLogReader, 6 | ) 7 | from .types import ExistenceStrategy as ExistenceStrategy 8 | -------------------------------------------------------------------------------- /src/optimagic/logging/read_log.py: -------------------------------------------------------------------------------- 1 | """ 2 | Deprecated module: 3 | 4 | Functions to read data from the database used for logging. 
@dataclass
class OptimizeLogReader:
    """Deprecated alias for ``SQLiteLogReader``.

    Instantiating this class emits a ``FutureWarning`` and returns a
    ``SQLiteLogReader`` built from the given arguments instead of an
    ``OptimizeLogReader`` instance.

    """

    def __new__(cls, *args, **kwargs):  # type: ignore
        # Warn first, then delegate construction to the replacement class.
        warnings.warn(
            "OptimizeLogReader is deprecated and will be removed in a future "
            "version. Please use optimagic.logging.SQLiteLogReader instead.",
            FutureWarning,
        )
        options = SQLiteLogOptions(*args, **kwargs)
        return SQLiteLogReader.from_options(options)


def get_convergence_report(history: History) -> dict[str, dict[str, float]] | None:
    """Summarize recent relative/absolute changes of criterion and parameters.

    Args:
        history: Optimization history providing ``is_accepted``, ``fun`` and
            ``flat_params``.

    Returns:
        Mapping from window name ("one_step", "five_steps") to a dict with
        relative/absolute criterion and parameter changes, or None when fewer
        than two accepted evaluations exist.

    """
    accepted = history.is_accepted

    fun_vals = np.array(history.fun, dtype=np.float64)[accepted]
    param_vals = np.array(history.flat_params, dtype=np.float64)[accepted]

    if len(fun_vals) < 2:
        return None

    report = {}
    windows = {"one_step": 2, "five_steps": min(6, len(fun_vals))}
    for window_name, size in windows.items():
        f_rel, f_abs = _get_max_f_changes(fun_vals[-size:])
        x_rel, x_abs = _get_max_x_changes(param_vals[-size:])

        report[window_name] = {
            "relative_criterion_change": f_rel,
            "relative_params_change": x_rel,
            "absolute_criterion_change": f_abs,
            "absolute_params_change": x_abs,
        }

    return report


def _get_max_f_changes(critvals: NDArray[np.float64]) -> tuple[float, float]:
    """Return (relative, absolute) change between oldest and newest criterion value."""
    newest = critvals[-1]
    oldest = critvals[0]

    abs_change = np.abs(newest - oldest)
    # Guard the denominator so near-zero criterion values do not explode the ratio.
    rel_change = abs_change / max(np.abs(newest), 0.1)

    return rel_change, abs_change


def _get_max_x_changes(params: NDArray[np.float64]) -> tuple[float, float]:
    """Return maximal (relative, absolute) parameter distance to the newest point."""
    reference = params[-1]
    deviations = params - reference

    abs_distances = np.linalg.norm(deviations, axis=1)

    # Elementwise scale, floored at 0.1 to avoid division by tiny parameters.
    scale = np.clip(np.abs(reference), 0.1, np.inf)
    rel_distances = np.linalg.norm(deviations / scale, axis=1)

    return rel_distances.max(), abs_distances.max()
def map_method_to_algorithm(method):
    """Translate a scipy ``method`` name into the optimagic algorithm name.

    Args:
        method: Name of a scipy optimizer (e.g. "L-BFGS-B").

    Returns:
        The corresponding optimagic algorithm name (e.g. "scipy_lbfgsb").

    Raises:
        NotImplementedError: For known scipy methods that are not yet wrapped.
        ValueError: For names that are not scipy methods at all; the message
            suggests the closest valid alternative.

    """
    implemented = {
        "Nelder-Mead": "scipy_neldermead",
        "Powell": "scipy_powell",
        "CG": "scipy_conjugate_gradient",
        "BFGS": "scipy_bfgs",
        "Newton-CG": "scipy_newton_cg",
        "L-BFGS-B": "scipy_lbfgsb",
        "TNC": "scipy_truncated_newton",
        "COBYLA": "scipy_cobyla",
        "SLSQP": "scipy_slsqp",
        "trust-constr": "scipy_trust_constr",
    }

    not_implemented = {
        "dogleg": "scipy_dogleg",
        "trust-ncg": "scipy_trust_ncg",
        "trust-exact": "scipy_trust_exact",
        "trust-krylov": "scipy_trust_krylov",
        "COBYQA": "scipy_cobyqa",
    }

    if method in implemented:
        return implemented[method]

    if method in not_implemented:
        raise NotImplementedError(
            f"The method {method} is not yet wrapped in optimagic. Create an issue on "
            "https://github.com/optimagic-dev/optimagic/ if you have urgent need "
            "for this method."
        )

    alt = propose_alternatives(method, [*implemented, *not_implemented])
    raise ValueError(
        "method is an alias for algorithm to select the scipy optimizers under "
        f"their original name. {method} is not a valid scipy algorithm name. "
        f"Did you mean {alt}?"
    )


def split_fun_and_jac(fun_and_jac, target="fun"):
    """Wrap a joint fun-and-jac callable so it returns only one component.

    Args:
        fun_and_jac: Callable returning a (value, derivative) tuple.
        target: "fun" to extract the value, anything else for the derivative.

    Returns:
        A callable with the same signature that returns only the selected entry.

    """
    position = 0 if target == "fun" else 1

    @functools.wraps(fun_and_jac)
    def fun(*args, **kwargs):
        result = fun_and_jac(*args, **kwargs)
        try:
            return result[position]
        except TypeError as e:
            # A non-indexable return means the user forgot to return the tuple.
            raise InvalidFunctionError(
                "If you set `jac=True`, `fun` needs to return a tuple where the first "
                "entry is the value of your objective function and the second entry "
                "is its derivative."
            ) from e

    return fun
def get_registry(extended=False, data_col="value"):
    """Return the pytree registry, optionally tailored for optimagic params.

    Special rules: with ``extended=True`` the registry also handles
    ``pd.DataFrame``. In optimagic a data frame with a 'value' column is a 1d
    object with extra information; ``data_col`` selects which column is used as
    data (default 'value'). If ``data_col`` is missing but 'value' exists, a
    list of np.nan is returned when flattening.

    Args:
        extended (bool): If True, also register 'numpy.ndarray',
            'pandas.Series' and 'pandas.DataFrame'.
        data_col (str): Column used as data source when (un)flattening a
            params data frame.

    Returns:
        dict: The pytree registry.

    """
    extra_types = None
    if extended:
        extra_types = ["numpy.ndarray", "pandas.Series", "jax.numpy.ndarray"]

    registry = get_pybaum_registry(types=extra_types)

    if extended:
        registry[pd.DataFrame] = {
            "flatten": partial(_flatten_df, data_col=data_col),
            "unflatten": partial(_unflatten_df, data_col=data_col),
            "names": _get_df_names,
        }
    return registry


def _flatten_df(df, data_col):
    """Flatten a data frame into a list of leaves plus aux data for rebuilding."""
    has_value_col = "value" in df
    if has_value_col:
        # Params frame: take the requested column, or NaNs if it is absent.
        flat = df.get(data_col, default=np.full(len(df), np.nan)).tolist()
    else:
        # Generic 2d frame: flatten all cells row-major.
        flat = df.to_numpy().flatten().tolist()

    aux_data = {"is_value_df": has_value_col, "df": df}
    return flat, aux_data


def _unflatten_df(aux_data, leaves, data_col):
    """Rebuild a data frame from leaves using the stored template frame."""
    template = aux_data["df"]
    if aux_data["is_value_df"]:
        return template.assign(**{data_col: leaves})
    return pd.DataFrame(
        data=np.array(leaves).reshape(template.shape),
        columns=template.columns,
        index=template.index,
    )


def _get_df_names(df):
    """Return one name per leaf, derived from the index (and columns if 2d)."""
    locs = [_index_element_to_string(entry) for entry in df.index]
    if "value" in df:
        return locs
    return ["_".join([loc, col]) for loc, col in product(locs, df.columns)]


def _index_element_to_string(element):
    """Join tuple/list index entries with underscores; stringify scalars."""
    if isinstance(element, (tuple, list)):
        return "_".join(str(part) for part in element)
    return str(element)
def check_optimization_options(options, usage, algorithm_mandatory=True):
    """Check optimize_options or maximize_options for usage in estimation functions."""
    options = options if options is not None else {}

    # The options must be a dict; whether "algorithm" is required depends on usage.
    if algorithm_mandatory:
        if not isinstance(options, dict) or "algorithm" not in options:
            raise ValueError(
                "optimize_options or maximize_options must be a dict containing at "
                "least the entry 'algorithm'"
            )
    elif not isinstance(options, dict):
        raise ValueError(
            "optimize_options or maximize_options must be a dict or None."
        )

    # Criterion-related entries are passed to the estimation function directly
    # and therefore must not appear inside the options dict.
    criterion_related = {
        "criterion",
        "criterion_kwargs",
        "derivative",
        "derivative_kwargs",
    }
    found_criterion = criterion_related.intersection(options)
    if found_criterion:
        raise ValueError(
            "Entries related to the criterion function, its derivatives or keyword "
            "arguments of those functions are not valid entries of optimize_options "
            f"or maximize_options for {usage}. Remove: {found_criterion}"
        )

    # These options matter for inference as well, so they are top-level arguments.
    found_general = {"logging", "log_options", "constraints"}.intersection(options)
    if found_general:
        raise ValueError(
            "The following are not valid entries of optimize_options because they are "
            "not only relevant for minimization but also for inference: "
            f"{found_general}"
        )
@dataclass(frozen=True)
class CostModel:
    """Specification of how optimizer effort is counted and aggregated.

    Attributes:
        fun: Cost assigned to one criterion evaluation (None if not counted).
        jac: Cost assigned to one derivative evaluation (None if not counted).
        fun_and_jac: Cost of a joint fun-and-jac evaluation (None if not counted).
        label: Axis label used in plots.
        aggregate_batch_time: Callable reducing the per-evaluation times of one
            batch to a single number (e.g. ``sum`` or ``max``).

    """

    fun: float | None
    jac: float | None
    fun_and_jac: float | None
    label: str
    aggregate_batch_time: Callable[[Iterable[float]], float]

    def __post_init__(self) -> None:
        # Fail early on mis-specified aggregators instead of at plot time.
        if not callable(self.aggregate_batch_time):
            raise ValueError(
                "aggregate_batch_time must be a callable, got "
                f"{self.aggregate_batch_time}"
            )


# Measure effort by accumulated evaluation time within a batch.
evaluation_time = CostModel(
    fun=None,
    jac=None,
    fun_and_jac=None,
    label="Function time (seconds)",
    aggregate_batch_time=sum,
)

# Count criterion evaluations; derivatives are free.
fun_evaluations = CostModel(
    fun=1,
    jac=0,
    fun_and_jac=1,
    label="Number of criterion evaluations",
    aggregate_batch_time=sum,
)

# Count batches: parallel evaluations within a batch cost as much as one.
fun_batches = CostModel(
    fun=1, jac=0, fun_and_jac=1, label="Number of batches", aggregate_batch_time=max
)

# Sentinel string for wall-clock based timing (no cost model needed).
wall_time = "wall_time"


TIMING_REGISTRY = {
    "evaluation_time": evaluation_time,
    "fun_evaluations": fun_evaluations,
    "fun_batches": fun_batches,
    "wall_time": wall_time,
}
def _process_float_like(value: Any) -> float:
    """Coerce ``value`` to ``float``."""
    return float(value)


def _process_int_like(value: Any) -> int:
    """Coerce ``value`` to ``int``, accepting numeric strings like "3.0"."""
    if isinstance(value, int):
        return value
    if isinstance(value, str):
        # Route through float so strings such as "3.0" are accepted.
        return int(float(value))
    return int(value)


def _process_positive_int_like(value: Any) -> PositiveInt:
    """Coerce to int and require a strictly positive result."""
    converted = _process_int_like(value)
    if converted <= 0:
        raise ValueError(f"Value must be positive, got {converted}")
    return converted


def _process_non_negative_int_like(value: Any) -> NonNegativeInt:
    """Coerce to int and require a non-negative result."""
    converted = _process_int_like(value)
    if converted < 0:
        raise ValueError(f"Value must be non-negative, got {converted}")
    return converted


def _process_positive_float_like(value: Any) -> PositiveFloat:
    """Coerce to float and require a strictly positive result."""
    converted = _process_float_like(value)
    if converted <= 0:
        raise ValueError(f"Value must be positive, got {converted}")
    return converted


def _process_non_negative_float_like(value: Any) -> NonNegativeFloat:
    """Coerce to float and require a non-negative result."""
    converted = _process_float_like(value)
    if converted < 0:
        raise ValueError(f"Value must be non-negative, got {converted}")
    return converted


def _process_gt_one_float_like(value: Any) -> GtOneFloat:
    """Coerce to float and require a result strictly greater than one."""
    converted = _process_float_like(value)
    if converted <= 1:
        raise ValueError(f"Value must be greater than one, got {converted}")
    return converted


def _process_bool_like(value: Any) -> bool:
    """Coerce ``value`` to ``bool``, recognizing common truthy/falsy strings."""
    if isinstance(value, bool):
        return value
    if isinstance(value, str):
        lowered = value.lower()
        if lowered in {"true", "1", "yes"}:
            return True
        if lowered in {"false", "0", "no"}:
            return False
    return bool(value)


# Maps a target type (or annotated type alias) to the converter that validates
# and coerces raw user input to that type.
TYPE_CONVERTERS = {
    float: _process_float_like,
    int: _process_int_like,
    bool: _process_bool_like,
    PositiveInt: _process_positive_int_like,
    NonNegativeInt: _process_non_negative_int_like,
    PositiveFloat: _process_positive_float_like,
    NonNegativeFloat: _process_non_negative_float_like,
    GtOneFloat: _process_gt_one_float_like,
}
@pytest.fixture()
def logit_object():
    """Reference statsmodels Logit model on the spector dataset."""
    spector_data = sm.datasets.spector.load_pandas()
    spector_data.exog = sm.add_constant(spector_data.exog)
    return sm.Logit(spector_data.endog, spector_data.exog)


def test_logit_loglikes(logit_inputs, logit_object):
    params = logit_inputs["params"]["value"].to_numpy()
    expected = logit_object.loglikeobs(params)
    aaae(logit_loglike(**logit_inputs), expected)


def test_logit_jac(logit_inputs, logit_object):
    params = logit_inputs["params"]["value"].to_numpy()
    expected = logit_object.score_obs(params)
    aaae(logit_jac(**logit_inputs), expected)


def test_logit_grad(logit_inputs, logit_object):
    params = logit_inputs["params"]["value"].to_numpy()
    expected = logit_object.score(params)
    aaae(logit_grad(**logit_inputs), expected)


def test_logit_hessian(logit_inputs, logit_object):
    params = logit_inputs["params"]["value"].to_numpy()
    expected = logit_object.hessian(params)
    aaae(logit_hess(**logit_inputs), expected)
-------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/logit_hessian.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/logit_hessian.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/logit_hessian_matrix.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/logit_hessian_matrix.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/logit_jacobian.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/logit_jacobian.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/logit_jacobian_matrix.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/logit_jacobian_matrix.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/logit_sandwich.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/logit_sandwich.pickle 
-------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/probit_hessian.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/probit_hessian.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/probit_hessian_matrix.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/probit_hessian_matrix.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/probit_jacobian.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/probit_jacobian.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/probit_jacobian_matrix.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/estimagic/pickled_statsmodels_ml_covs/probit_jacobian_matrix.pickle -------------------------------------------------------------------------------- /tests/estimagic/pickled_statsmodels_ml_covs/probit_sandwich.pickle: -------------------------------------------------------------------------------- 
@pytest.fixture()
def data():
    """Small deterministic data set used by all bootstrap tests."""
    return pd.DataFrame([[1, 10], [2, 7], [3, 6], [4, 5]], columns=["x1", "x2"])


def _mean_return_series(data):
    return np.mean(data, axis=0)


def _mean_return_dict(data):
    return np.mean(data, axis=0).to_dict()


def _mean_return_array(data):
    return np.mean(data, axis=0).to_numpy()


@pytest.mark.parametrize(
    "outcome",
    [
        (functools.partial(np.mean, axis=0)),
        (_mean_return_series),
        (_mean_return_dict),
        (_mean_return_array),
    ],
)
def test_get_bootstrap_estimates_runs(outcome, data):
    # Smoke test: all supported outcome return types must be accepted.
    get_bootstrap_outcomes(
        data=data,
        outcome=outcome,
        rng=get_rng(seed=1234),
        n_draws=5,
    )


def test_bootstrap_estimates_from_indices_without_errors(data):
    got = _get_bootstrap_outcomes_from_indices(
        indices=[np.array([1, 3]), np.array([0, 2])],
        data=data,
        outcome=functools.partial(np.mean, axis=0),
        n_cores=1,
        error_handling="raise",
        batch_evaluator=joblib_batch_evaluator,
    )
    aaae(got, [[3.0, 6.0], [2, 8]])


def test_get_bootstrap_estimates_with_error_and_raise(data):
    def _always_fail(data):  # noqa: ARG001
        raise AssertionError()

    with pytest.raises(AssertionError):
        get_bootstrap_outcomes(
            data=data,
            outcome=_always_fail,
            rng=get_rng(seed=1234),
            n_draws=2,
            error_handling="raise",
        )


def test_get_bootstrap_estimates_with_all_errors_and_continue(data):
    def _always_fail(data):  # noqa: ARG001
        raise AssertionError()

    # Even with error_handling="continue", all-failures must raise (after warning).
    with pytest.warns(UserWarning):
        with pytest.raises(RuntimeError):
            get_bootstrap_outcomes(
                data=data,
                outcome=_always_fail,
                rng=get_rng(seed=1234),
                n_draws=2,
                error_handling="continue",
            )


def test_get_bootstrap_estimates_with_some_errors_and_continue(data):
    rng = get_rng(seed=1234)

    def _sometimes_fail(data):
        assert rng.uniform() > 0.5
        return data.mean()

    with pytest.warns(UserWarning):
        outcomes = get_bootstrap_outcomes(
            data=data,
            outcome=_sometimes_fail,
            rng=rng,
            n_draws=100,
            error_handling="continue",
        )

    # Roughly half of the 100 draws should survive.
    assert 30 <= len(outcomes) <= 70


def test_lollipop_plot_runs():
    frame = pd.DataFrame(
        np.arange(12).reshape(4, 3),
        index=pd.MultiIndex.from_tuples([(0, "a"), ("b", 1), ("a", "b"), (2, 3)]),
        columns=["a", "b", "c"],
    )
    for grid in (True, False):
        lollipop_plot(frame, combine_plots_in_grid=grid)
rng = get_rng(seed=1234)

jac_np = rng.uniform(size=(10, 5))
jac_pd = pd.DataFrame(jac_np)

moments_cov_np = rng.uniform(size=(10, 10)) + np.eye(10) * 2.5
moments_cov_pd = pd.DataFrame(moments_cov_np)

test_cases = itertools.product([jac_np, jac_pd], [moments_cov_np, moments_cov_pd])


@pytest.mark.parametrize("jac, moments_cov", test_cases)
def test_cov_robust_and_cov_optimal_are_equivalent_in_special_case(jac, moments_cov):
    """With the optimal weighting matrix the sandwich formula collapses."""
    weights = np.linalg.inv(moments_cov)
    if isinstance(moments_cov, pd.DataFrame):
        weights = pd.DataFrame(
            weights, index=moments_cov.index, columns=moments_cov.columns
        )

    robust_cov = cov_robust(jac, weights, moments_cov)
    optimal_cov = cov_optimal(jac, weights)

    if isinstance(robust_cov, pd.DataFrame):
        assert_frame_equal(robust_cov, optimal_cov)
    else:
        aaae(robust_cov, optimal_cov)
@pytest.mark.parametrize("name, specification", list(CARTIS_ROBERTS_PROBLEMS.items()))
def test_cartis_roberts_function_at_start_x(name, specification):  # noqa: ARG001
    """Criterion at the documented start point matches the recorded value.

    Fix: the original test asserted ``isinstance(specification["start_x"], list)``
    twice (before and after the computation); the redundant duplicate is removed.
    """
    criterion = specification["fun"]
    # start_x must be stored as a plain list so problem specs stay serializable.
    assert isinstance(specification["start_x"], list)
    contributions = criterion(np.array(specification["start_x"]))
    calculated = contributions @ contributions
    assert np.allclose(calculated, specification["start_criterion"])


@pytest.mark.parametrize("name, specification", list(CARTIS_ROBERTS_PROBLEMS.items()))
def test_cartis_roberts_function_at_solution_x(name, specification):  # noqa: ARG001
    """Criterion at the known solution (when available) matches the recorded value."""
    solution = specification["solution_x"]
    if solution is not None:
        assert isinstance(solution, list)
        contributions = specification["fun"](np.array(solution))
        calculated = contributions @ contributions
        assert np.allclose(calculated, specification["solution_criterion"], atol=1e-7)


def test_get_start_points_bdvalues():
    expected = np.array([-0.1389, -0.2222, -0.2500, -0.2222, -0.1389])
    result = get_start_points_bdvalues(5)
    assert_array_almost_equal(expected, result, decimal=4)


def test_get_start_points_msqrta():
    # Reference values produced with the original MATLAB implementation.
    matlab_mat = np.array(
        [
            [0.8415, -0.7568, 0.4121, -0.2879, -0.1324],
            [-0.9918, -0.9538, 0.9200, -0.6299, -0.5064],
            [0.9988, -0.4910, -0.6020, 0.9395, -0.9301],
            [-0.9992, -0.0265, -0.4041, 0.2794, -0.8509],
            [0.9235, 0.1935, 0.9365, -0.8860, 0.1760],
        ]
    )
    expected = 0.2 * matlab_mat.flatten()
    result = get_start_points_msqrta(5)
    assert_array_almost_equal(result, expected, decimal=4)
import numpy as np
import pytest

from optimagic.benchmarking.more_wild import (
    MORE_WILD_PROBLEMS,
    get_start_points_mancino,
)


@pytest.mark.parametrize("name, specification", list(MORE_WILD_PROBLEMS.items()))
def test_more_wild_function_at_start_x(name, specification):  # noqa: ARG001
    """Check criterion values at the stored start and (optional) solution point."""
    _criterion = specification["fun"]
    # start_x must be stored as a plain list (not an array) in the problem spec.
    assert isinstance(specification["start_x"], list)
    _x = np.array(specification["start_x"])
    _contributions = _criterion(_x)
    # These are least-squares problems: the criterion is the sum of squared
    # residual contributions.
    calculated = _contributions @ _contributions
    expected = specification["start_criterion"]
    assert np.allclose(calculated, expected)

    if specification.get("solution_x") is not None:
        assert isinstance(specification["solution_x"], list)
        _x = np.array(specification["solution_x"])
        _contributions = _criterion(_x)
        calculated = _contributions @ _contributions
        expected = specification["solution_criterion"]
        assert np.allclose(calculated, expected, rtol=1e-8, atol=1e-8)


def test_get_start_points_mancino():
    """Compare the mancino start vector against 4-decimal reference values.

    The reference used to be wrapped in an accidental 1-tuple (stray trailing
    comma) so the comparison only worked through broadcasting; the tuple was
    removed.
    """
    expected = np.array([102.4824, 96.3335, 90.4363, 84.7852, 79.3747])
    result = get_start_points_mancino(5)
    assert np.allclose(expected, result)
@pytest.mark.parametrize("distribution", NOISE_DISTRIBUTIONS)
def test_sample_from_distribution(distribution):
    """Sampled moments must match the requested ones for every distribution.

    Draws a large sample and checks mean, standard deviation and the average
    off-diagonal correlation against the requested targets.
    """
    target_mean, target_std, target_corr = 0.33, 0.55, 0.44
    sample = _sample_from_distribution(
        distribution=distribution,
        mean=target_mean,
        std=target_std,
        size=(100_000, 5),
        correlation=target_corr,
        rng=get_rng(seed=0),
    )

    corr_matrix = pd.DataFrame(sample).corr().to_numpy().round(2)
    # Average only the off-diagonal entries; the diagonal is trivially 1.
    off_diagonal = corr_matrix[~np.eye(len(corr_matrix)).astype(bool)]

    assert np.allclose(sample.mean(), target_mean, atol=0.001)
    assert np.allclose(sample.std(), target_std, atol=0.001)
    assert np.allclose(off_diagonal.mean(), target_corr, atol=0.001)
def test_run_benchmark_list_options():
    """run_benchmark also accepts a plain list of algorithm names."""
    problems = get_benchmark_problems("example")
    selected_names = list(problems)[:2]
    selected = {key: problems[key] for key in selected_names}
    algorithms = ["scipy_lbfgsb", "scipy_neldermead"]

    result = run_benchmark(problems=selected, optimize_options=algorithms)

    # One result entry per (problem, algorithm) combination.
    expected_keys = {
        ("helical_valley_good_start", "scipy_lbfgsb"),
        ("rosenbrock_good_start", "scipy_lbfgsb"),
        ("helical_valley_good_start", "scipy_neldermead"),
        ("rosenbrock_good_start", "scipy_neldermead"),
    }
    assert set(result) == expected_keys
@pytest.fixture()
def jacobian_inputs():
    """Contrived finite-difference inputs built from two linear maps.

    For a linear function the finite-difference formulae are exact, so the
    expected Jacobians are known in closed form.
    """
    step_magnitudes = np.array([[0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2]])
    steps = Steps(pos=step_magnitudes, neg=-step_magnitudes)

    jac_small = np.arange(1, 13).reshape(3, 4)
    jac_large = 1.1 * jac_small

    # Function evaluations at x0 +/- h * e_i for every unit direction e_i,
    # where x0 = 0 and h is the step size belonging to each Jacobian.
    evals = Evals(
        pos=np.array(
            [jac_small @ (0.1 * np.eye(4)), jac_large @ (0.2 * np.eye(4))]
        ),
        neg=np.array(
            [jac_small @ (-0.1 * np.eye(4)), jac_large @ (-0.2 * np.eye(4))]
        ),
    )

    return {
        "evals": evals,
        "steps": steps,
        "f0": np.zeros(3),
        "expected_jac": np.array([jac_small, jac_large]),
    }
def test_pre_process_numdiff_options_dict_case():
    """A plain options dict is converted into an equivalent NumdiffOptions."""
    raw_options = {
        "method": "central",
        "step_size": 0.1,
        "batch_evaluator": "pathos",
    }
    expected = NumdiffOptions(
        method="central", step_size=0.1, batch_evaluator="pathos"
    )
    assert pre_process_numdiff_options(raw_options) == expected
def test_numdiff_options_invalid_n_cores():
    """A negative core count is rejected with a descriptive error message."""
    expected_message = "Invalid numdiff `n_cores`:"
    with pytest.raises(InvalidNumdiffOptionsError, match=expected_message):
        NumdiffOptions(n_cores=-1)
def test_raise_on_missing_id():
    """Every *WithId record type must reject construction without a rowid."""
    cases = [
        (IterationStateWithId, (1, 2, 3, True, None, None, None)),
        (StepResultWithId, ("n", "optimization", "skipped")),
        (ProblemInitializationWithId, ("minimize", 2)),
    ]
    for record_type, args in cases:
        with pytest.raises(ValueError, match="rowid"):
            record_type(*args)
def test_history_is_too_short():
    """No convergence report when the accepted history has a single entry.

    The first value is already the best for this maximization, so the history
    of accepted parameters contains only one entry.
    """
    history = History(
        direction=Direction.MAXIMIZE,
        params=[{"a": 0}, {"a": 2.1}, {"a": 2.5}, {"a": 2.0}],
        fun=[5, 4.1, 4.4, 4.0],
        start_time=[0, 1, 2, 3],
        stop_time=[1, 2, 3, 4],
        task=[EvalTask.FUN] * 4,
        batches=[0, 1, 2, 3],
    )
    assert get_convergence_report(history) is None
@pytest.mark.parametrize("seed", range(10))
def test_penalty_aggregations(seed):
    """Scalar, likelihood and least-squares penalties must agree in value."""
    rng = get_rng(seed)
    x = rng.uniform(size=5)
    x0 = rng.uniform(size=5)
    slope, constant, dim_out = 0.3, 3, 10

    scalar, _ = _scalar_penalty(x, constant, slope, x0)
    likelihood, _ = _likelihood_penalty(x, constant, slope, x0, dim_out)
    residuals, _ = _penalty_residuals(x, constant, slope, x0, dim_out)

    # Summed likelihood contributions and summed squared residuals must both
    # reproduce the scalar penalty value.
    assert np.isclose(scalar.value, likelihood.value.sum())
    assert np.isclose(scalar.value, (residuals.value**2).sum())
@pytest.mark.parametrize("seed", range(10))
def test_penalty_aggregations_via_get_error_penalty(seed):
    """Penalty functions from get_error_penalty_function agree across levels."""
    rng = get_rng(seed)
    x = rng.uniform(size=5)
    x0 = rng.uniform(size=5)
    penalty_spec = {"slope": 0.3, "constant": 3}

    def _make_penalty_func(start_criterion, solver_type):
        # Everything except start_criterion and solver_type is shared across
        # the three aggregation levels.
        return get_error_penalty_function(
            start_x=x0,
            start_criterion=start_criterion,
            error_penalty=penalty_spec,
            solver_type=solver_type,
            direction=Direction.MINIMIZE,
        )

    scalar_func = _make_penalty_func(
        ScalarFunctionValue(3), AggregationLevel.SCALAR
    )
    contribs_func = _make_penalty_func(
        LikelihoodFunctionValue(np.ones(10)), AggregationLevel.LIKELIHOOD
    )
    root_contribs_func = _make_penalty_func(
        LeastSquaresFunctionValue(np.ones(10)), AggregationLevel.LEAST_SQUARES
    )

    scalar, _ = scalar_func(x)
    contribs, _ = contribs_func(x)
    root_contribs, _ = root_contribs_func(x)

    # Summed likelihood contributions and summed squared residuals must both
    # reproduce the scalar penalty value.
    assert np.isclose(scalar.value, contribs.value.sum())
    assert np.isclose(scalar.value, (root_contribs.value**2).sum())
def test_with_optional_fun_argument():
    """minimize works when only fun_and_jac is provided and fun is omitted."""
    result = minimize(
        fun_and_jac=lambda x: (x @ x, 2 * x),
        params=np.arange(5),
        algorithm="scipy_lbfgsb",
    )
    aaae(result.x, np.zeros(5))
@pytest.fixture()
def base_inputs():
    """Minimal keyword arguments needed to construct an OptimizeResult."""
    return {
        "params": np.ones(3),
        "fun": 500,
        "start_fun": 1000,
        "start_params": np.full(3, 10),
        "direction": "minimize",
        "message": "OPTIMIZATION TERMINATED SUCCESSFULLY",
        "success": True,
        "n_fun_evals": 100,
        "n_jac_evals": 0,
        "n_iterations": 80,
        "history": {"criterion": list(range(10))},
        "algorithm": "scipy_lbfgsb",
        "n_free": 2,
    }
@pytest.mark.parametrize("params", PARAMS + SCALAR_PARAMS)
def test_tree_params_numerical_derivative_scalar_criterion(params):
    """A numerically differentiated scalar sum-of-squares is driven to zero.

    Runs for every supported params container (dicts, arrays, lists, tuples,
    Series, nested trees and scalars) and compares the flattened solution to
    the zero vector.
    """
    result = minimize(
        fun=sos_scalar,
        params=params,
        algorithm="scipy_lbfgsb",
    )

    flat_start = np.array(tree_just_flatten(params, registry=REGISTRY))
    flat_solution = np.array(tree_just_flatten(result.params, registry=REGISTRY))
    aaae(flat_solution, np.zeros_like(flat_start))
def test_sum_or_none():
    """_sum_or_none sums its entries but yields None when any entry is None."""
    complete = [1, 2, 3]
    with_missing = [1, 2, None]
    assert _sum_or_none(complete) == 6
    assert _sum_or_none(with_missing) is None
def test_convert_str_to_int():
    """Known string keys map to their value; invalid inputs raise ValueError."""
    mapping = {"a": 1, "b": 3}

    assert _convert_str_to_int(mapping, "a") == 1
    # 1 is accepted and returned as-is while 5 below is rejected — presumably
    # only ints that appear among the mapped values are valid; confirm in
    # _convert_str_to_int itself.
    assert _convert_str_to_int(mapping, 1) == 1

    for invalid in (5, "hello"):
        with pytest.raises(ValueError):
            _convert_str_to_int(mapping, invalid)
def test_criterion_with_runtime_error_derivative_free():
    """A RuntimeError in fun surfaces as UserFunctionRuntimeError.

    Uses a derivative-free optimizer, so the failure happens during a plain
    function evaluation rather than inside a numerical derivative.
    """

    def fail_below_one(params):
        x = params["value"].to_numpy()
        if x.sum() < 1:
            raise RuntimeError("Great error message")

        return x @ x

    start = pd.DataFrame(np.full((3, 1), 10), columns=["value"])
    snippet = "when evaluating fun during optimization"
    with pytest.raises(UserFunctionRuntimeError, match=snippet):
        minimize(fail_below_one, start, "scipy_neldermead")
# Constraint specifications under test, keyed by case name. A value is either a
# single optimagic constraint object or a list of constraints that must hold
# jointly.  NOTE(review): `regularization=0.1` presumably enforces a minimum
# distance of the implied bounds — confirm against the constraint docs.
CONSTR_INFO = {
    "cov_bounds_distance": om.FlatCovConstraint(regularization=0.1),
    "sdcorr_bounds_distance": om.FlatSDCorrConstraint(regularization=0.1),
    # Monotonicity combined with fixing one parameter; position 2 is selected
    # by both constraints, so they have to be consistent with each other.
    "fixed_and_decreasing": [
        om.DecreasingConstraint(lambda x: x.loc[[1, 2, 3, 4]]),
        om.FixedConstraint(lambda x: x.loc[2]),
    ],
    "fixed_and_increasing": [
        om.IncreasingConstraint(lambda x: x.loc[[0, 1, 2, 3]]),
        om.FixedConstraint(lambda x: x.loc[2]),
    ],
}


# Start values for each case, in the same order the constraint selectors use.
START_INFO = {
    "cov_bounds_distance": [1, 0.1, 2, 0.2, 0.3, 3],
    "sdcorr_bounds_distance": [1, 2, 3, 0.1, 0.2, 0.3],
    "fixed_and_decreasing": [1, 4, 4, 2, 1],
    "fixed_and_increasing": [1, 2, 3, 4, 1],
}

# Expected solution of the constrained sum-of-squares problem for each case.
RES_INFO = {
    "cov_bounds_distance": [0.1, 0, 0.1, 0, 0, 0.1],
    "sdcorr_bounds_distance": [0.1, 0.1, 0.1, 0, 0, 0.0],
    "fixed_and_decreasing": [0, 4, 4, 0, 0],
    "fixed_and_increasing": [0, 0, 3, 3, 0],
}


# Every case runs once with the analytical gradient and once without (jac=None,
# i.e. numerical derivatives).
derivatives = [sos_gradient, None]
constr_names = list(CONSTR_INFO.keys())
@pytest.mark.parametrize("derivative, constr_name", test_cases)
def test_with_covariance_constraint_bounds_distance(derivative, constr_name):
    """Minimizing the sum of squares under each constraint reproduces RES_INFO."""
    start_params = pd.Series(START_INFO[constr_name], name="value").to_frame()

    result = minimize(
        fun=sos_scalar,
        params=start_params,
        algorithm="scipy_lbfgsb",
        jac=derivative,
        constraints=CONSTR_INFO[constr_name],
    )
    assert result.success, "scipy_lbfgsb did not converge."

    aaae(
        result.params["value"].to_numpy(),
        np.array(RES_INFO[constr_name]),
        decimal=4,
    )
def test_optimization_with_existing_database():
    """ExistenceStrategy.RAISE must fail when the database already exists.

    The first minimize call (re-)creates ``logging.db`` via REPLACE; the second
    call then has to raise ``FileExistsError`` because RAISE forbids reusing an
    existing database.  (Renamed from the garbled
    ``test_optimization_with_existing_exsting_database``; pytest discovers test
    functions by prefix, so the rename is safe.)
    """
    params = pd.Series([1, 2, 3], name="value").to_frame()

    minimize(
        sos_ls,
        params,
        algorithm="scipy_lbfgsb",
        logging=SQLiteLogOptions(
            "logging.db", if_database_exists=ExistenceStrategy.REPLACE
        ),
    )

    with pytest.raises(FileExistsError):
        minimize(
            sos_ls,
            params,
            algorithm="scipy_lbfgsb",
            logging=SQLiteLogOptions(
                "logging.db", if_database_exists=ExistenceStrategy.RAISE
            ),
        )
project_x_onto_null: true 60 | theta1: 1.0e-05 61 | x_accepted: 62 | - 0.15 63 | - 0.008 64 | - 0.01 65 | -------------------------------------------------------------------------------- /tests/optimagic/optimizers/_pounders/fixtures/find_affine_points_nonzero_ii.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | c: 10 3 | delta: 0.025 4 | history_x: 5 | - - 0.15 6 | - 0.008 7 | - 0.01 8 | - - 0.25 9 | - 0.008 10 | - 0.01 11 | - - 0.15 12 | - 0.108 13 | - 0.01 14 | - - 0.15 15 | - 0.008 16 | - 0.11 17 | - - 0.1596177824551 18 | - -0.07539624732067 19 | - 0.08766385239892 20 | - - 0.2 21 | - 0.008531162120637 22 | - -0.002952684076318 23 | - - 0.1505141617677 24 | - -0.04199731338289 25 | - 0.009934485345754 26 | - - 0.1374618789969 27 | - 0.007934485345754 28 | - -0.03840238867598 29 | - - 0.1505250437069 30 | - 0.007964908595663 31 | - 0.01275913089388 32 | model_improving_points: 33 | - - 0.021001748277756915 34 | - 0.0 35 | - 0.0 36 | - - -0.001403656173463233 37 | - 0.0 38 | - 0.0 39 | - - 0.11036523575529027 40 | - 0.0 41 | - 0.0 42 | model_improving_points_expected: 43 | - - 0.021001748277756915 44 | - -0.5015248401252026 45 | - 0.02056647070703521 46 | - - -0.001403656173463233 47 | - -0.0026205861698429256 48 | - -1.9998925353155312 49 | - - 0.11036523575529027 50 | - -1.9360955470393286 51 | - -0.0026205861698429256 52 | model_indices: 53 | - 8 54 | - 7 55 | - 6 56 | - 5 57 | - 4 58 | - 3 59 | - 2 60 | model_indices_expected: 61 | - 8 62 | - 7 63 | - 6 64 | - 5 65 | - 4 66 | - 3 67 | - 2 68 | n: 3 69 | n_modelpoints: 1 70 | n_modelpoints_expected: 3 71 | project_x_onto_null: true 72 | theta1: 1.0e-05 73 | x_accepted: 74 | - 0.15 75 | - 0.008 76 | - 0.01 77 | -------------------------------------------------------------------------------- /tests/optimagic/optimizers/_pounders/fixtures/find_affine_points_zero_i.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | c: 
1.7320508075688772 3 | delta: 0.05 4 | history_x: 5 | - - 0.15 6 | - 0.008 7 | - 0.01 8 | - - 0.25 9 | - 0.008 10 | - 0.01 11 | - - 0.15 12 | - 0.108 13 | - 0.01 14 | - - 0.15 15 | - 0.008 16 | - 0.11 17 | - - 0.1596177824551 18 | - -0.07539624732067 19 | - 0.08766385239892 20 | - - 0.2 21 | - 0.008531162120637 22 | - -0.002952684076318 23 | - - 0.1505141617677 24 | - -0.04199731338289 25 | - 0.009934485345754 26 | - - 0.1374618789969 27 | - 0.007934485345754 28 | - -0.03840238867598 29 | model_improving_points: 30 | - - 0.0 31 | - 0.0 32 | - 0.0 33 | - - 0.0 34 | - 0.0 35 | - 0.0 36 | - - 0.0 37 | - 0.0 38 | - 0.0 39 | model_improving_points_expected: 40 | - - -0.2507624200626013 41 | - 0.010283235353517606 42 | - 1.0000000000000002 43 | - - -0.0013102930849214628 44 | - -0.9999462676577656 45 | - 0.010623242412742123 46 | - - -0.9680477735196643 47 | - -0.0013102930849214628 48 | - -0.2590536815263693 49 | model_indices: 50 | - 5 51 | - 6 52 | - 7 53 | - 2 54 | - 1 55 | - 0 56 | - 0 57 | model_indices_expected: 58 | - 7 59 | - 6 60 | - 5 61 | - 2 62 | - 1 63 | - 0 64 | - 0 65 | n: 3 66 | n_modelpoints: 0 67 | n_modelpoints_expected: 3 68 | project_x_onto_null: false 69 | theta1: 1.0e-05 70 | x_accepted: 71 | - 0.15 72 | - 0.008 73 | - 0.01 74 | -------------------------------------------------------------------------------- /tests/optimagic/optimizers/_pounders/fixtures/find_affine_points_zero_ii.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | c: 1.7320508075688772 3 | delta: 0.05 4 | history_x: 5 | - - 0.15 6 | - 0.008 7 | - 0.01 8 | - - 0.25 9 | - 0.008 10 | - 0.01 11 | - - 0.15 12 | - 0.108 13 | - 0.01 14 | - - 0.15 15 | - 0.008 16 | - 0.11 17 | - - 0.1596177824551 18 | - -0.07539624732067 19 | - 0.08766385239892 20 | - - 0.2 21 | - 0.008531162120637 22 | - -0.002952684076318 23 | - - 0.1505141617677 24 | - -0.04199731338289 25 | - 0.009934485345754 26 | - - 0.1374618789969 27 | - 0.007934485345754 28 | - 
-0.03840238867598 29 | - - 0.1505250437069 30 | - 0.007964908595663 31 | - 0.01275913089388 32 | - - 0.149883507892 33 | - 0.008098080768719 34 | - 0.009146244784311 35 | - - 0.1716712756093 36 | - -0.003385426549061 37 | - 0.004854131368058 38 | model_improving_points: 39 | - - 0.0 40 | - 0.0 41 | - 0.0 42 | - - 0.0 43 | - 0.0 44 | - 0.0 45 | - - 0.0 46 | - 0.0 47 | - 0.0 48 | model_improving_points_expected: 49 | - - 0.4357553543466791 50 | - 0.012830716298993794 51 | - -0.24843257790248596 52 | - - -0.22967014635560642 53 | - -0.0026634434611177635 54 | - -0.0032719084593076098 55 | - - -0.0858422683250766 56 | - 0.07225772219141803 57 | - -0.9509726692058914 58 | model_indices: 59 | - 8 60 | - 0 61 | - 10 62 | - 6 63 | - 5 64 | - 4 65 | - 3 66 | model_indices_expected: 67 | - 10 68 | - 8 69 | - 7 70 | - 6 71 | - 5 72 | - 4 73 | - 3 74 | n: 3 75 | n_modelpoints: 0 76 | n_modelpoints_expected: 3 77 | project_x_onto_null: false 78 | theta1: 1.0e-05 79 | x_accepted: 80 | - 0.149883507892 81 | - 0.008098080768719 82 | - 0.009146244784311 83 | -------------------------------------------------------------------------------- /tests/optimagic/optimizers/_pounders/fixtures/find_affine_points_zero_iii.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | c: 1.7320508075688772 3 | delta: 0.05 4 | history_x: 5 | - - 0.15 6 | - 0.008 7 | - 0.01 8 | - - 0.25 9 | - 0.008 10 | - 0.01 11 | - - 0.15 12 | - 0.108 13 | - 0.01 14 | - - 0.15 15 | - 0.008 16 | - 0.11 17 | - - 0.1596177824551 18 | - -0.07539624732067 19 | - 0.08766385239892 20 | - - 0.2 21 | - 0.008531162120637 22 | - -0.002952684076318 23 | model_improving_points: 24 | - - 0.0 25 | - 0.0 26 | - 0.0 27 | - - 0.0 28 | - 0.0 29 | - 0.0 30 | - - 0.0 31 | - 0.0 32 | - 0.0 33 | model_improving_points_expected: 34 | - - 1.0 35 | - 0.0 36 | - 0.0 37 | - - 0.010623242412742123 38 | - 0.0 39 | - 0.0 40 | - - -0.2590536815263693 41 | - 0.0 42 | - 0.0 43 | model_indices: 44 | - 0 45 | 
def sphere(x):
    """Sum of squared entries of ``x`` — the classic convex test function."""
    squared = x**2
    return squared.sum()


def sphere_grad(x):
    """Analytical gradient of :func:`sphere`."""
    return x * 2
def test_convert_bounds_mixed():
    """Finite bound entries survive; infinite entries map to ``None``."""
    lower_bounds = np.array([-np.inf, 0.0, 1.0])
    upper_bounds = np.array([1.0, np.inf, 2.0])

    limits = _convert_bounds_to_minuit_limits(lower_bounds, upper_bounds)

    expected = [(None, 1.0), (0.0, None), (1.0, 2.0)]
    assert len(limits) == len(expected)
    for got, want in zip(limits, expected):
        assert got == want
def test_change_evals_per_point_interface_func():
    """The wrapper maps nag-style kwargs to optimagic's argument names."""

    def capture(
        upper_trustregion_radius, lower_trustregion_radius, n_iterations, n_resets
    ):
        # Echo the received values so the name mapping can be inspected.
        return (
            upper_trustregion_radius,
            lower_trustregion_radius,
            n_iterations,
            n_resets,
        )

    wrapped = _change_evals_per_point_interface(capture)
    # delta -> upper radius, rho -> lower radius, iter -> n_iterations,
    # nrestarts -> n_resets.
    assert wrapped(delta=0, rho=1, iter=2, nrestarts=3) == (0, 1, 2, 3)
# function to test
def sphere(x, *args, **kwargs):  # noqa: ARG001
    """Sum of squares of ``x``; extra positional/keyword args are ignored."""
    return (x * x).sum()
# test if maximum number of iterations works
def test_neldermead_stop_after_one_iteration():
    """stopping_maxiter=1 halts neldermead_parallel after one iteration.

    Renamed from ``test_fides_stop_after_one_iteration`` — the old name was a
    copy-paste leftover; the test exercises the parallel Nelder-Mead
    implementation, not fides.  Pytest discovers tests by prefix, so the
    rename is safe.
    """
    res = neldermead_parallel(
        criterion=sphere,
        x=np.array([1, -5, 3]),
        stopping_maxiter=1,
    )
    # A single iteration cannot reach the optimum, so the run must not report
    # success, and exactly one iteration must have been performed.
    assert not res["success"]
    assert res["n_iterations"] == 1
def test_replace_pairwise_equality_by_equality():
    """A pairwise-equality constraint becomes one equality per position."""
    pairwise = {"indices": [[0, 1], [2, 3]], "type": "pairwise_equality"}

    got = _replace_pairwise_equality_by_equality([pairwise])

    # Positions are zipped across the two index lists: (0, 2) and (1, 3).
    assert got == [
        {"index": [0, 2], "type": "equality"},
        {"index": [1, 3], "type": "equality"},
    ]
# Expected flat params after scaling, keyed by the scaling method under test.
# The unscaled params in test_get_scale_converter_active are values 0..5 with
# bounds value -/+ 1 and clipping_value 0.5; each InternalParams below is what
# the respective method should map them to.
TEST_CASES = {
    # "start_values" divides by max(|start value|, clipping_value), so e.g.
    # index 0 becomes 0 / 0.5 = 0 with lower bound -1 / 0.5 = -2.
    "start_values": InternalParams(
        values=np.array([0, 1, 1, 1, 1, 1]),
        lower_bounds=np.array([-2, 0, 0.5, 2 / 3, 3 / 4, 4 / 5]),
        upper_bounds=np.array([2, 2, 1.5, 4 / 3, 5 / 4, 6 / 5]),
        names=None,
    ),
    # "bounds" maps every parameter into [0, 1]; the start value sits exactly
    # in the middle of its (lower, upper) interval, hence 0.5 everywhere.
    "bounds": InternalParams(
        values=np.full(6, 0.5),
        lower_bounds=np.zeros(6),
        upper_bounds=np.ones(6),
        names=None,
    ),
}

# Pytest ids and the (method, expected) pairs derived from the dict above.
IDS = list(TEST_CASES)
PARAMETRIZATION = list(TEST_CASES.items())
def test_scale_conversion_fast_path():
    """Without scaling options the converter must be the identity."""
    unscaled = InternalParams(
        values=np.arange(6),
        lower_bounds=np.arange(6) - 1,
        upper_bounds=np.arange(6) + 1,
        names=list("abcdef"),
    )

    converter, scaled = get_scale_converter(internal_params=unscaled, scaling=None)

    # Values and bounds pass through unchanged.
    aae(unscaled.values, scaled.values)
    aae(unscaled.lower_bounds, scaled.lower_bounds)
    aae(unscaled.upper_bounds, scaled.upper_bounds)

    # All three conversion functions are no-ops.
    identity = np.arange(6)
    aae(converter.params_to_internal(identity), identity)
    aae(converter.params_from_internal(identity), identity)
    aae(converter.derivative_to_internal(np.ones(3)), np.ones(3))
def test_scaling_options_invalid_magnitude_value_range():
    """Non-positive magnitudes are rejected at construction time."""
    expected_msg = "Invalid scaling magnitude:"
    with pytest.raises(InvalidScalingError, match=expected_msg):
        ScalingOptions(magnitude=-1)
@pytest.mark.parametrize("func_eval", FUNC_EVALS)
def test_tree_converter_scalar_solver(params, upper_bounds, func_eval):
    """Flattening a nested params tree works for every scalar func_eval shape.

    ``params`` is the nested fixture ([0, array([1, 2]), {"a": df, "b": 5}], 6)
    whose flat values are 0..6; ``upper_bounds`` mirrors that structure and
    supplies 11 for the first array element and 100 for the trailing scalar.
    """
    bounds = Bounds(
        upper=upper_bounds,
    )
    converter, flat_params = get_tree_converter(
        params=params,
        bounds=bounds,
        func_eval=func_eval,
        derivative_eval=params,
        solver_type=AggregationLevel.SCALAR,
    )

    # Flat layout is depth-first over the tree; the DataFrame contributes its
    # "value" column, and its "lower_bound" column becomes the lower bounds
    # (positions 3 and 4).  Unset bounds default to +/- inf.
    expected_values = np.arange(7)
    expected_lb = np.array([-np.inf, -np.inf, -np.inf, 0, 0, -np.inf, -np.inf])
    expected_ub = np.array([np.inf, 11, np.inf, np.inf, np.inf, np.inf, 100])
    expected_names = ["0_0", "0_1_0", "0_1_1", "0_2_a_c", "0_2_a_d", "0_2_b", "1"]

    aae(flat_params.values, expected_values)
    aae(flat_params.lower_bounds, expected_lb)
    aae(flat_params.upper_bounds, expected_ub)
    assert flat_params.names == expected_names

    # Round trip: flatten reproduces the flat values and unflatten restores
    # the original nested structure (spot-checked on the first two leaves).
    aae(converter.params_flatten(params), np.arange(7))
    unflat = converter.params_unflatten(np.arange(7))
    assert unflat[0][0] == params[0][0]
    aae(unflat[0][1], params[0][1])
np.arange(6).reshape(2, 3)
        func_eval = np.ones(2)

    converter, flat_params = get_tree_converter(
        params=np.arange(3),
        bounds=None,
        func_eval=func_eval,
        derivative_eval=derivative_eval,
        solver_type=solver_type,
    )

    # Fast path: values, bounds and names are passed through essentially unchanged.
    aae(flat_params.values, np.arange(3))
    aae(flat_params.lower_bounds, np.full(3, -np.inf))
    aae(flat_params.upper_bounds, np.full(3, np.inf))
    assert flat_params.names == list(map(str, range(3)))

    aae(converter.params_flatten(np.arange(3)), np.arange(3))
    aae(converter.params_unflatten(np.arange(3)), np.arange(3))
    aae(converter.derivative_flatten(derivative_eval), derivative_eval)
--------------------------------------------------------------------------------
/tests/optimagic/parameters/test_tree_registry.py:
--------------------------------------------------------------------------------
# Tests for optimagic's extended pybaum registry, which teaches tree_flatten /
# tree_unflatten / leaf_names how to handle DataFrames.
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
from pybaum import leaf_names, tree_flatten, tree_unflatten

from optimagic.parameters.tree_registry import get_registry


@pytest.fixture()
def value_df():
    # DataFrame WITH a "value" column: only that column counts as leaves.
    df = pd.DataFrame(
        np.arange(6).reshape(3, 2),
        columns=["a", "value"],
        index=["alpha", "beta", "gamma"],
    )
    return df


@pytest.fixture()
def other_df():
    # DataFrame WITHOUT a "value" column: every numeric cell is a leaf.
    df = pd.DataFrame(index=["alpha", "beta", "gamma"])
    df["b"] = np.arange(3).astype(np.int16)
    df["c"] = 3.14
    return df


def test_flatten_df_with_value_column(value_df):
    registry = get_registry(extended=True)
    flat, _ = tree_flatten(value_df, registry=registry)
    # Only the "value" column (1, 3, 5) is flattened; column "a" is ignored.
    assert flat == [1, 3, 5]


def test_unflatten_df_with_value_column(value_df):
    registry = get_registry(extended=True)
    _, treedef = tree_flatten(value_df, registry=registry)
    unflat = tree_unflatten(treedef, [10, 11, 12], registry=registry)
    assert 
unflat.equals(value_df.assign(value=[10, 11, 12]))


def test_leaf_names_df_with_value_column(value_df):
    registry = get_registry(extended=True)
    names = leaf_names(value_df, registry=registry)
    # With a "value" column the index labels alone name the leaves.
    assert names == ["alpha", "beta", "gamma"]


def test_flatten_partially_numeric_df(other_df):
    registry = get_registry(extended=True)
    flat, _ = tree_flatten(other_df, registry=registry)
    # Row-major flattening of both columns: (b, c) per row.
    assert flat == [0, 3.14, 1, 3.14, 2, 3.14]


def test_unflatten_partially_numeric_df(other_df):
    registry = get_registry(extended=True)
    _, treedef = tree_flatten(other_df, registry=registry)
    unflat = tree_unflatten(treedef, [1, 2, 3, 4, 5, 6], registry=registry)
    other_df = other_df.assign(b=[1, 3, 5], c=[2, 4, 6])
    # check_dtype=False: unflattening may not restore the original int16 dtype.
    assert_frame_equal(unflat, other_df, check_dtype=False)


def test_leaf_names_partially_numeric_df(other_df):
    registry = get_registry(extended=True)
    names = leaf_names(other_df, registry=registry)
    # Without a "value" column, names combine index label and column name.
    assert names == ["alpha_b", "alpha_c", "beta_b", "beta_c", "gamma_b", "gamma_c"]
--------------------------------------------------------------------------------
/tests/optimagic/shared/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/optimagic-dev/optimagic/fe0dcf7b7fc5ff86872dd210fabefbbb320e25a2/tests/optimagic/shared/__init__.py
--------------------------------------------------------------------------------
/tests/optimagic/shared/test_process_user_functions.py:
--------------------------------------------------------------------------------
# Tests for user-function preprocessing: kwarg partialling and inference of the
# aggregation level (scalar / least-squares / likelihood) from decorators or
# return-type annotations.
import numpy as np
import pytest
from numpy.typing import NDArray

from optimagic import mark
from optimagic.exceptions import InvalidKwargsError
from optimagic.optimization.fun_value import (
    LeastSquaresFunctionValue,
    LikelihoodFunctionValue,
    ScalarFunctionValue,
)
from 
optimagic.shared.process_user_function import ( 13 | get_kwargs_from_args, 14 | infer_aggregation_level, 15 | partial_func_of_params, 16 | ) 17 | from optimagic.typing import AggregationLevel 18 | 19 | 20 | def test_partial_func_of_params(): 21 | def f(params, b, c): 22 | return params + b + c 23 | 24 | func = partial_func_of_params(f, {"b": 2, "c": 3}) 25 | 26 | assert func(1) == 6 27 | 28 | 29 | def test_partial_func_of_params_too_many_kwargs(): 30 | def f(params, b, c): 31 | return params + b + c 32 | 33 | with pytest.raises(InvalidKwargsError): 34 | partial_func_of_params(f, {"params": 1, "b": 2, "c": 3}) 35 | 36 | 37 | def test_partial_func_of_params_too_few_kwargs(): 38 | def f(params, b, c): 39 | return params + b + c 40 | 41 | with pytest.raises(InvalidKwargsError): 42 | partial_func_of_params(f, {"c": 3}) 43 | 44 | 45 | def test_get_kwargs_from_args(): 46 | def f(a, b, c=3, d=4): 47 | return a + b + c 48 | 49 | got = get_kwargs_from_args([1, 2], f, offset=1) 50 | expected = {"b": 1, "c": 2} 51 | 52 | assert got == expected 53 | 54 | 55 | def test_infer_aggregation_level_no_decorator(): 56 | def f(params): 57 | return 1 58 | 59 | assert infer_aggregation_level(f) == AggregationLevel.SCALAR 60 | 61 | 62 | def test_infer_aggregation_level_scalar_decorator(): 63 | @mark.scalar 64 | def f(params): 65 | return 1 66 | 67 | assert infer_aggregation_level(f) == AggregationLevel.SCALAR 68 | 69 | 70 | def test_infer_aggregation_level_scalar_anotation(): 71 | def f(params: NDArray[np.float64]) -> ScalarFunctionValue: 72 | return ScalarFunctionValue(1) 73 | 74 | assert infer_aggregation_level(f) == AggregationLevel.SCALAR 75 | 76 | 77 | def test_infer_aggregation_level_least_squares_decorator(): 78 | @mark.least_squares 79 | def f(params): 80 | return np.ones(3) 81 | 82 | assert infer_aggregation_level(f) == AggregationLevel.LEAST_SQUARES 83 | 84 | 85 | def test_infer_aggregation_level_least_squares_anotation(): 86 | def f(params: NDArray[np.float64]) -> 
LeastSquaresFunctionValue: 87 | return LeastSquaresFunctionValue(np.ones(3)) 88 | 89 | assert infer_aggregation_level(f) == AggregationLevel.LEAST_SQUARES 90 | 91 | 92 | def test_infer_aggregation_level_likelihood_decorator(): 93 | @mark.likelihood 94 | def f(params): 95 | return np.ones(3) 96 | 97 | assert infer_aggregation_level(f) == AggregationLevel.LIKELIHOOD 98 | 99 | 100 | def test_infer_aggregation_level_likelihood_anotation(): 101 | def f(params: NDArray[np.float64]) -> LikelihoodFunctionValue: 102 | return LikelihoodFunctionValue(np.ones(3)) 103 | 104 | assert infer_aggregation_level(f) == AggregationLevel.LIKELIHOOD 105 | -------------------------------------------------------------------------------- /tests/optimagic/test_algo_selection.py: -------------------------------------------------------------------------------- 1 | from optimagic import algos 2 | 3 | 4 | def test_dfols_is_present(): 5 | assert hasattr(algos, "nag_dfols") 6 | assert hasattr(algos.Bounded, "nag_dfols") 7 | assert hasattr(algos.LeastSquares, "nag_dfols") 8 | assert hasattr(algos.Local, "nag_dfols") 9 | assert hasattr(algos.Bounded.Local.LeastSquares, "nag_dfols") 10 | assert hasattr(algos.Local.Bounded.LeastSquares, "nag_dfols") 11 | assert hasattr(algos.LeastSquares.Bounded.Local, "nag_dfols") 12 | 13 | 14 | def test_scipy_cobyla_is_present(): 15 | assert hasattr(algos, "scipy_cobyla") 16 | assert hasattr(algos.Local, "scipy_cobyla") 17 | assert hasattr(algos.NonlinearConstrained, "scipy_cobyla") 18 | assert hasattr(algos.GradientFree, "scipy_cobyla") 19 | assert hasattr(algos.Local.NonlinearConstrained, "scipy_cobyla") 20 | assert hasattr(algos.NonlinearConstrained.Local, "scipy_cobyla") 21 | assert hasattr(algos.GradientFree.NonlinearConstrained, "scipy_cobyla") 22 | assert hasattr(algos.GradientFree.NonlinearConstrained.Local, "scipy_cobyla") 23 | assert hasattr(algos.Local.GradientFree.NonlinearConstrained, "scipy_cobyla") 24 | assert 
hasattr(algos.NonlinearConstrained.GradientFree.Local, "scipy_cobyla")
    assert hasattr(algos.NonlinearConstrained.Local.GradientFree, "scipy_cobyla")
    assert hasattr(algos.Local.NonlinearConstrained.GradientFree, "scipy_cobyla")


def test_algorithm_lists():
    # Available is a subset of All; the *Names lists mirror the algorithm lists.
    assert len(algos.All) >= len(algos.Available)
    assert len(algos.AllNames) == len(algos.All)
    assert len(algos.AvailableNames) == len(algos.Available)
--------------------------------------------------------------------------------
/tests/optimagic/test_batch_evaluators.py:
--------------------------------------------------------------------------------
# Tests for process_batch_evaluator and the joblib/threading batch evaluators:
# parallel mapping, error handling, and argument unpacking.
import itertools
import warnings

import pytest

from optimagic.batch_evaluators import process_batch_evaluator

batch_evaluators = ["joblib", "threading"]

n_core_list = [1, 2]

# Every evaluator is exercised with 1 and 2 cores.
test_cases = list(itertools.product(batch_evaluators, n_core_list))


def double(x):
    return 2 * x


def buggy_func(x):  # noqa: ARG001
    # Always fails; used to test the evaluators' error handling.
    raise AssertionError()


def add_x_and_y(x, y):
    return x + y


@pytest.mark.slow()
@pytest.mark.parametrize("batch_evaluator, n_cores", test_cases)
def test_batch_evaluator_without_exceptions(batch_evaluator, n_cores):
    batch_evaluator = process_batch_evaluator(batch_evaluator)

    calculated = batch_evaluator(
        func=double,
        arguments=list(range(10)),
        n_cores=n_cores,
    )

    expected = list(range(0, 20, 2))

    assert calculated == expected


@pytest.mark.slow()
@pytest.mark.parametrize("batch_evaluator, n_cores", test_cases)
def test_batch_evaluator_with_unhandled_exceptions(batch_evaluator, n_cores):
    # error_handling="raise" propagates the worker's exception to the caller.
    batch_evaluator = process_batch_evaluator(batch_evaluator)
    with pytest.raises(AssertionError):
        batch_evaluator(
            func=buggy_func,
            arguments=list(range(10)),
            n_cores=n_cores,
            error_handling="raise",
        )


@pytest.mark.slow()
@pytest.mark.parametrize("batch_evaluator, n_cores", test_cases)
def test_batch_evaluator_with_handled_exceptions(batch_evaluator, n_cores):
    # error_handling="continue" converts each failure into a traceback string
    # instead of raising; warnings emitted during evaluation are suppressed.
    batch_evaluator = process_batch_evaluator(batch_evaluator)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")

        calculated = batch_evaluator(
            func=buggy_func,
            arguments=list(range(10)),
            n_cores=n_cores,
            error_handling="continue",
        )

    # NOTE(review): source dump lost indentation — this loop looks like it sits
    # outside the warnings context; confirm against the repository.
    for calc in calculated:
        assert isinstance(calc, str)


@pytest.mark.slow()
@pytest.mark.parametrize("batch_evaluator, n_cores", test_cases)
def test_batch_evaluator_with_list_unpacking(batch_evaluator, n_cores):
    # unpack_symbol="*" splats each tuple argument into positional args.
    batch_evaluator = process_batch_evaluator(batch_evaluator)
    calculated = batch_evaluator(
        func=add_x_and_y,
        arguments=[(1, 2), (3, 4)],
        n_cores=n_cores,
        unpack_symbol="*",
    )
    expected = [3, 7]
    assert calculated == expected


@pytest.mark.slow()
@pytest.mark.parametrize("batch_evaluator, n_cores", test_cases)
def test_batch_evaluator_with_dict_unpacking(batch_evaluator, n_cores):
    # unpack_symbol="**" splats each dict argument into keyword args.
    batch_evaluator = process_batch_evaluator(batch_evaluator)
    calculated = batch_evaluator(
        func=add_x_and_y,
        arguments=[{"x": 1, "y": 2}, {"x": 3, "y": 4}],
        n_cores=n_cores,
        unpack_symbol="**",
    )
    expected = [3, 7]
    assert calculated == expected


def test_get_batch_evaluator_invalid_value():
    with pytest.raises(ValueError):
        process_batch_evaluator("bla")


def test_get_batch_evaluator_invalid_type():
    with pytest.raises(TypeError):
        process_batch_evaluator(3)


def test_get_batch_evaluator_with_callable():
    # A user-supplied callable is accepted as a batch evaluator as-is.
    assert callable(process_batch_evaluator(lambda x: x))
--------------------------------------------------------------------------------
/tests/optimagic/test_decorators.py:
--------------------------------------------------------------------------------
# Tests for the catch and unpack decorators.
import pytest

from optimagic.decorators import (
    catch,
    unpack,
)


def test_catch_at_defaults():
    # By default, catch converts exceptions into a warning plus a None return ...
    @catch
    def f():
        raise ValueError

    with pytest.warns(UserWarning):
        assert f() is None

    # ... but KeyboardInterrupt is never swallowed.
    @catch
    def g():
        raise KeyboardInterrupt()

    with pytest.raises(KeyboardInterrupt):
        g()


def test_catch_with_reraise():
    @catch(reraise=True)
    def f():
        raise ValueError

    with pytest.raises(ValueError):
        f()


def test_unpack_decorator_none():
    # symbol=None: the argument is passed through untouched.
    @unpack(symbol=None)
    def f(x):
        return x

    assert f(3) == 3


def test_unpack_decorator_one_star():
    @unpack(symbol="*")
    def f(x, y):
        return x + y

    assert f((3, 4)) == 7


def test_unpack_decorator_two_stars():
    @unpack(symbol="**")
    def f(x, y):
        return x + y

    assert f({"x": 3, "y": 4}) == 7
--------------------------------------------------------------------------------
/tests/optimagic/test_mark.py:
--------------------------------------------------------------------------------
# Tests for om.mark decorators (scalar / least_squares / likelihood / minimizer)
# across plain functions, frozen-dataclass callables, and functools.partial objects.
import functools
from dataclasses import dataclass

import pytest

import optimagic as om
from optimagic.optimization.algorithm import AlgoInfo, Algorithm
from optimagic.typing import AggregationLevel


def f(x):
    pass


@dataclass(frozen=True)
class ImmutableF:
    # Frozen, so mark must not rely on setting attributes in place.
    def __call__(self, x):
        pass


def _g(x, y):
    pass


g = functools.partial(_g, y=1)


CALLABLES = [f, ImmutableF(), g]


@pytest.mark.parametrize("func", CALLABLES)
def test_scalar(func):
    got = om.mark.scalar(func)

    assert got._problem_type == AggregationLevel.SCALAR


@pytest.mark.parametrize("func", CALLABLES)
def 
test_least_squares(func):
    got = om.mark.least_squares(func)

    assert got._problem_type == AggregationLevel.LEAST_SQUARES


@pytest.mark.parametrize("func", CALLABLES)
def test_likelihood(func):
    got = om.mark.likelihood(func)

    assert got._problem_type == AggregationLevel.LIKELIHOOD


def test_mark_minimizer():
    """mark.minimizer must attach a populated AlgoInfo to the algorithm class."""

    @om.mark.minimizer(
        name="test",
        solver_type=AggregationLevel.LEAST_SQUARES,
        is_available=True,
        is_global=True,
        needs_jac=True,
        needs_hess=True,
        supports_parallelism=True,
        supports_bounds=True,
        supports_linear_constraints=True,
        supports_nonlinear_constraints=True,
        disable_history=False,
    )
    @dataclass(frozen=True)
    class DummyAlgorithm(Algorithm):
        initial_radius: float = 1.0
        max_radius: float = 10.0
        convergence_ftol_rel: float = 1e-6
        stopping_maxiter: int = 1000

        def _solve_internal_problem(self, problem, x0):
            # Never called in this test; only the class metadata matters.
            pass

    assert hasattr(DummyAlgorithm, "__algo_info__")
    assert isinstance(DummyAlgorithm.__algo_info__, AlgoInfo)
    assert DummyAlgorithm.__algo_info__.name == "test"
--------------------------------------------------------------------------------
/tests/optimagic/test_timing.py:
--------------------------------------------------------------------------------
import pytest

from optimagic import timing


def test_invalid_aggregate_batch_time():
    # A non-callable aggregate_batch_time must be rejected at construction time.
    with pytest.raises(ValueError, match="aggregate_batch_time must be a callable"):
        timing.CostModel(
            fun=None,
            jac=None,
            fun_and_jac=None,
            label="label",
            aggregate_batch_time="Not callable",
        )
--------------------------------------------------------------------------------
/tests/optimagic/test_typed_dicts_consistency.py:
--------------------------------------------------------------------------------
# Consistency checks: each options dataclass and its TypedDict twin must expose
# identical attribute names and types.
from typing import get_args, get_type_hints

from 
optimagic.differentiation.numdiff_options import NumdiffOptions, NumdiffOptionsDict
from optimagic.optimization.multistart_options import (
    MultistartOptions,
    MultistartOptionsDict,
)
from optimagic.parameters.scaling import ScalingOptions, ScalingOptionsDict


def assert_attributes_and_type_hints_are_equal(dataclass, typed_dict):
    """Test that dataclass and typed_dict have same attributes and types.

    This assertion purposefully ignores that all type hints in the typed dict are
    wrapped by typing.NotRequired.

    As there is no easy way to *not* read the NotRequired types in 3.10, we need to
    activate include_extras=True to get the NotRequired types in Python 3.11 and
    above. Once we drop support for Python 3.10, we can remove the
    include_extras=True argument and the removal of the NotRequired types.

    Args:
        dataclass: An instance of a dataclass
        typed_dict: An instance of a typed dict

    """
    types_from_dataclass = get_type_hints(dataclass)
    types_from_typed_dict = get_type_hints(typed_dict, include_extras=True)
    types_from_typed_dict = {
        # Remove typing.NotRequired from the types
        k: get_args(v)[0]
        for k, v in types_from_typed_dict.items()
    }
    assert types_from_dataclass == types_from_typed_dict


def test_scaling_options_and_dict_have_same_attributes():
    assert_attributes_and_type_hints_are_equal(ScalingOptions, ScalingOptionsDict)


def test_multistart_options_and_dict_have_same_attributes():
    assert_attributes_and_type_hints_are_equal(MultistartOptions, MultistartOptionsDict)


def test_numdiff_options_and_dict_have_same_attributes():
    assert_attributes_and_type_hints_are_equal(NumdiffOptions, NumdiffOptionsDict)
--------------------------------------------------------------------------------
/tests/optimagic/visualization/test_convergence_plot.py:
--------------------------------------------------------------------------------
import pytest

from optimagic import get_benchmark_problems
from optimagic.benchmarking.run_benchmark import run_benchmark
from optimagic.visualization.convergence_plot import (
    _check_only_allowed_subset_provided,
    convergence_plot,
)

# Integration test to make sure non-default arguments do not throw errors.
profile_options = [
    {"n_cols": 3},
    {"distance_measure": "parameter_distance"},
    {"monotone": False},
    {"normalize_distance": False},
    {"runtime_measure": "walltime"},
    {"runtime_measure": "n_batches"},
    {"stopping_criterion": None},
    {"stopping_criterion": "x"},
    {"stopping_criterion": "x_and_y"},
    {"stopping_criterion": "x_or_y"},
    {"x_precision": 1e-5},
    {"y_precision": 1e-5},
]


# strict=False: zip deliberately truncates to the shorter grid list, so only the
# first two option sets get an explicit grid value.
@pytest.mark.parametrize(
    "options, grid", zip(profile_options, [True, False], strict=False)
)
def test_convergence_plot_options(options, grid):
    problems = get_benchmark_problems("example")
    stop_after_10 = {
        "stopping_max_criterion_evaluations": 10,
        "stopping_max_iterations": 10,
    }
    optimizers = {
        "lbfgsb": {"algorithm": "scipy_lbfgsb", "algo_options": stop_after_10},
        "nm": {"algorithm": "scipy_neldermead", "algo_options": stop_after_10},
    }
    results = run_benchmark(
        problems,
        optimizers,
        n_cores=1,  # must be 1 for the test to work
    )

    convergence_plot(
        problems=problems,
        results=results,
        problem_subset=["bard_good_start"],
        combine_plots_in_grid=grid,
        **options,
    )


def test_check_only_allowed_subset_provided_none():
    # None means "no subset requested" and must pass silently.
    allowed = ["a", "b", "c"]
    _check_only_allowed_subset_provided(None, allowed, "name")


def test_check_only_allowed_subset_provided_all_included():
    allowed = ["a", "b", "c"]
    _check_only_allowed_subset_provided(["a", 
"b"], allowed, "name") 63 | 64 | 65 | def test_check_only_allowed_subset_provided_missing(): 66 | allowed = ["a", "b", "c"] 67 | with pytest.raises(ValueError): 68 | _check_only_allowed_subset_provided(["d"], allowed, "name") 69 | -------------------------------------------------------------------------------- /tests/optimagic/visualization/test_deviation_plot.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from optimagic import get_benchmark_problems 4 | from optimagic.benchmarking.run_benchmark import run_benchmark 5 | from optimagic.visualization.deviation_plot import ( 6 | deviation_plot, 7 | ) 8 | 9 | # integration test to make sure non default argument do not throw Errors 10 | profile_options = [ 11 | {"distance_measure": "parameter_distance"}, 12 | {"distance_measure": "criterion"}, 13 | {"monotone": True}, 14 | {"monotone": False}, 15 | {"runtime_measure": "n_evaluations"}, 16 | {"runtime_measure": "n_batches"}, 17 | ] 18 | 19 | 20 | @pytest.mark.parametrize("options", profile_options) 21 | def test_convergence_plot_options(options): 22 | problems = get_benchmark_problems("example") 23 | stop_after_10 = { 24 | "stopping_max_criterion_evaluations": 10, 25 | "stopping_max_iterations": 10, 26 | } 27 | optimizers = { 28 | "lbfgsb": {"algorithm": "scipy_lbfgsb", "algo_options": stop_after_10}, 29 | "nm": {"algorithm": "scipy_neldermead", "algo_options": stop_after_10}, 30 | } 31 | results = run_benchmark( 32 | problems, 33 | optimizers, 34 | n_cores=1, # must be 1 for the test to work 35 | ) 36 | 37 | deviation_plot(problems=problems, results=results, **options) 38 | -------------------------------------------------------------------------------- /tests/optimagic/visualization/test_slice_plot.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from optimagic import mark 5 | from optimagic.parameters.bounds import Bounds 
from optimagic.visualization.slice_plot import slice_plot


@pytest.fixture()
def fixed_inputs():
    # Four named parameters with a common lower bound and staggered upper bounds.
    params = {"alpha": 0, "beta": 0, "gamma": 0, "delta": 0}
    bounds = Bounds(
        lower={name: -5 for name in params},
        upper={name: i + 2 for i, name in enumerate(params)},
    )

    out = {
        "params": params,
        "bounds": bounds,
    }
    return out


@mark.likelihood
def sphere_loglike(params):
    # Per-parameter squared contributions (likelihood-style vector output).
    x = np.array(list(params.values()))
    return x**2


def sphere(params):
    # Scalar sphere objective.
    x = np.array(list(params.values()))
    return x @ x


# Non-default keyword combinations; the test only checks that none of them raise.
KWARGS = [
    {},
    {"plots_per_row": 4},
    {"selector": lambda x: [x["alpha"], x["beta"]]},
    {"param_names": {"alpha": "Alpha", "beta": "Beta"}},
    {"share_x": True},
    {"share_y": False},
    {"return_dict": True},
]
parametrization = [
    (func, kwargs) for func in [sphere_loglike, sphere] for kwargs in KWARGS
]


@pytest.mark.parametrize("func, kwargs", parametrization)
def test_slice_plot(fixed_inputs, func, kwargs):
    slice_plot(
        func=func,
        **fixed_inputs,
        **kwargs,
    )