├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   ├── dependabot.yml
│   └── workflows
│       └── test.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── README.md
├── docs
│   └── images
│       ├── bayes_ackley.gif
│       ├── bayes_ackley_.gif
│       ├── bayes_convex.gif
│       ├── bayes_convex_.gif
│       ├── blue.jpg
│       └── logo.png
├── examples
│   ├── hyperactive_intro.ipynb
│   ├── integrations
│   │   └── sklearn_example.py
│   ├── opt_strat_early_stop.py
│   ├── optimization_applications
│   │   ├── GMM_Hyperactive_Example.ipynb
│   │   ├── constrained_optimization.py
│   │   ├── ensemble_learning_example.py
│   │   ├── feature_selection.py
│   │   ├── feature_transformation.py
│   │   ├── hyperpara_optimize.py
│   │   ├── memory.py
│   │   ├── meta_data_collection.py
│   │   ├── meta_learning.py
│   │   ├── meta_optimization.py
│   │   ├── model_selection.py
│   │   ├── multiple_different_optimizers.py
│   │   ├── multiple_scores.py
│   │   ├── neural_architecture_search.py
│   │   ├── pretrained_nas.py
│   │   ├── search_space_example.py
│   │   ├── sklearn_pipeline_example.py
│   │   ├── sklearn_preprocessing.py
│   │   ├── test_function.py
│   │   └── transfer_learning.py
│   ├── optimization_techniques
│   │   ├── bayesian_optimization.py
│   │   ├── direct_algorithm.py
│   │   ├── downhill_simplex.py
│   │   ├── ensemble_optimizer.py
│   │   ├── evolution_strategy.py
│   │   ├── forest_optimization.py
│   │   ├── grid_search.py
│   │   ├── hill_climbing.py
│   │   ├── lipschitz_optimization.py
│   │   ├── parallel_tempering.py
│   │   ├── particle_swarm_optimization.py
│   │   ├── pattern_search.py
│   │   ├── powells_method.py
│   │   ├── rand_rest_hill_climbing.py
│   │   ├── random_annealing.py
│   │   ├── random_search.py
│   │   ├── repulsing_hill_climbing.py
│   │   ├── simulated_annealing.py
│   │   ├── spiral_optimization.py
│   │   ├── stochastic_hill_climbing.py
│   │   └── tpe.py
│   ├── tensorflow_example.py
│   └── tested_and_supported_packages
│       ├── catboost_example.py
│       ├── joblib_example.py
│       ├── keras_example.py
│       ├── lightgbm_example.py
│       ├── mlxtend_example.py
│       ├── multiprocessing_example.py
│       ├── pytorch_example.py
│       ├── rgf_example.py
│       ├── sklearn_example.py
│       ├── tensorflow_example.py
│       └── xgboost_example.py
├── extension_templates
│   ├── experiments.py
│   └── optimizers.py
├── pyproject.toml
├── requirements
│   └── requirements-test.in
├── src
│   └── hyperactive
│       ├── __init__.py
│       ├── _registry
│       │   ├── __init__.py
│       │   └── _lookup.py
│       ├── base
│       │   ├── __init__.py
│       │   ├── _experiment.py
│       │   ├── _optimizer.py
│       │   └── tests
│       │       ├── __init__.py
│       │       └── test_endtoend.py
│       ├── distribution.py
│       ├── experiment
│       │   ├── __init__.py
│       │   ├── integrations
│       │   │   ├── __init__.py
│       │   │   └── sklearn_cv.py
│       │   └── toy
│       │       ├── __init__.py
│       │       ├── _ackley.py
│       │       ├── _parabola.py
│       │       └── _sphere.py
│       ├── hyperactive.py
│       ├── integrations
│       │   ├── __init__.py
│       │   ├── sklearn
│       │   │   ├── __init__.py
│       │   │   ├── best_estimator.py
│       │   │   ├── checks.py
│       │   │   ├── hyperactive_search_cv.py
│       │   │   ├── opt_cv.py
│       │   │   └── utils.py
│       │   └── sktime
│       │       ├── __init__.py
│       │       └── main.py
│       ├── opt
│       │   ├── __init__.py
│       │   ├── _adapters
│       │   │   ├── __init__.py
│       │   │   └── _gfo.py
│       │   ├── gridsearch
│       │   │   ├── __init__.py
│       │   │   └── _sk.py
│       │   ├── hillclimbing
│       │   │   ├── __init__.py
│       │   │   └── _hillclimbing.py
│       │   ├── hillclimbing_repulsing
│       │   │   ├── __init__.py
│       │   │   └── _hillclimbing_repulsing.py
│       │   └── hillclimbing_stochastic
│       │       ├── __init__.py
│       │       └── _hillclimbing_stochastic.py
│       ├── optimizers
│       │   ├── __init__.py
│       │   ├── constraint.py
│       │   ├── dictionary.py
│       │   ├── hyper_gradient_conv.py
│       │   ├── hyper_optimizer.py
│       │   ├── objective_function.py
│       │   ├── optimizer_attributes.py
│       │   ├── optimizers.py
│       │   └── strategies
│       │       ├── __init__.py
│       │       ├── custom_optimization_strategy.py
│       │       ├── optimization_strategy.py
│       │       └── optimizer_attributes.py
│       ├── print_results.py
│       ├── process.py
│       ├── results.py
│       ├── run_search.py
│       ├── search_space.py
│       ├── tests
│       │   ├── __init__.py
│       │   ├── _config.py
│       │   ├── _doctest.py
│       │   ├── test_all_objects.py
│       │   └── test_doctest.py
│       └── todos.md
└── tests
    ├── __init__.py
    ├── _local_test_optimization_strategies
    │   ├── __init__.py
    │   ├── _parametrize.py
    │   ├── _test_memory_warm_start.py
    │   ├── _test_memory_warm_start_smbo.py
    │   ├── _test_strategy_combinations.py
    │   └── _test_strategy_multiprocessing.py
    ├── _local_test_timings
    │   ├── __init__.py
    │   ├── _search_space_list.py
    │   ├── _test_memory.py
    │   ├── _test_memory_warm_start.py
    │   ├── _test_memory_warm_start_n_jobs.py
    │   ├── _test_shared_memory.py
    │   ├── _test_warm_start.py
    │   └── _test_warm_start_n_jobs.py
    ├── _test_examples.py
    ├── integrations
    │   ├── __init__.py
    │   └── sklearn
    │       ├── __init__.py
    │       ├── test_parametrize_with_checks.py
    │       └── test_sklearn_api.py
    ├── test_callbacks.py
    ├── test_catch.py
    ├── test_constr_opt.py
    ├── test_distribution.py
    ├── test_early_stop.py
    ├── test_empty_output
    │   ├── __init__.py
    │   ├── non_verbose.py
    │   ├── test_empty_output.py
    │   └── verbose.py
    ├── test_hyper_gradient_trafo.py
    ├── test_initializers.py
    ├── test_issues
    │   ├── __init__.py
    │   ├── test_issue_25.py
    │   ├── test_issue_29.py
    │   └── test_issue_34.py
    ├── test_max_score.py
    ├── test_max_time.py
    ├── test_obj_func_arg.py
    ├── test_optimization_strategies
    │   ├── __init__.py
    │   ├── _parametrize.py
    │   ├── test_constr_opt.py
    │   ├── test_early_stopping.py
    │   └── test_search_space_pruning.py
    ├── test_optimizers
    │   ├── __init__.py
    │   ├── _parametrize.py
    │   ├── test_best_results.py
    │   ├── test_gfo_wrapper.py
    │   ├── test_memory.py
    │   └── test_optimization_strategies.py
    ├── test_pass_through.py
    ├── test_random_state.py
    ├── test_results.py
    ├── test_results_methods.py
    ├── test_search_spaces.py
    └── test_warm_starts
        ├── __init__.py
        ├── test_memory_warm_start.py
        ├── test_warm_start.py
        └── test_warm_start_smbo.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Check the FAQ in the readme first. Can the bug be resolved by one of those solutions?**
11 |
12 | **Describe the bug**
13 |
14 | **Code to reproduce the behavior**
15 |
16 | **Error message from command line**
17 |
18 | **System information:**
19 | - OS Platform and Distribution
20 | - Python version
21 | - Hyperactive version
22 |
23 | **Additional context**
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 |
12 | **Describe the solution you'd like**
13 |
14 | **Describe alternatives you've considered**
15 |
16 | **Additional context**
17 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "pip"
9 | directory: "/"
10 | schedule:
11 | interval: "weekly"
12 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - dev
8 | pull_request:
9 | branches:
10 | - master
11 | - dev
12 |
13 | concurrency:
14 | group: ${{ github.workflow }}-${{ github.ref }}
15 | cancel-in-progress: true
16 |
17 | jobs:
18 | build:
19 | strategy:
20 | matrix:
21 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
22 | os: ["macos-latest", "windows-latest", "ubuntu-latest"]
23 |
24 | fail-fast: false
25 |
26 | runs-on: ${{ matrix.os }}
27 | timeout-minutes: 30
28 |
29 | steps:
30 | - uses: actions/checkout@v4
31 |
32 | - name: Set up Python ${{ matrix.python-version }}
33 | uses: actions/setup-python@v5
34 | with:
35 | python-version: ${{ matrix.python-version }}
36 |
37 | - name: Install dependencies
38 | run: |
39 | python -m pip install --upgrade pip
40 | python -m pip install build
41 |
42 | make install-all-extras
43 |
44 | - name: Test with pytest
45 | run: |
46 | python -m pytest tests --cov=hyperactive --cov-report=term-missing --cov-report=xml -p no:warnings
47 |
48 |       - name: Test src/hyperactive with pytest
49 | run: |
50 | python -m pytest src/hyperactive -p no:warnings
51 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 | requirements*.txt
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Dev
42 | **/_dev_scripts
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | target/
80 |
81 | # Jupyter Notebook
82 | .ipynb_checkpoints
83 |
84 | # IPython
85 | profile_default/
86 | ipython_config.py
87 |
88 | # pyenv
89 | .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 |
117 | # Spyder project settings
118 | .spyderproject
119 | .spyproject
120 |
121 | # Rope project settings
122 | .ropeproject
123 |
124 | # mkdocs documentation
125 | /site
126 |
127 | # mypy
128 | .mypy_cache/
129 | .dmypy.json
130 | dmypy.json
131 |
132 | # Pyre type checker
133 | .pyre/
134 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | There are many ways to contribute to this project. The following list should give you some ideas for how to contribute. The only requirement for a contribution is that you are familiar with this project and understand the problems it is trying to solve.
4 |
5 |
6 |
7 |
8 | ## Discussions
9 |
10 | You can contribute to this project by taking part in a discussion.
11 |
12 |
13 |
14 |
15 | #### - Upvoting an issue
16 |
17 | The easiest way to contribute is to upvote an issue (with a thumbs-up emoji) that is important to you. This way I can see which bugfix, feature or question is important to my users.
18 |
19 |
20 |
21 |
22 | #### - Take part in discussions
23 |
24 | If you have experience in a topic that touches the issue, you might be able to participate in the discussion.
25 |
26 |
27 |
28 |
29 | #### - Reproduce a bug
30 |
31 | An issue about a bug can benefit from users reproducing it, thereby confirming that the bug exists.
32 |
33 |
34 |
35 |
36 | ## Create a pull request
37 |
38 | A more difficult way to contribute is to open a pull request.
39 |
40 |
41 |
42 |
43 | #### - Corrections in Readme
44 |
45 | If you want to start easy, you can create a pull request that corrects a mistake in the readme. Those mistakes could be a spelling error or a wrong default value in the API reference.
46 |
47 |
48 |
49 |
50 | #### - Add an example
51 |
52 | A great way to contribute is to add an example from your field of work that incorporates this package.
53 |
54 |
55 |
56 |
57 | #### - Solve an existing issue
58 |
59 | Solving an issue with a pull request is one of the most difficult ways to contribute. If you need help with the solution, you can ask in the corresponding issue or contact me at my official email (from my profile page).
60 |
61 |
62 |
63 |
64 | ## Open an issue
65 |
66 | You can contribute to this project by opening an issue. This could be a question, a bug report, a feature request or something else. In any case, you should do a search beforehand to confirm that a similar issue has not already been opened.
67 |
68 |
69 |
70 |
71 | #### - Questions
72 |
73 | This can be a question about how an algorithm works or if something in the documentation is not clear.
74 |
75 |
76 |
77 |
78 | #### - Bug reports
79 |
80 | If you encounter an error with this software you should open an issue. Please look into the error message to verify if the origin of the error is in this software. If you decide to open an issue about a bug report you should select the issue template and follow the instructions.
81 |
82 |
83 |
84 |
85 | #### - Feature Requests
86 |
87 | This could be a feature that would be very useful for your work, an interesting algorithm, or a way to open up the software to more use cases.
88 |
89 |
90 |
91 |
92 | ---
93 |
94 |
95 |
96 |
97 |
98 | # Contribution Guidelines
99 |
100 | When contributing to this repository, please first discuss the change you wish to make via issue, email, or any other method with the owners of this repository before making a change.
101 |
102 | Please note we have a code of conduct, please follow it in all your interactions with the project.
103 |
104 |
105 |
106 |
107 | ## Issues
108 |
109 | Before opening an issue, please use the search to find out if your problem or question has already been addressed.
110 | When opening issues, please use the issue templates and try to describe your problem in detail.
111 |
112 | If you open an issue that describes a bug, please add a small example code snippet. This helps to reproduce the bug, which speeds up the process of fixing the bug.
113 |
114 |
115 |
116 |
117 | ## Pull Requests
118 |
119 | - In the PR title use the tags [Fix], [Feature], [Refactor], [Release], [Hotfix]
120 | - Link the PR to an issue if it solves one.
121 | - Explain how you solved the issue.
122 | - Check the format and coding style.
123 |
124 |
125 |
126 |
127 | ---
128 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Simon Blanke
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | clean: clean-pyc clean-ipynb clean-catboost clean-build clean-test
2 |
3 | clean-progress_board:
4 | find . -name '*.csv~' -exec rm -f {} +
5 | find . -name '*.lock~' -exec rm -f {} +
6 |
7 | clean-pyc:
8 | find . -name '*.pyc' -exec rm -f {} +
9 | find . -name '*.pyo' -exec rm -f {} +
10 | find . -name '*~' -exec rm -f {} +
11 | find . -name '__pycache__' -exec rm -fr {} +
12 |
13 | clean-ipynb:
14 | find . -name '*.ipynb_checkpoints' -exec rm -fr {} +
15 |
16 | clean-catboost:
17 | find . -name 'catboost_info' -exec rm -fr {} +
18 |
19 | clean-build:
20 | rm -fr build/
21 | rm -fr dist/
22 | rm -fr .eggs/
23 | find . -name '*.egg-info' -exec rm -fr {} +
24 | find . -name '*.egg' -exec rm -f {} +
25 |
26 | clean-test:
27 | cd tests/; \
28 | rm -f .coverage; \
29 | rm -fr htmlcov/
30 |
31 | test-search_space:
32 | cd tests/; \
33 | i=0; while [ "$$i" -le 100 ]; do \
34 | i=$$((i + 1));\
35 | pytest -q test_search_spaces.py; \
36 | done
37 |
38 | test-pytest:
39 | 	python -m pytest --durations=10 -x -p no:warnings tests/
40 |
41 | test-timings:
42 | cd tests/_local_test_timings; \
43 | pytest *.py -x -p no:warnings
44 |
45 | test-local: test-timings
46 |
47 | test: test-pytest test-local
48 |
49 |
50 | test-examples:
51 | cd tests; \
52 | python _test_examples.py
53 |
54 | test-extensive: test test-local test-examples
55 |
56 | push: test
57 | git push
58 |
59 | release: reinstall test-extensive
60 | python -m twine upload dist/*
61 |
62 | # the repository is built via pyproject.toml (there is no setup.py)
63 | dist:
64 | 	python -m build
65 | 	ls -l dist
66 |
67 | build:
68 | python -m build
69 |
70 | install: build
71 | pip install dist/*.whl
72 |
73 | uninstall:
74 | pip uninstall -y hyperactive
75 | rm -fr build dist *.egg-info
76 |
77 | install-test-requirements:
78 | python -m pip install .[test]
79 |
80 | install-build-requirements:
81 | python -m pip install .[build]
82 |
83 | install-all-extras:
84 | python -m pip install .[all_extras]
85 |
86 | install-editable:
87 | pip install -e .
88 |
89 | reinstall: uninstall install
90 |
91 | reinstall-editable: uninstall install-editable
--------------------------------------------------------------------------------
/docs/images/bayes_ackley.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/bayes_ackley.gif
--------------------------------------------------------------------------------
/docs/images/bayes_ackley_.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/bayes_ackley_.gif
--------------------------------------------------------------------------------
/docs/images/bayes_convex.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/bayes_convex.gif
--------------------------------------------------------------------------------
/docs/images/bayes_convex_.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/bayes_convex_.gif
--------------------------------------------------------------------------------
/docs/images/blue.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/blue.jpg
--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/docs/images/logo.png
--------------------------------------------------------------------------------
/examples/integrations/sklearn_example.py:
--------------------------------------------------------------------------------
1 | from sklearn import svm, datasets
2 |
3 | from hyperactive.integrations import HyperactiveSearchCV
4 | from hyperactive.optimizers import RandomSearchOptimizer
5 |
6 | iris = datasets.load_iris()
7 |
8 |
9 | svc = svm.SVC()
10 | opt = RandomSearchOptimizer()
11 | parameters = {"kernel": ["linear", "rbf"], "C": [1, 10]}
12 |
13 | search = HyperactiveSearchCV(svc, opt, parameters)
14 | search.fit(iris.data, iris.target)
15 |
16 | print("\n search.get_params() \n", search.get_params())
17 |
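18 | # added sketch (assumption): HyperactiveSearchCV follows the scikit-learn
19 | # search-CV conventions, so the fitted search should expose the usual
20 | # attributes such as best_params_
21 | print("\n search.best_params_ \n", search.best_params_)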
--------------------------------------------------------------------------------
/examples/opt_strat_early_stop.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 |
5 |
6 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
7 | from hyperactive.optimizers import (
8 | HillClimbingOptimizer,
9 | RandomSearchOptimizer,
10 | BayesianOptimizer,
11 | )
12 |
13 |
14 | opt_strat = CustomOptimizationStrategy()
15 | opt_strat.add_optimizer(
16 | RandomSearchOptimizer(), duration=0.5, early_stopping={"n_iter_no_change": 10}
17 | )
18 | opt_strat.add_optimizer(
19 | HillClimbingOptimizer(), duration=0.5, early_stopping={"n_iter_no_change": 10}
20 | )
21 |
22 |
23 | def objective_function(opt):
24 | score = -opt["x1"] * opt["x1"]
25 | return score, {"additional stuff": 1}
26 |
27 |
28 | search_space = {"x1": list(np.arange(-100, 101, 1))}
29 | n_iter = 100
30 | optimizer = opt_strat
31 |
32 | hyper = Hyperactive()
33 | hyper.add_search(
34 | objective_function,
35 | search_space,
36 | n_iter=n_iter,
37 | n_jobs=1,
38 | optimizer=optimizer,
39 | )
40 | hyper.run()
41 |
--------------------------------------------------------------------------------
/examples/optimization_applications/constrained_optimization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 |
5 |
6 | def convex_function(pos_new):
7 | score = -(pos_new["x1"] * pos_new["x1"] + pos_new["x2"] * pos_new["x2"])
8 | return score
9 |
10 |
11 | search_space = {
12 | "x1": list(np.arange(-100, 101, 0.1)),
13 | "x2": list(np.arange(-100, 101, 0.1)),
14 | }
15 |
16 |
17 | def constraint_1(para):
18 | # reject parameters where x1 and x2 are higher than 2.5 at the same time
19 | return not (para["x1"] > 2.5 and para["x2"] > 2.5)
20 |
21 |
22 | # put one or more constraints inside a list
23 | constraints_list = [constraint_1]
24 |
25 |
26 | hyper = Hyperactive()
27 | # pass list of constraints
28 | hyper.add_search(
29 | convex_function,
30 | search_space,
31 | n_iter=50,
32 | constraints=constraints_list,
33 | )
34 | hyper.run()
35 |
36 | search_data = hyper.search_data(convex_function)
37 |
38 | print("\n search_data \n", search_data, "\n")
39 |
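40 | # added sanity check: no evaluated position should violate the constraint,
41 | # so no row may have x1 > 2.5 and x2 > 2.5 at the same time
42 | assert not ((search_data["x1"] > 2.5) & (search_data["x2"] > 2.5)).any()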
--------------------------------------------------------------------------------
/examples/optimization_applications/ensemble_learning_example.py:
--------------------------------------------------------------------------------
1 | """
2 | This example shows how you can search for the best models in each layer in a
3 | stacking ensemble.
4 |
5 | We want to create a stacking ensemble with 3 layers:
6 | - a top layer with one model
7 | - a middle layer with multiple models
8 | - a bottom layer with multiple models
9 |
10 | We also want to know how many models should be used in the middle and bottom layer.
11 | For that we can use the helper function "get_combinations". It works as follows:
12 |
13 | input = [1, 2 , 3]
14 | output = get_combinations(input, comb_len=2)
15 | output: [[1, 2], [1, 3], [2, 3], [1, 2, 3]]
16 |
17 | Instead of numbers we insert models into "input". This way we get each combination
18 | with at least 2 elements. Just 1 model per layer would not make much sense.
19 |
20 | The ensemble itself is created via the package "mlxtend" in the objective-function "stacking".
21 | """
22 |
23 | import itertools
24 |
25 | from sklearn.datasets import load_breast_cancer
26 | from sklearn.model_selection import cross_val_score
27 | from mlxtend.classifier import StackingClassifier
28 |
29 | from sklearn.ensemble import (
30 | GradientBoostingClassifier,
31 | RandomForestClassifier,
32 | ExtraTreesClassifier,
33 | )
34 |
35 | from sklearn.neighbors import KNeighborsClassifier
36 | from sklearn.neural_network import MLPClassifier
37 | from sklearn.gaussian_process import GaussianProcessClassifier
38 | from sklearn.tree import DecisionTreeClassifier
39 | from sklearn.naive_bayes import GaussianNB
40 |
41 | from sklearn.linear_model import LogisticRegression
42 | from sklearn.linear_model import RidgeClassifier
43 |
44 | from hyperactive import Hyperactive
45 |
46 | data = load_breast_cancer()
47 | X, y = data.data, data.target
48 |
49 | # define models that are used in search space
50 | gbc = GradientBoostingClassifier()
51 | rfc = RandomForestClassifier()
52 | etc = ExtraTreesClassifier()
53 |
54 | mlp = MLPClassifier()
55 | gnb = GaussianNB()
56 | gpc = GaussianProcessClassifier()
57 | dtc = DecisionTreeClassifier()
58 | knn = KNeighborsClassifier()
59 |
60 | lr = LogisticRegression()
61 | rc = RidgeClassifier()
62 |
63 |
64 | def stacking(opt):
65 | lvl_1_ = opt["lvl_1"]()
66 | lvl_0_ = opt["lvl_0"]()
67 | top_ = opt["top"]()
68 |
69 | stack_lvl_0 = StackingClassifier(classifiers=lvl_0_, meta_classifier=top_)
70 | stack_lvl_1 = StackingClassifier(classifiers=lvl_1_, meta_classifier=stack_lvl_0)
71 | scores = cross_val_score(stack_lvl_1, X, y, cv=3)
72 |
73 | return scores.mean()
74 |
75 |
76 | # helper function to create search space dimensions
77 | def get_combinations(models, comb_len=2):
78 | def _list_in_list_of_lists(list_, list_of_lists):
79 | for list__ in list_of_lists:
80 | if set(list_) == set(list__):
81 | return True
82 |
83 | comb_list = []
84 | for i in range(0, len(models) + 1):
85 | for subset in itertools.permutations(models, i):
86 | if len(subset) < comb_len:
87 | continue
88 | if _list_in_list_of_lists(subset, comb_list):
89 | continue
90 |
91 | comb_list.append(list(subset))
92 |
93 | comb_list_f = []
94 | for comb_ in comb_list:
95 |
96 |         def _func_(comb_=comb_):  # bind the current combination (avoids late binding)
97 |             return comb_
98 |
99 |         _func_.__name__ = str(len(comb_)) + "___" + str(comb_)
100 | comb_list_f.append(_func_)
101 |
102 | return comb_list_f
103 |
104 |
105 | def lr_f():
106 | return lr
107 |
108 |
109 | def dtc_f():
110 | return dtc
111 |
112 |
113 | def gnb_f():
114 | return gnb
115 |
116 |
117 | def rc_f():
118 | return rc
119 |
120 |
121 | models_0 = [gpc, dtc, mlp, gnb, knn]
122 | models_1 = [gbc, rfc, etc]
123 |
124 | stack_lvl_0_clfs = get_combinations(models_0)
125 | stack_lvl_1_clfs = get_combinations(models_1)
126 |
127 |
128 | print("\n stack_lvl_0_clfs \n", stack_lvl_0_clfs, "\n")
129 |
130 |
131 | search_space = {
132 | "lvl_1": stack_lvl_1_clfs,
133 | "lvl_0": stack_lvl_0_clfs,
134 | "top": [lr_f, dtc_f, gnb_f, rc_f],
135 | }
136 |
137 | """
138 | hyper = Hyperactive()
139 | hyper.add_search(stacking, search_space, n_iter=3)
140 | hyper.run()
141 | """
142 |
--------------------------------------------------------------------------------
/examples/optimization_applications/feature_selection.py:
--------------------------------------------------------------------------------
1 | """
2 | This example shows how to select the best features for a model
3 | and dataset.
4 |
5 | The diabetes dataset has 10 features, therefore we have 10 search space
6 | dimensions for the feature selection.
7 |
8 | The function "get_feature_indices" returns the list of features that
9 | were selected. This can be used to select the subset of features in "x_new".
10 | """
11 |
12 | import numpy as np
13 | import itertools
14 | from sklearn.datasets import load_diabetes
15 | from sklearn.model_selection import cross_val_score
16 | from sklearn.neighbors import KNeighborsRegressor
17 | from hyperactive import Hyperactive
18 | from hyperactive.optimizers import EvolutionStrategyOptimizer
19 |
20 |
21 | data = load_diabetes()
22 | X, y = data.data, data.target
23 |
24 |
25 | # helper function that returns the selected training data features by index
26 | def get_feature_indices(opt):
27 | feature_indices = []
28 | for key in opt.keys():
29 | if "feature" not in key:
30 | continue
31 | if opt[key] == 0:
32 | continue
33 |
34 | nth_feature = int(key.rsplit(".", 1)[1])
35 | feature_indices.append(nth_feature)
36 |
37 | return feature_indices
38 |
39 |
40 | def model(opt):
41 | feature_indices = get_feature_indices(opt)
42 | if len(feature_indices) == 0:
43 | return 0
44 |
45 | feature_idx_list = [idx for idx in feature_indices if idx is not None]
46 | x_new = X[:, feature_idx_list]
47 |
48 | knr = KNeighborsRegressor(n_neighbors=opt["n_neighbors"])
49 | scores = cross_val_score(knr, x_new, y, cv=5)
50 | score = scores.mean()
51 |
52 | return score
53 |
54 |
55 | # each feature is used for training (1) or not used for training (0)
56 | search_space = {
57 | "n_neighbors": list(range(1, 100)),
58 | "feature.0": [1, 0],
59 | "feature.1": [1, 0],
60 | "feature.2": [1, 0],
61 | "feature.3": [1, 0],
62 | "feature.4": [1, 0],
63 | "feature.5": [1, 0],
64 | "feature.6": [1, 0],
65 | "feature.7": [1, 0],
66 | "feature.8": [1, 0],
67 | "feature.9": [1, 0],
68 | }
69 |
70 |
71 | optimizer = EvolutionStrategyOptimizer(rand_rest_p=0.20)
72 |
73 | hyper = Hyperactive()
74 | hyper.add_search(
75 | model,
76 | search_space,
77 | n_iter=200,
78 | initialize={"random": 15},
79 | optimizer=optimizer,
80 | )
81 | hyper.run()
82 |
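83 | # added sketch: look up which features the best parameter set selected,
84 | # reusing the helper from above on the best parameters
85 | best_para = hyper.best_para(model)
86 | print("selected feature indices:", get_feature_indices(best_para))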
--------------------------------------------------------------------------------
/examples/optimization_applications/feature_transformation.py:
--------------------------------------------------------------------------------
1 | """
2 | This example shows how you can search for useful feature
3 | transformations for your dataset. This example is very similar to
4 | "feature_selection". It adds the possibility to change the features
5 | with the numpy functions in the search space.
6 |
7 | """
8 |
9 | import numpy as np
10 | import itertools
11 | from sklearn.datasets import load_diabetes
12 | from sklearn.model_selection import cross_val_score
13 | from sklearn.neighbors import KNeighborsRegressor
14 | from hyperactive import Hyperactive
15 |
16 | data = load_diabetes()
17 | X, y = data.data, data.target
18 |
19 |
20 | def get_feature_list(opt):
21 | feature_list = []
22 | for key in opt.keys():
23 | if "feature" not in key:
24 | continue
25 |
26 | nth_feature = int(key.rsplit(".", 1)[1])
27 |
28 | if opt[key] == 0:
29 | continue
30 | elif opt[key] == 1:
31 | feature = X[:, nth_feature]
32 | feature_list.append(feature)
33 | else:
34 | feature = opt[key](X[:, nth_feature])
35 | feature_list.append(feature)
36 |
37 | return feature_list
38 |
39 |
40 | def model(opt):
41 | feature_list = get_feature_list(opt)
42 | X_new = np.array(feature_list).T
43 |
44 | knr = KNeighborsRegressor(n_neighbors=opt["n_neighbors"])
45 | scores = cross_val_score(knr, X_new, y, cv=5)
46 | score = scores.mean()
47 |
48 | return score
49 |
50 |
51 | def log_f(*args, **kwargs):
52 | return np.log(*args, **kwargs)
53 |
54 |
55 | def square_f(*args, **kwargs):
56 | return np.square(*args, **kwargs)
57 |
58 |
59 | def sqrt_f(*args, **kwargs):
60 | return np.sqrt(*args, **kwargs)
61 |
62 |
63 | def sin_f(*args, **kwargs):
64 | return np.sin(*args, **kwargs)
65 |
66 |
67 | def cos_f(*args, **kwargs):
68 | return np.cos(*args, **kwargs)
69 |
70 |
71 | # features can be used (1), not used (0) or transformed for training
72 | features_search_space = [
73 | 1,
74 | 0,
75 | log_f,
76 | square_f,
77 | sqrt_f,
78 | sin_f,
79 | cos_f,
80 | ]
81 |
82 | search_space = {
83 | "n_neighbors": list(range(1, 100)),
84 | "feature.0": features_search_space,
85 | "feature.1": features_search_space,
86 | "feature.2": features_search_space,
87 | "feature.3": features_search_space,
88 | "feature.4": features_search_space,
89 | "feature.5": features_search_space,
90 | "feature.6": features_search_space,
91 | "feature.7": features_search_space,
92 | "feature.8": features_search_space,
93 | "feature.9": features_search_space,
94 | }
95 |
96 |
97 | hyper = Hyperactive()
98 | hyper.add_search(model, search_space, n_iter=150)
99 | hyper.run()
100 |
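101 | # added sketch: show which transformation (1, 0 or a function) was chosen
102 | # for each feature in the best parameter set
103 | best_para = hyper.best_para(model)
104 | print({key: value for key, value in best_para.items() if "feature" in key})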
--------------------------------------------------------------------------------
/examples/optimization_applications/hyperpara_optimize.py:
--------------------------------------------------------------------------------
1 | """
2 | This example shows the original purpose of Hyperactive.
3 | You can search for any number of hyperparameters and Hyperactive
4 | will return the best one after the optimization run.
5 |
6 | """
7 |
8 | import numpy as np
9 | from sklearn.model_selection import cross_val_score
10 | from sklearn.ensemble import GradientBoostingClassifier
11 | from sklearn.datasets import load_wine
12 | from hyperactive import Hyperactive
13 |
14 | data = load_wine()
15 | X, y = data.data, data.target
16 |
17 |
18 | def model(opt):
19 |     gbr = GradientBoostingClassifier(
20 |         n_estimators=opt["n_estimators"],
21 |         max_depth=opt["max_depth"],
22 |         min_samples_split=opt["min_samples_split"],
23 |         min_samples_leaf=opt["min_samples_leaf"],
24 |         criterion=opt["criterion"],
25 |         subsample=opt["subsample"],
26 |     )
27 |     scores = cross_val_score(gbr, X, y, cv=4)
28 |
29 |     return scores.mean()
30 |
31 |
32 | search_space = {
33 |     "n_estimators": list(range(10, 150, 5)),
34 |     "max_depth": list(range(2, 12)),
35 |     "min_samples_split": list(range(2, 25)),
36 |     "min_samples_leaf": list(range(1, 25)),
37 |     "criterion": ["friedman_mse", "squared_error", "absolute_error"],
38 |     # subsample must lie in (0, 1] for GradientBoostingClassifier
39 |     "subsample": list(np.arange(0.1, 1.01, 0.1)),
40 | }
41 |
42 |
43 | hyper = Hyperactive()
44 | hyper.add_search(model, search_space, n_iter=40)
45 | hyper.run()
46 |
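47 | # added sketch: print the best hyperparameters found
48 | print("best parameters:", hyper.best_para(model))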
--------------------------------------------------------------------------------
/examples/optimization_applications/memory.py:
--------------------------------------------------------------------------------
1 | """
2 | Hyperactive saves all positions it explores in a memory dictionary. If it encounters
3 | these positions again, Hyperactive will just read the score from the memory dictionary
4 | instead of reevaluating the objective function. If there is a machine-/deep-learning
5 | model within the objective function this memory saves you a lot of computation
6 | time, because it is much faster to just look up the score in a dictionary instead
7 | of retraining an entire machine learning model.
8 |
9 | You can also pass the search data to the "memory_warm_start"-parameter of the next
10 | optimization run. This way the next optimization run has the memory of the
11 | previous run, which (again) saves you a lot of computation time.
12 | """
13 | import time
14 | from sklearn.model_selection import cross_val_score
15 | from sklearn.tree import DecisionTreeRegressor
16 | from sklearn.datasets import load_diabetes
17 | from hyperactive import Hyperactive
18 |
19 | data = load_diabetes()
20 | X, y = data.data, data.target
21 |
22 |
23 | def model(opt):
24 | gbr = DecisionTreeRegressor(
25 | max_depth=opt["max_depth"],
26 | min_samples_split=opt["min_samples_split"],
27 | )
28 | scores = cross_val_score(gbr, X, y, cv=10)
29 |
30 | return scores.mean()
31 |
32 |
33 | search_space = {
34 | "max_depth": list(range(10, 35)),
35 | "min_samples_split": list(range(2, 22)),
36 | }
37 |
38 | c_time1 = time.time()
39 | hyper = Hyperactive()
40 | hyper.add_search(model, search_space, n_iter=100)
41 | hyper.run()
42 | d_time1 = time.time() - c_time1
43 | print("Optimization time 1:", round(d_time1, 2))
44 |
45 | # Hyperactive collects the search data
46 | search_data = hyper.search_data(model)
47 |
48 | # You can pass the search data to memory_warm_start to save time
49 | c_time2 = time.time()
50 | hyper = Hyperactive()
51 | hyper.add_search(model, search_space, n_iter=100, memory_warm_start=search_data)
52 | # The next run will be faster, because Hyperactive knows parts of the search space
53 | hyper.run()
54 | d_time2 = time.time() - c_time2
55 | print("Optimization time 2:", round(d_time2, 2))
56 |
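57 | # added note: the second run is faster because every parameter combination
58 | # already evaluated in the first run is looked up instead of retrained
59 | print("speedup factor:", round(d_time1 / d_time2, 2))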
--------------------------------------------------------------------------------
/examples/optimization_applications/meta_data_collection.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | from sklearn.datasets import load_iris
3 | from sklearn.neighbors import KNeighborsClassifier
4 | from sklearn.model_selection import cross_val_score
5 |
6 | from hyperactive import Hyperactive
7 |
8 | data = load_iris()
9 | X, y = data.data, data.target
10 |
11 |
12 | def model1(opt):
13 |     knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"], leaf_size=opt["leaf_size"])
14 | scores = cross_val_score(knr, X, y, cv=10)
15 | score = scores.mean()
16 |
17 | return score
18 |
19 |
20 | search_space = {"n_neighbors": list(range(1, 50)), "leaf_size": list(range(5, 60, 5))}
21 |
22 |
23 | hyper = Hyperactive()
24 | hyper.add_search(model1, search_space, n_iter=500, memory=True)
25 | hyper.run()
26 |
27 | search_data = hyper.search_data(model1)
28 | # save the search data of a model for later use
29 | search_data.to_csv("./model1.csv", index=False)
30 |
31 |
32 | # load the search data and pass it to "memory_warm_start"
33 | search_data_loaded = pd.read_csv("./model1.csv")
34 |
35 | hyper = Hyperactive()
36 | hyper.add_search(
37 | model1, search_space, n_iter=500, memory=True, memory_warm_start=search_data_loaded
38 | )
39 | hyper.run()
40 |
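41 | # added note: "memory_warm_start" expects a dataframe with one column per
42 | # search-space dimension plus a "score" column - exactly the format that
43 | # hyper.search_data(...) returns and that was saved to model1.csv above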
--------------------------------------------------------------------------------
/examples/optimization_applications/meta_learning.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import pandas as pd
4 |
5 | from sklearn.datasets import load_iris
6 | from sklearn.datasets import make_classification
7 | from sklearn.neighbors import KNeighborsClassifier
8 | from sklearn.ensemble import GradientBoostingRegressor
9 | from sklearn.model_selection import cross_val_score
10 |
11 | from hyperactive import Hyperactive
12 |
13 |
14 | def model(opt):
15 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
16 | scores = cross_val_score(knr, X, y, cv=5)
17 | score = scores.mean()
18 |
19 | return score
20 |
21 |
22 | search_space = {
23 | "n_neighbors": list(range(1, 80)),
24 | }
25 |
26 |
27 | search_data_list = []
28 |
29 | for i in range(25):
30 | n_samples = random.randint(100, 1000)
31 | n_features = random.randint(3, 20)
32 | n_informative = n_features - random.randint(0, n_features - 2)
33 |
34 | X, y = make_classification(
35 | n_samples=n_samples,
36 | n_classes=2,
37 | n_features=n_features,
38 | n_informative=n_informative,
39 | n_redundant=0,
40 | random_state=i,
41 | )
42 |
43 | hyper = Hyperactive(verbosity=False)
44 | hyper.add_search(model, search_space, n_iter=10)
45 | hyper.run()
46 |
47 | search_data = hyper.search_data(model)
48 |
49 | search_data["size_X"] = X.size
50 | search_data["itemsize_X"] = X.itemsize
51 | search_data["ndim_X"] = X.ndim
52 |
53 | search_data["size_y"] = y.size
54 | search_data["itemsize_y"] = y.itemsize
55 | search_data["ndim_y"] = y.ndim
56 |
57 | search_data_list.append(search_data)
58 |
59 |
60 | meta_data = pd.concat(search_data_list)
61 |
62 | X_meta = meta_data.drop(["score"], axis=1)
63 | y_meta = meta_data["score"]
64 |
65 |
66 | gbr = GradientBoostingRegressor()
67 | gbr.fit(X_meta, y_meta)
68 |
69 | data = load_iris()
70 | X_new, y_new = data.data, data.target
71 |
72 | X_meta_test = pd.DataFrame(range(1, 80), columns=["n_neighbors"])  # align with search_space
73 |
74 | X_meta_test["size_X"] = X_new.size
75 | X_meta_test["itemsize_X"] = X_new.itemsize
76 | X_meta_test["ndim_X"] = X_new.ndim
77 |
78 | X_meta_test["size_y"] = y_new.size
79 | X_meta_test["itemsize_y"] = y_new.itemsize
80 | X_meta_test["ndim_y"] = y_new.ndim
81 |
82 |
83 | y_meta_pred = gbr.predict(X_meta_test)
84 |
85 | y_meta_pred_max_idx = y_meta_pred.argmax()
86 | n_neighbors_best = search_space["n_neighbors"][y_meta_pred_max_idx]
87 |
88 | # evaluate on the new dataset, using the meta-model prediction as a warm start
89 | X, y = X_new, y_new
90 | hyper = Hyperactive()
91 | hyper.add_search(
92 |     model,
93 |     search_space,
94 |     n_iter=200,
95 |     initialize={"warm_start": [{"n_neighbors": n_neighbors_best}]},
96 | )
97 | hyper.run()
98 |
--------------------------------------------------------------------------------
/examples/optimization_applications/meta_optimization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 | from hyperactive.optimizers import BayesianOptimizer
4 |
5 |
6 | from gradient_free_optimizers import RandomRestartHillClimbingOptimizer
7 |
8 |
9 | def meta_opt(opt_para):
10 | scores = []
11 |
12 | for i in range(33):
13 |
14 | def ackley_function(para):
15 | x = para["x"]
16 | y = para["y"]
17 | loss1 = -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
18 | loss2 = -np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
19 | loss3 = np.exp(1)
20 | loss4 = 20
21 |
22 | loss = loss1 + loss2 + loss3 + loss4
23 |
24 | return -loss
25 |
26 | dim_size = np.arange(-6, 6, 0.01)
27 |
28 | search_space = {
29 | "x": dim_size,
30 | "y": dim_size,
31 | }
32 |
33 | opt = RandomRestartHillClimbingOptimizer(
34 | search_space,
35 | random_state=i,
36 | epsilon=opt_para["epsilon"],
37 | n_neighbours=opt_para["n_neighbours"],
38 | n_iter_restart=opt_para["n_iter_restart"],
39 | )
40 | opt.search(
41 | ackley_function,
42 | n_iter=100,
43 | verbosity=False,
44 | )
45 |
46 | scores.append(opt.best_score)
47 |
48 | return np.array(scores).sum()
49 |
50 |
51 | search_space = {
52 | "epsilon": list(np.arange(0.01, 0.1, 0.01)),
53 | "n_neighbours": list(range(1, 10)),
54 | "n_iter_restart": list(range(2, 12)),
55 | }
56 |
57 |
58 | optimizer = BayesianOptimizer()
59 |
60 | hyper = Hyperactive()
61 | hyper.add_search(meta_opt, search_space, n_iter=120, optimizer=optimizer)
62 | hyper.run()
63 |
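64 | # added sketch: the best settings found for the inner optimizer
65 | print("best optimizer settings:", hyper.best_para(meta_opt))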
--------------------------------------------------------------------------------
/examples/optimization_applications/model_selection.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import cross_val_score
2 |
3 | from sklearn.svm import SVR
4 | from sklearn.neighbors import KNeighborsRegressor
5 | from sklearn.gaussian_process import GaussianProcessRegressor
6 | from sklearn.tree import DecisionTreeRegressor
7 | from sklearn.ensemble import (
8 | GradientBoostingRegressor,
9 | RandomForestRegressor,
10 | ExtraTreesRegressor,
11 | )
12 | from sklearn.neural_network import MLPRegressor
13 |
14 | from sklearn.datasets import load_diabetes
15 | from hyperactive import Hyperactive
16 |
17 | data = load_diabetes()
18 | X, y = data.data, data.target
19 |
20 |
21 | def model(opt):
22 | model_class = opt["regressor"]()
23 | model = model_class()
24 | scores = cross_val_score(model, X, y, cv=5)
25 |
26 | return scores.mean()
27 |
28 |
29 | def SVR_f():
30 | return SVR
31 |
32 |
33 | def KNeighborsRegressor_f():
34 | return KNeighborsRegressor
35 |
36 |
37 | def GaussianProcessRegressor_f():
38 | return GaussianProcessRegressor
39 |
40 |
41 | def DecisionTreeRegressor_f():
42 | return DecisionTreeRegressor
43 |
44 |
45 | def GradientBoostingRegressor_f():
46 | return GradientBoostingRegressor
47 |
48 |
49 | def RandomForestRegressor_f():
50 | return RandomForestRegressor
51 |
52 |
53 | def ExtraTreesRegressor_f():
54 | return ExtraTreesRegressor
55 |
56 |
57 | def MLPRegressor_f():
58 | return MLPRegressor
59 |
60 |
61 | search_space = {
62 | "regressor": [
63 | SVR_f,
64 | KNeighborsRegressor_f,
65 | GaussianProcessRegressor_f,
66 | DecisionTreeRegressor_f,
67 | GradientBoostingRegressor_f,
68 | RandomForestRegressor_f,
69 | ExtraTreesRegressor_f,
70 | MLPRegressor_f,
71 | ],
72 | }
73 |
74 |
75 | hyper = Hyperactive()
76 | hyper.add_search(model, search_space, n_iter=50)
77 | hyper.run()
78 |
--------------------------------------------------------------------------------
/examples/optimization_applications/multiple_different_optimizers.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from sklearn.model_selection import cross_val_score
4 | from sklearn.ensemble import GradientBoostingClassifier
5 | from sklearn.ensemble import RandomForestClassifier
6 | from sklearn.datasets import load_breast_cancer
7 |
8 | from hyperactive import Hyperactive
9 | from hyperactive.optimizers import (
10 | HillClimbingOptimizer,
11 | RandomRestartHillClimbingOptimizer,
12 | )
13 |
14 | data = load_breast_cancer()
15 | X, y = data.data, data.target
16 |
17 |
18 | def model_rfc(opt):
19 | rfc = RandomForestClassifier(
20 | n_estimators=opt["n_estimators"],
21 | criterion=opt["criterion"],
22 | max_features=opt["max_features"],
23 | min_samples_split=opt["min_samples_split"],
24 | min_samples_leaf=opt["min_samples_leaf"],
25 | bootstrap=opt["bootstrap"],
26 | )
27 | scores = cross_val_score(rfc, X, y, cv=3)
28 |
29 | return scores.mean()
30 |
31 |
32 | def model_gbc(opt):
33 | gbc = GradientBoostingClassifier(
34 | n_estimators=opt["n_estimators"],
35 | learning_rate=opt["learning_rate"],
36 | max_depth=opt["max_depth"],
37 | min_samples_split=opt["min_samples_split"],
38 | min_samples_leaf=opt["min_samples_leaf"],
39 | subsample=opt["subsample"],
40 | max_features=opt["max_features"],
41 | )
42 | scores = cross_val_score(gbc, X, y, cv=3)
43 |
44 | return scores.mean()
45 |
46 |
47 | search_space_rfc = {
48 | "n_estimators": list(range(10, 200, 10)),
49 | "criterion": ["gini", "entropy"],
50 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
51 | "min_samples_split": list(range(2, 21)),
52 | "min_samples_leaf": list(range(1, 21)),
53 | "bootstrap": [True, False],
54 | }
55 |
56 |
57 | search_space_gbc = {
58 | "n_estimators": list(range(10, 200, 10)),
59 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
60 | "max_depth": list(range(1, 11)),
61 | "min_samples_split": list(range(2, 21)),
62 | "min_samples_leaf": list(range(1, 21)),
63 | "subsample": list(np.arange(0.05, 1.01, 0.05)),
64 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
65 | }
66 |
67 | optimizer1 = HillClimbingOptimizer()
68 | optimizer2 = RandomRestartHillClimbingOptimizer()
69 |
70 |
71 | hyper = Hyperactive()
72 | hyper.add_search(
73 | model_rfc,
74 | search_space_rfc,
75 | n_iter=50,
76 | optimizer=optimizer1,
77 | )
78 | hyper.add_search(
79 | model_gbc,
80 | search_space_gbc,
81 | n_iter=50,
82 | optimizer=optimizer2,
83 | n_jobs=2,
84 | )
85 | hyper.run(max_time=5)
86 |
--------------------------------------------------------------------------------
/examples/optimization_applications/multiple_scores.py:
--------------------------------------------------------------------------------
1 | import time
2 | from sklearn.model_selection import cross_val_score
3 | from sklearn.ensemble import GradientBoostingRegressor
4 | from sklearn.datasets import load_diabetes
5 | from hyperactive import Hyperactive
6 |
7 | data = load_diabetes()
8 | X, y = data.data, data.target
9 |
10 | """
11 | Hyperactive cannot handle multi-objective optimization,
12 | but we can achieve something similar with a workaround.
13 | The following example searches for the highest cv-score and the lowest training time.
14 | This is possible by creating a single objective/score from those two variables.
15 | You can also return additional parameters to track the cv-score and training time separately.
16 | """
17 |
18 |
19 | def model(opt):
20 | gbr = GradientBoostingRegressor(
21 | n_estimators=opt["n_estimators"],
22 | max_depth=opt["max_depth"],
23 | min_samples_split=opt["min_samples_split"],
24 | )
25 |
26 | c_time = time.time()
27 | scores = cross_val_score(gbr, X, y, cv=3)
28 | train_time = time.time() - c_time
29 |
30 | cv_score = scores.mean()
31 |
32 | # you can create a score that is a composition of two objectives
33 | score = cv_score / train_time
34 |
35 | # instead of just returning the score you can also return the score + a dict
36 | return score, {"training_time": train_time, "cv_score": cv_score}
37 |
38 |
39 | search_space = {
40 | "n_estimators": list(range(10, 150, 5)),
41 | "max_depth": list(range(2, 12)),
42 | "min_samples_split": list(range(2, 22)),
43 | }
44 |
45 |
46 | hyper = Hyperactive()
47 | hyper.add_search(model, search_space, n_iter=20)
48 | hyper.run()
49 |
50 | # The variables from the dict are collected in the results.
51 | print("\n Results \n", hyper.search_data(model))
52 |
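53 | # added sketch: "cv_score" and "training_time" appear as extra columns in the
54 | # search data, so each objective can also be inspected on its own
55 | print(hyper.search_data(model).sort_values("cv_score", ascending=False).head())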
--------------------------------------------------------------------------------
/examples/optimization_applications/neural_architecture_search.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from keras.models import Sequential
3 | from keras.layers import (
4 | Dense,
5 | Conv2D,
6 | MaxPooling2D,
7 | Flatten,
8 | Activation,
9 | Dropout,
10 | )
11 | from keras.datasets import cifar10
12 | from keras.utils import to_categorical
13 |
14 | from hyperactive import Hyperactive
15 |
16 | (X_train, y_train), (X_test, y_test) = cifar10.load_data()
17 |
18 | y_train = to_categorical(y_train, 10)
19 | y_test = to_categorical(y_test, 10)
20 |
21 | # to make the example quick
22 | X_train = X_train[0:1000]
23 | y_train = y_train[0:1000]
24 |
25 | X_test = X_test[0:1000]
26 | y_test = y_test[0:1000]
27 |
28 |
29 | def conv1(nn):
30 | nn.add(Conv2D(32, (3, 3)))
31 | nn.add(Activation("relu"))
32 | nn.add(MaxPooling2D(pool_size=(2, 2)))
33 | return nn
34 |
35 |
36 | def conv2(nn):
37 | nn.add(Conv2D(32, (3, 3)))
38 | nn.add(Activation("relu"))
39 | return nn
40 |
41 |
42 | def conv3(nn):
43 | return nn
44 |
45 |
46 | def cnn(opt):
47 | nn = Sequential()
48 | nn.add(
49 | Conv2D(
50 | opt["filters.0"],
51 | (3, 3),
52 | padding="same",
53 | input_shape=X_train.shape[1:],
54 | )
55 | )
56 | nn.add(Activation("relu"))
57 | nn.add(Conv2D(opt["filters.0"], (3, 3)))
58 | nn.add(Activation("relu"))
59 | nn.add(MaxPooling2D(pool_size=(2, 2)))
60 | nn.add(Dropout(0.25))
61 |
62 | nn.add(Conv2D(opt["filters.0"], (3, 3), padding="same"))
63 | nn.add(Activation("relu"))
64 | nn = opt["conv_layer.0"](nn)
65 | nn.add(Dropout(0.25))
66 |
67 | nn.add(Flatten())
68 | nn.add(Dense(opt["neurons.0"]))
69 | nn.add(Activation("relu"))
70 | nn.add(Dropout(0.5))
71 | nn.add(Dense(10))
72 | nn.add(Activation("softmax"))
73 |
74 | nn.compile(
75 | optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"]
76 | )
77 | nn.fit(X_train, y_train, epochs=5, batch_size=256)
78 |
79 | _, score = nn.evaluate(x=X_test, y=y_test)
80 |
81 | return score
82 |
83 |
84 | search_space = {
85 | "conv_layer.0": [conv1, conv2, conv3],
86 | "filters.0": [16, 32, 64, 128],
87 | "neurons.0": list(range(100, 1000, 100)),
88 | }
89 |
90 |
91 | hyper = Hyperactive()
92 | hyper.add_search(cnn, search_space, n_iter=5)
93 | hyper.run()
94 |
--------------------------------------------------------------------------------
/examples/optimization_applications/pretrained_nas.py:
--------------------------------------------------------------------------------
1 | """
2 | This script describes how to save time during the optimization by
3 | using a pretrained model. It is similar to the transfer learning example,
4 | but here you do the training and model creation of the pretrained model
5 | yourself.
6 |
7 | The problem is that most of the optimization time is "wasted" on
8 | training the model. The time Hyperactive needs to find a new position
9 | to explore is very small compared to the training time of
10 | neural networks. This means that we can do more optimization
11 | if we keep the training time as short as possible.
12 |
13 | The idea of pretrained neural architecture search is to pretrain a complete model once.
14 | In the next step we remove the layers that should be optimized
15 | and make the remaining layers not-trainable.
16 |
17 | This results in a partial, pretrained, not-trainable model that will be
18 | used during the Hyperactive optimization.
19 |
20 | You can now add layers to the partial model in the objective function
21 | and add the parameters or layers that will be optimized by Hyperactive.
22 |
23 | With each iteration of the optimization run we are only training
24 | the added layers of the model. This saves a lot of training time.
25 |
26 | """
27 |
28 | import numpy as np
29 | import keras
30 | from keras.models import Sequential
31 | from keras.layers import (
32 | Dense,
33 | Conv2D,
34 | MaxPooling2D,
35 | Flatten,
36 | Activation,
37 | Dropout,
38 | )
39 | from keras.datasets import cifar10
40 | from keras.utils import to_categorical
41 |
42 | from hyperactive import Hyperactive
43 |
44 | (X_train, y_train), (X_test, y_test) = cifar10.load_data()
45 |
46 | y_train = to_categorical(y_train, 10)
47 | y_test = to_categorical(y_test, 10)
48 |
49 | # to make the example quick
50 | X_train = X_train[0:1000]
51 | y_train = y_train[0:1000]
52 |
53 | X_test = X_test[0:1000]
54 | y_test = y_test[0:1000]
55 |
56 |
57 | # create model and train it
58 | model = Sequential()
59 | model.add(Conv2D(64, (3, 3), padding="same", input_shape=X_train.shape[1:]))
60 | model.add(Activation("relu"))
61 | model.add(Conv2D(32, (3, 3)))
62 | model.add(Activation("relu"))
63 | model.add(MaxPooling2D(pool_size=(2, 2)))
64 | model.add(Dropout(0.25))
65 |
66 | model.add(Conv2D(32, (3, 3), padding="same"))
67 | model.add(Activation("relu"))
68 | model.add(Dropout(0.25))
69 | model.add(Flatten())
70 | model.add(Dense(200))
71 | model.add(Activation("relu"))
72 | model.add(Dropout(0.5))
73 | model.add(Dense(10))
74 | model.add(Activation("softmax"))
75 |
76 | model.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
77 | model.fit(X_train, y_train, epochs=5, batch_size=500)
78 |
79 | model_pretrained = model
80 | n_layers = len(model_pretrained.layers)
81 |
82 | # delete the last 9 layers
83 | for i in range(n_layers - 9):
84 | model_pretrained.pop()
85 |
86 | # set remaining layers to not-trainable
87 | for layer in model_pretrained.layers:
88 | layer.trainable = False
89 |
90 | model_pretrained.summary()
91 |
92 |
93 | def cnn(opt):
94 | model = keras.models.clone_model(model_pretrained)
95 |     model = opt["conv_layer.0"](model)  # apply the conv block chosen by the optimizer
96 | model.add(Flatten())
97 | model.add(Dense(opt["neurons.0"]))
98 | model.add(Activation("relu"))
99 | model.add(Dropout(0.5))
100 | model.add(Dense(10))
101 | model.add(Activation("softmax"))
102 |
103 | model.compile(
104 | optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"]
105 | )
106 | model.fit(X_train, y_train, epochs=5, batch_size=500)
107 |
108 | model.summary()
109 |
110 | _, score = model.evaluate(x=X_test, y=y_test)
111 |
112 | return score
113 |
114 |
115 | # conv1, conv2 and conv3 are functions that add layers. We want to know which one is best
116 | def conv1(model):
117 | model.add(Conv2D(64, (3, 3)))
118 | model.add(Activation("relu"))
119 | model.add(MaxPooling2D(pool_size=(2, 2)))
120 | return model
121 |
122 |
123 | def conv2(model):
124 | model.add(Conv2D(64, (3, 3)))
125 | model.add(Activation("relu"))
126 | return model
127 |
128 |
129 | def conv3(model):
130 | return model
131 |
132 |
133 | search_space = {
134 | "conv_layer.0": [conv1, conv2, conv3],
135 | "neurons.0": list(range(100, 1000, 100)),
136 | }
137 |
138 |
139 | hyper = Hyperactive()
140 | hyper.add_search(cnn, search_space, n_iter=3)
141 | hyper.run()
142 |
--------------------------------------------------------------------------------
/examples/optimization_applications/search_space_example.py:
--------------------------------------------------------------------------------
1 | """
2 | Hyperactive is very versatile, because it can handle not just numerical or
3 | string variables in the search space, but also functions. If you want to
4 | search for the best list, numpy array, dataframe or class, you can put them into a
5 | function that returns them, as shown in the example below.
6 |
7 | This enables many possibilities for more complex optimization applications.
8 | Neural architecture search, feature engineering, ensemble optimization and many other applications are
9 | only possible or much easier if you can put functions in the search space.
10 | """
11 |
12 | from hyperactive import Hyperactive
13 |
14 |
15 | def function_0():
16 | # do stuff in function0
17 | return
18 |
19 |
20 | def function_1():
21 | # do stuff in function1
22 | return
23 |
24 |
25 | def function_2():
26 | # do stuff in function2
27 | return
28 |
29 |
30 | def list1():
31 | return [1, 0, 0]
32 |
33 |
34 | def list2():
35 | return [0, 1, 0]
36 |
37 |
38 | def list3():
39 | return [0, 0, 1]
40 |
41 |
42 | # Hyperactive can handle python objects in the search space
43 | search_space = {
44 | "int": list(range(1, 10)),
45 | "float": [0.1, 0.01, 0.001],
46 | "string": ["string1", "string2"],
47 | "function": [function_0, function_1, function_2],
48 | "list": [list1, list2, list3],
49 | }
50 |
51 |
52 | def objective_function(para):
53 | # score must be a number
54 | score = 1
55 | return score
56 |
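    | # In a real objective the sampled objects would be used, for example:
    | #     values = para["list"]()  # one of [1, 0, 0], [0, 1, 0], [0, 0, 1]
    | #     para["function"]()       # one of function_0, function_1, function_2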
57 |
58 | hyper = Hyperactive()
59 | hyper.add_search(objective_function, search_space, n_iter=20)
60 | hyper.run()
61 |
62 | search_data = hyper.search_data(objective_function)
63 |
64 | print("\n Search Data: \n", search_data)
65 |
--------------------------------------------------------------------------------
/examples/optimization_applications/sklearn_pipeline_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_breast_cancer
2 | from sklearn.model_selection import cross_val_score
3 | from sklearn.feature_selection import SelectKBest, f_classif
4 | from sklearn.ensemble import GradientBoostingClassifier
5 | from sklearn.pipeline import Pipeline
6 |
7 | from hyperactive import Hyperactive
8 |
9 | data = load_breast_cancer()
10 | X, y = data.data, data.target
11 |
12 |
13 | def pipeline1(filter_, gbc):
14 | return Pipeline([("filter_", filter_), ("gbc", gbc)])
15 |
16 |
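    | # pipeline2 deliberately skips the feature-selection step, so the search also
    | # decides whether SelectKBest helps at all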
17 | def pipeline2(filter_, gbc):
18 | return gbc
19 |
20 |
21 | def model(opt):
22 | gbc = GradientBoostingClassifier(
23 | n_estimators=opt["n_estimators"],
24 | max_depth=opt["max_depth"],
25 | min_samples_split=opt["min_samples_split"],
26 | min_samples_leaf=opt["min_samples_leaf"],
27 | )
28 | filter_ = SelectKBest(f_classif, k=opt["k"])
29 | model_ = opt["pipeline"](filter_, gbc)
30 |
31 | scores = cross_val_score(model_, X, y, cv=3)
32 |
33 | return scores.mean()
34 |
35 |
36 | search_space = {
37 | "k": list(range(2, 30)),
38 | "n_estimators": list(range(10, 200, 10)),
39 | "max_depth": list(range(2, 12)),
40 | "min_samples_split": list(range(2, 12)),
41 | "min_samples_leaf": list(range(1, 11)),
42 | "pipeline": [pipeline1, pipeline2],
43 | }
44 |
45 |
46 | hyper = Hyperactive()
47 | hyper.add_search(model, search_space, n_iter=30)
48 | hyper.run()
49 |
--------------------------------------------------------------------------------
/examples/optimization_applications/sklearn_preprocessing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.datasets import load_breast_cancer
3 | from sklearn.model_selection import cross_val_score
4 | from sklearn.decomposition import PCA
5 | from sklearn.feature_selection import SelectKBest, f_classif
6 | from sklearn.ensemble import GradientBoostingClassifier
7 | from hyperactive import Hyperactive
8 |
9 | data = load_breast_cancer()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | model = GradientBoostingClassifier(
15 | n_estimators=opt["n_estimators"],
16 | max_depth=opt["max_depth"],
17 | )
18 |
19 | X_pca = opt["decomposition"](X, opt)
20 | X_mod = np.hstack((X, X_pca))
21 |
22 | X_best = SelectKBest(f_classif, k=opt["k"]).fit_transform(X_mod, y)
23 | scores = cross_val_score(model, X_best, y, cv=3)
24 |
25 | return scores.mean()
26 |
27 |
28 | def pca(X_, opt):
29 | X_ = PCA(n_components=opt["n_components"]).fit_transform(X_)
30 |
31 | return X_
32 |
33 |
34 | def none(X_, opt):
35 |     return np.empty((X_.shape[0], 0))  # appends no extra features to X
36 |
37 |
38 | search_space = {
39 | "decomposition": [pca, none],
40 | "k": list(range(2, 30)),
41 | "n_components": list(range(1, 11)),
42 | "n_estimators": list(range(10, 100, 3)),
43 | "max_depth": list(range(2, 12)),
44 | }
45 |
46 |
47 | hyper = Hyperactive()
48 | hyper.add_search(model, search_space, n_iter=20)
49 | hyper.run()
50 |
--------------------------------------------------------------------------------
/examples/optimization_applications/test_function.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 |
4 |
5 | def ackley_function(para):
6 | x, y = para["x"], para["y"]
7 |
8 | loss = (
9 | -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
10 | - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
11 | + np.exp(1)
12 | + 20
13 | )
14 |
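    | # Hyperactive maximizes the objective, so the Ackley loss is negated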
15 | return -loss
16 |
17 |
18 | search_space = {
19 | "x": list(np.arange(-10, 10, 0.01)),
20 | "y": list(np.arange(-10, 10, 0.01)),
21 | }
22 |
23 |
24 | hyper = Hyperactive()
25 | hyper.add_search(ackley_function, search_space, n_iter=100000)
26 | hyper.run()
27 |
--------------------------------------------------------------------------------
/examples/optimization_applications/transfer_learning.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from keras.models import Sequential
3 | from keras import applications
4 | from keras.layers import Dense, Flatten, Dropout, Activation
5 | from keras.datasets import cifar10
6 | from keras.utils import to_categorical
7 |
8 | from hyperactive import Hyperactive
9 |
10 | (X_train, y_train), (X_test, y_test) = cifar10.load_data()
11 |
12 | y_train = to_categorical(y_train, 10)
13 | y_test = to_categorical(y_test, 10)
14 |
15 | nn = applications.VGG19(weights="imagenet", include_top=False)
16 |
17 | for layer in nn.layers[:5]:
18 | layer.trainable = False
19 |
20 |
21 | def cnn(opt):
22 |     model = Sequential()
23 |     model.add(nn)  # the partially frozen, pretrained VGG19 base
24 |     model.add(Flatten())
25 |     model.add(Dense(opt["Dense.0"]))
26 |     model.add(Activation("relu"))
27 |     model.add(Dropout(opt["Dropout.0"]))
28 |     model.add(Dense(10))
29 |     model.add(Activation("softmax"))
30 |
31 |     model.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
32 |     model.fit(X_train, y_train, epochs=5, batch_size=256)
33 |
34 |     _, score = model.evaluate(x=X_test, y=y_test)
35 |
36 | return score
37 |
38 |
39 | search_space = {
40 | "Dense.0": list(range(100, 1000, 100)),
41 | "Dropout.0": list(np.arange(0.1, 0.9, 0.1)),
42 | }
43 |
44 |
45 | hyper = Hyperactive()
46 | hyper.add_search(cnn, search_space, n_iter=5)
47 | hyper.run()
48 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/bayesian_optimization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | from sklearn.datasets import load_iris
5 | from sklearn.neighbors import KNeighborsClassifier
6 | from sklearn.model_selection import cross_val_score
7 |
8 | from hyperactive import Hyperactive
9 | from hyperactive.optimizers import BayesianOptimizer
10 |
11 |
12 | data = load_iris()
13 | X, y = data.data, data.target
14 |
15 |
16 | def model(opt):
17 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
18 | scores = cross_val_score(knr, X, y, cv=5)
19 | score = scores.mean()
20 |
21 | return score
22 |
23 |
24 | search_space = {
25 | "n_neighbors": list(range(1, 100)),
26 | }
27 |
28 |
29 | hyper = Hyperactive()
30 | hyper.add_search(model, search_space, n_iter=100)
31 | hyper.run()
32 |
33 | search_data = hyper.search_data(model)
34 |
35 |
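    | # warm_start_smbo seeds the surrogate model with the search data collected
    | # above, so the second run does not start from scratch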
36 | optimizer = BayesianOptimizer(xi=0.03, warm_start_smbo=search_data, rand_rest_p=0.1)
37 |
38 | hyper = Hyperactive()
39 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
40 | hyper.run()
41 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/direct_algorithm.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import DirectAlgorithm
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
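    | # DIRECT (DIviding RECTangles) recursively splits the search space into
    | # hyper-rectangles and samples the most promising ones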
19 | opt = DirectAlgorithm()
20 |
21 |
22 | hyper = Hyperactive()
23 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
24 | hyper.run()
25 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/downhill_simplex.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import DownhillSimplexOptimizer
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
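    | # Nelder-Mead coefficients: alpha = reflection, gamma = expansion,
    | # beta = contraction, sigma = shrink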
19 | opt = DownhillSimplexOptimizer(
20 | alpha=1.2,
21 | gamma=1.1,
22 | beta=0.8,
23 | sigma=1,
24 | )
25 |
26 |
27 | hyper = Hyperactive()
28 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
29 | hyper.run()
30 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/ensemble_optimizer.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from sklearn.svm import SVR
6 | from sklearn.tree import DecisionTreeRegressor
7 | from sklearn.neural_network import MLPRegressor
8 |
9 | from hyperactive import Hyperactive
10 | from hyperactive.optimizers import EnsembleOptimizer
11 |
12 |
13 | data = load_iris()
14 | X, y = data.data, data.target
15 |
16 |
17 | def model(opt):
18 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
19 | scores = cross_val_score(knr, X, y, cv=5)
20 | score = scores.mean()
21 |
22 | return score
23 |
24 |
25 | search_space = {
26 | "n_neighbors": list(range(1, 100)),
27 | }
28 |
29 | hyper = Hyperactive()
30 | hyper.add_search(model, search_space, n_iter=100)
31 | hyper.run()
32 |
33 | search_data = hyper.search_data(model)
34 |
35 | optimizer = EnsembleOptimizer(
36 | estimators=[SVR(), DecisionTreeRegressor(), MLPRegressor()],
37 | xi=0.02,
38 | warm_start_smbo=search_data,
39 | rand_rest_p=0.05,
40 | )
41 |
42 | hyper = Hyperactive()
43 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
44 | hyper.run()
45 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/evolution_strategy.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import EvolutionStrategyOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
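    | # mutation_rate and crossover_rate control how often offspring are created by
    | # mutating a single individual vs. recombining two of them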
26 | optimizer = EvolutionStrategyOptimizer(
27 | mutation_rate=0.5, crossover_rate=0.5, rand_rest_p=0.05
28 | )
29 |
30 | hyper = Hyperactive()
31 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
32 | hyper.run()
33 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/forest_optimization.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import ForestOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 | hyper = Hyperactive()
26 | hyper.add_search(model, search_space, n_iter=100)
27 | hyper.run()
28 |
29 | search_data = hyper.search_data(model)
30 |
31 | optimizer = ForestOptimizer(
32 | tree_regressor="random_forest",
33 | xi=0.02,
34 | warm_start_smbo=search_data,
35 | rand_rest_p=0.05,
36 | )
37 |
38 | hyper = Hyperactive()
39 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
40 | hyper.run()
41 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/grid_search.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import GridSearchOptimizer
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
19 | opt = GridSearchOptimizer(
20 | step_size=3,
21 | )
22 |
23 |
24 | hyper = Hyperactive()
25 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
26 | hyper.run()
27 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/hill_climbing.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import HillClimbingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
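    | # epsilon sets the step size, distribution the shape of the sampled steps, and
    | # n_neighbours the number of neighbours evaluated per iteration (see the
    | # gradient-free-optimizers documentation)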
26 | optimizer = HillClimbingOptimizer(
27 | epsilon=0.1, distribution="laplace", n_neighbours=4, rand_rest_p=0.1
28 | )
29 |
30 | hyper = Hyperactive()
31 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
32 | hyper.run()
33 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/lipschitz_optimization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import LipschitzOptimizer
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
19 | opt = LipschitzOptimizer(
20 | sampling={"random": 100000},
21 | )
22 |
23 | hyper = Hyperactive()
24 | hyper.add_search(sphere_function, search_space, n_iter=100, optimizer=opt)
25 | hyper.run()
26 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/parallel_tempering.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import ParallelTemperingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
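    | # n_iter_swap sets how many iterations pass between attempts to swap
    | # temperatures of the parallel annealing chains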
26 | optimizer = ParallelTemperingOptimizer(n_iter_swap=5, rand_rest_p=0.05)
27 |
28 | hyper = Hyperactive()
29 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
30 | hyper.run()
31 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/particle_swarm_optimization.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import ParticleSwarmOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
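    | # inertia scales a particle's previous velocity; cognitive_weight pulls it
    | # toward its own best position, social_weight toward the swarm's best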
26 | optimizer = ParticleSwarmOptimizer(
27 | inertia=0.4,
28 | cognitive_weight=0.7,
29 | social_weight=0.7,
30 | temp_weight=0.3,
31 | rand_rest_p=0.05,
32 | )
33 |
34 | hyper = Hyperactive()
35 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
36 | hyper.run()
37 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/pattern_search.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import PatternSearch
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
19 | opt = PatternSearch(
20 | n_positions=2,
21 | pattern_size=0.5,
22 | reduction=0.99,
23 | )
24 |
25 |
26 | hyper = Hyperactive()
27 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
28 | hyper.run()
29 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/powells_method.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import PowellsMethod
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
19 | opt = PowellsMethod(
20 | iters_p_dim=20,
21 | )
22 |
23 | hyper = Hyperactive()
24 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
25 | hyper.run()
26 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/rand_rest_hill_climbing.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import RandomRestartHillClimbingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
26 | optimizer = RandomRestartHillClimbingOptimizer(
27 | epsilon=0.1,
28 | distribution="laplace",
29 | n_neighbours=4,
30 | rand_rest_p=0.1,
31 | n_iter_restart=20,
32 | )
33 |
34 | hyper = Hyperactive()
35 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
36 | hyper.run()
37 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/random_annealing.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import RandomAnnealingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
26 | optimizer = RandomAnnealingOptimizer(
27 | epsilon=0.1,
28 | distribution="laplace",
29 | n_neighbours=4,
30 | rand_rest_p=0.1,
31 | annealing_rate=0.999,
32 | start_temp=0.8,
33 | )
34 |
35 | hyper = Hyperactive()
36 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
37 | hyper.run()
38 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/random_search.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import RandomSearchOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
26 | optimizer = RandomSearchOptimizer()
27 |
28 | hyper = Hyperactive()
29 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
30 | hyper.run()
31 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/repulsing_hill_climbing.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import RepulsingHillClimbingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
26 | optimizer = RepulsingHillClimbingOptimizer(
27 | epsilon=0.1,
28 | distribution="laplace",
29 | n_neighbours=4,
30 | repulsion_factor=5,
31 | rand_rest_p=0.1,
32 | )
33 |
34 |
35 | hyper = Hyperactive()
36 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
37 | hyper.run()
38 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/simulated_annealing.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import SimulatedAnnealingOptimizer
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 |
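    | # start_temp sets the initial acceptance temperature; annealing_rate (< 1)
    | # shrinks it each iteration, so worse positions are accepted less often over time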
26 | optimizer = SimulatedAnnealingOptimizer(
27 | epsilon=0.1,
28 | distribution="laplace",
29 | n_neighbours=4,
30 | rand_rest_p=0.1,
31 | p_accept=0.15,
32 | norm_factor="adaptive",
33 | annealing_rate=0.999,
34 | start_temp=0.8,
35 | )
36 |
37 | hyper = Hyperactive()
38 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
39 | hyper.run()
40 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/spiral_optimization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import SpiralOptimization
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-25, 10, 0.1)),
16 | "y": list(np.arange(-10, 15, 0.1)),
17 | }
18 |
19 | opt = SpiralOptimization(
20 | population=15,
21 | decay_rate=0.99,
22 | )
23 |
24 | hyper = Hyperactive()
25 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
26 | hyper.run()
27 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/stochastic_hill_climbing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 | from hyperactive.optimizers import StochasticHillClimbingOptimizer
5 |
6 |
7 | def sphere_function(para):
8 | x = para["x"]
9 | y = para["y"]
10 |
11 | return -(x * x + y * y)
12 |
13 |
14 | search_space = {
15 | "x": list(np.arange(-10, 10, 0.1)),
16 | "y": list(np.arange(-10, 10, 0.1)),
17 | }
18 |
19 | opt = StochasticHillClimbingOptimizer(
20 | epsilon=0.01,
21 | n_neighbours=5,
22 | distribution="laplace",
23 | p_accept=0.05,
24 | )
25 |
26 | hyper = Hyperactive()
27 | hyper.add_search(sphere_function, search_space, n_iter=1500, optimizer=opt)
28 | hyper.run()
29 |
--------------------------------------------------------------------------------
/examples/optimization_techniques/tpe.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.neighbors import KNeighborsClassifier
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import TreeStructuredParzenEstimators
7 |
8 |
9 | data = load_iris()
10 | X, y = data.data, data.target
11 |
12 |
13 | def model(opt):
14 | knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
15 | scores = cross_val_score(knr, X, y, cv=5)
16 | score = scores.mean()
17 |
18 | return score
19 |
20 |
21 | search_space = {
22 | "n_neighbors": list(range(1, 100)),
23 | }
24 |
25 | hyper = Hyperactive()
26 | hyper.add_search(model, search_space, n_iter=100)
27 | hyper.run()
28 |
29 | search_data = hyper.search_data(model)
30 |
31 |
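    | # gamma_tpe sets the quantile that splits the observed scores into "good" and
    | # "bad" groups, whose density ratio guides where to sample next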
32 | optimizer = TreeStructuredParzenEstimators(
33 | gamma_tpe=0.5, warm_start_smbo=search_data, rand_rest_p=0.05
34 | )
35 |
36 | hyper = Hyperactive()
37 | hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
38 | hyper.run()
39 |
--------------------------------------------------------------------------------
/examples/tensorflow_example.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 | import tensorflow as tf
3 | from tensorflow.examples.tutorials.mnist import input_data
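    | # note: this example uses the TensorFlow 1.x API (tf.layers, tf.estimator.inputs,
    | # tensorflow.examples.tutorials) and does not run on TensorFlow 2.x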
4 |
5 | from hyperactive import Hyperactive
6 |
7 | mnist = input_data.read_data_sets("/tmp/data/", one_hot=False)
8 |
9 | learning_rate = 0.001
10 | num_steps = 500
11 | batch_size = 128
12 |
13 | num_input = 784
14 | num_classes = 10
15 | dropout = 0.25
16 |
17 | X_train = mnist.train.images
18 | y_train = mnist.train.labels
19 |
20 | X_test = mnist.test.images
21 | y_test = mnist.test.labels
22 |
23 |
24 | def cnn(para):
25 | def conv_net(x_dict, n_classes, dropout, reuse, is_training):
26 | with tf.variable_scope("ConvNet", reuse=reuse):
27 | x = x_dict["images"]
28 | x = tf.reshape(x, shape=[-1, 28, 28, 1])
29 | conv1 = tf.layers.conv2d(x, para["filters_0"], 5, activation=tf.nn.relu)
30 | conv1 = tf.layers.max_pooling2d(conv1, 2, 2)
31 | conv2 = tf.layers.conv2d(conv1, para["filters_1"], 3, activation=tf.nn.relu)
32 | conv2 = tf.layers.max_pooling2d(conv2, 2, 2)
33 | fc1 = tf.contrib.layers.flatten(conv2)
34 | fc1 = tf.layers.dense(fc1, para["dense_0"])
35 | fc1 = tf.layers.dropout(fc1, rate=dropout, training=is_training)
36 | out = tf.layers.dense(fc1, n_classes)
37 |
38 | return out
39 |
40 | def model_fn(features, labels, mode):
41 | logits_train = conv_net(
42 | features, num_classes, dropout, reuse=False, is_training=True
43 | )
44 | logits_test = conv_net(
45 | features, num_classes, dropout, reuse=True, is_training=False
46 | )
47 |
48 | pred_classes = tf.argmax(logits_test, axis=1)
49 | # pred_probas = tf.nn.softmax(logits_test)
50 |
51 | if mode == tf.estimator.ModeKeys.PREDICT:
52 | return tf.estimator.EstimatorSpec(mode, predictions=pred_classes)
53 |
54 | loss_op = tf.reduce_mean(
55 | tf.nn.sparse_softmax_cross_entropy_with_logits(
56 | logits=logits_train, labels=tf.cast(labels, dtype=tf.int32)
57 | )
58 | )
59 | optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
60 | train_op = optimizer.minimize(loss_op, global_step=tf.train.get_global_step())
61 |
62 | acc_op = tf.metrics.accuracy(labels=labels, predictions=pred_classes)
63 |
64 | estim_specs = tf.estimator.EstimatorSpec(
65 | mode=mode,
66 | predictions=pred_classes,
67 | loss=loss_op,
68 | train_op=train_op,
69 | eval_metric_ops={"accuracy": acc_op},
70 | )
71 |
72 | return estim_specs
73 |
74 | model = tf.estimator.Estimator(model_fn)
75 |
76 | input_fn = tf.estimator.inputs.numpy_input_fn(
77 | x={"images": X_train},
78 | y=y_train,
79 | batch_size=batch_size,
80 | num_epochs=None,
81 | shuffle=True,
82 | )
83 | model.train(input_fn, steps=num_steps)
84 |
85 | input_fn = tf.estimator.inputs.numpy_input_fn(
86 | x={"images": X_test}, y=y_test, batch_size=batch_size, shuffle=False
87 | )
88 | e = model.evaluate(input_fn)
89 |
90 | return float(e["accuracy"])
91 |
92 |
93 | search_space = {
94 | "filters_0": [16, 32, 64],
95 | "filters_1": [16, 32, 64],
96 | "dense_0": list(range(100, 2000, 100)),
97 | }
98 |
99 |
100 | hyper = Hyperactive()
101 | hyper.add_search(cnn, search_space, n_iter=100)
102 | hyper.run()
103 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/catboost_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import cross_val_score
2 | from catboost import CatBoostClassifier
3 | from sklearn.datasets import load_breast_cancer
4 | from hyperactive import Hyperactive
5 |
6 | data = load_breast_cancer()
7 | X, y = data.data, data.target
8 |
9 |
10 | def model(opt):
11 | cbc = CatBoostClassifier(
12 | iterations=10, depth=opt["depth"], learning_rate=opt["learning_rate"]
13 | )
14 | scores = cross_val_score(cbc, X, y, cv=3)
15 |
16 | return scores.mean()
17 |
18 |
19 | search_space = {
20 | "depth": list(range(2, 12)),
21 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
22 | }
23 |
24 |
25 | hyper = Hyperactive()
26 | hyper.add_search(model, search_space, n_iter=10)
27 | hyper.run()
28 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/joblib_example.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.model_selection import cross_val_score
3 | from sklearn.ensemble import GradientBoostingClassifier
4 | from sklearn.ensemble import RandomForestClassifier
5 | from sklearn.ensemble import ExtraTreesClassifier
 6 |
7 | from sklearn.datasets import load_breast_cancer
8 | from hyperactive import Hyperactive
9 |
10 | data = load_breast_cancer()
11 | X, y = data.data, data.target
12 |
13 |
14 | def model_etc(opt):
15 | etc = ExtraTreesClassifier(
16 | n_estimators=opt["n_estimators"],
17 | criterion=opt["criterion"],
18 | max_features=opt["max_features"],
19 | min_samples_split=opt["min_samples_split"],
20 | min_samples_leaf=opt["min_samples_leaf"],
21 | bootstrap=opt["bootstrap"],
22 | )
23 | scores = cross_val_score(etc, X, y, cv=3)
24 |
25 | return scores.mean()
26 |
27 |
28 | def model_rfc(opt):
29 | rfc = RandomForestClassifier(
30 | n_estimators=opt["n_estimators"],
31 | criterion=opt["criterion"],
32 | max_features=opt["max_features"],
33 | min_samples_split=opt["min_samples_split"],
34 | min_samples_leaf=opt["min_samples_leaf"],
35 | bootstrap=opt["bootstrap"],
36 | )
37 | scores = cross_val_score(rfc, X, y, cv=3)
38 |
39 | return scores.mean()
40 |
41 |
42 | def model_gbc(opt):
43 | gbc = GradientBoostingClassifier(
44 | n_estimators=opt["n_estimators"],
45 | learning_rate=opt["learning_rate"],
46 | max_depth=opt["max_depth"],
47 | min_samples_split=opt["min_samples_split"],
48 | min_samples_leaf=opt["min_samples_leaf"],
49 | subsample=opt["subsample"],
50 | max_features=opt["max_features"],
51 | )
52 | scores = cross_val_score(gbc, X, y, cv=3)
53 |
54 | return scores.mean()
55 |
56 |
57 | search_space_etc = {
58 | "n_estimators": list(range(10, 200, 10)),
59 | "criterion": ["gini", "entropy"],
60 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
61 | "min_samples_split": list(range(2, 21)),
62 | "min_samples_leaf": list(range(1, 21)),
63 | "bootstrap": [True, False],
64 | }
65 |
66 |
67 | search_space_rfc = {
68 | "n_estimators": list(range(10, 200, 10)),
69 | "criterion": ["gini", "entropy"],
70 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
71 | "min_samples_split": list(range(2, 21)),
72 | "min_samples_leaf": list(range(1, 21)),
73 | "bootstrap": [True, False],
74 | }
75 |
76 |
77 | search_space_gbc = {
78 | "n_estimators": list(range(10, 200, 10)),
79 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
80 | "max_depth": list(range(1, 11)),
81 | "min_samples_split": list(range(2, 21)),
82 | "min_samples_leaf": list(range(1, 21)),
83 | "subsample": list(np.arange(0.05, 1.01, 0.05)),
84 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
85 | }
86 |
87 |
88 | hyper = Hyperactive(distribution="joblib")
89 | hyper.add_search(model_etc, search_space_etc, n_iter=50)
90 | hyper.add_search(model_rfc, search_space_rfc, n_iter=50)
91 | hyper.add_search(model_gbc, search_space_gbc, n_iter=50)
92 | hyper.run(max_time=5)
93 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/keras_example.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from keras.models import Sequential
3 | from keras.layers import (
4 | Dense,
5 | Conv2D,
6 | MaxPooling2D,
7 | Flatten,
8 | Dropout,
9 | Activation,
10 | )
11 | from keras.datasets import cifar10
12 | from keras.utils import to_categorical
13 |
14 | from hyperactive import Hyperactive
15 |
16 |
17 | config = tf.compat.v1.ConfigProto()
18 | config.gpu_options.allow_growth = True
19 | config.log_device_placement = True
20 |
21 | sess = tf.compat.v1.Session(config=config)
22 | tf.compat.v1.keras.backend.set_session(sess)
23 |
24 |
25 | (X_train, y_train), (X_test, y_test) = cifar10.load_data()
26 |
27 | y_train = to_categorical(y_train, 10)
28 | y_test = to_categorical(y_test, 10)
29 |
30 |
31 | # to make the example quick
32 | X_train = X_train[0:1000]
33 | y_train = y_train[0:1000]
34 |
35 |
36 | X_test = X_test[0:1000]
37 | y_test = y_test[0:1000]
38 |
39 |
40 | def cnn(opt):
41 | nn = Sequential()
42 | nn.add(
43 | Conv2D(
44 | opt["filter.0"],
45 | (3, 3),
46 | padding="same",
47 | input_shape=X_train.shape[1:],
48 | )
49 | )
50 | nn.add(Activation("relu"))
51 | nn.add(Conv2D(opt["filter.0"], (3, 3)))
52 | nn.add(Activation("relu"))
53 | nn.add(MaxPooling2D(pool_size=(2, 2)))
54 | nn.add(Dropout(0.25))
55 |
56 | nn.add(Conv2D(opt["filter.0"], (3, 3), padding="same"))
57 | nn.add(Activation("relu"))
58 | nn.add(Conv2D(opt["filter.0"], (3, 3)))
59 | nn.add(Activation("relu"))
60 | nn.add(MaxPooling2D(pool_size=(2, 2)))
61 | nn.add(Dropout(0.25))
62 |
63 | nn.add(Flatten())
64 | nn.add(Dense(opt["layer.0"]))
65 | nn.add(Activation("relu"))
66 | nn.add(Dropout(0.5))
67 | nn.add(Dense(10))
68 | nn.add(Activation("softmax"))
69 |
70 | nn.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
71 | nn.fit(X_train, y_train, epochs=20, batch_size=512)
72 |
73 | _, score = nn.evaluate(x=X_test, y=y_test)
74 |
75 | return score
76 |
77 |
78 | search_space = {
79 | "filter.0": [16, 32, 64, 128],
80 | "layer.0": list(range(100, 1000, 100)),
81 | }
82 |
83 |
84 | hyper = Hyperactive()
85 | hyper.add_search(cnn, search_space, n_iter=5)
86 | hyper.run()
87 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/lightgbm_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import cross_val_score
2 | from lightgbm import LGBMRegressor
3 | from sklearn.datasets import load_diabetes
4 | from hyperactive import Hyperactive
5 |
6 | data = load_diabetes()
7 | X, y = data.data, data.target
8 |
9 |
10 | def model(opt):
11 | lgbm = LGBMRegressor(
12 | num_leaves=opt["num_leaves"],
13 | bagging_freq=opt["bagging_freq"],
14 | learning_rate=opt["learning_rate"],
15 | )
16 | scores = cross_val_score(lgbm, X, y, cv=3)
17 |
18 | return scores.mean()
19 |
20 |
21 | search_space = {
22 | "num_leaves": list(range(2, 50)),
23 | "bagging_freq": list(range(2, 12)),
24 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
25 | }
26 |
27 |
28 | hyper = Hyperactive()
29 | hyper.add_search(model, search_space, n_iter=20)
30 | hyper.run()
31 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/mlxtend_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_breast_cancer
2 | from sklearn.model_selection import cross_val_score
3 | from mlxtend.classifier import EnsembleVoteClassifier
4 | from sklearn.tree import DecisionTreeClassifier
5 | from sklearn.neural_network import MLPClassifier
6 | from sklearn.svm import SVC
7 | from hyperactive import Hyperactive
8 |
9 |
10 | data = load_breast_cancer()
11 | X, y = data.data, data.target
12 |
13 |
14 | def model(opt):
15 | dtc = DecisionTreeClassifier(
16 | min_samples_split=opt["min_samples_split"],
17 | min_samples_leaf=opt["min_samples_leaf"],
18 | )
19 | mlp = MLPClassifier(hidden_layer_sizes=opt["hidden_layer_sizes"])
20 | svc = SVC(C=opt["C"], degree=opt["degree"], gamma="auto", probability=True)
21 |
22 | eclf = EnsembleVoteClassifier(
23 | clfs=[dtc, mlp, svc], weights=opt["weights"], voting="soft",
24 | )
25 |
26 | scores = cross_val_score(eclf, X, y, cv=3)
27 |
28 | return scores.mean()
29 |
30 |
31 | search_space = {
32 | "min_samples_split": list(range(2, 15)),
33 | "min_samples_leaf": list(range(1, 15)),
34 | "hidden_layer_sizes": list(range(5, 50, 5)),
35 | "weights": [[1, 1, 1], [2, 1, 1], [1, 2, 1], [1, 1, 2]],
36 | "C": list(range(1, 1000)),
37 | "degree": list(range(0, 8)),
38 | }
39 |
40 |
41 | hyper = Hyperactive()
42 | hyper.add_search(model, search_space, n_iter=25)
43 | hyper.run()
44 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/multiprocessing_example.py:
--------------------------------------------------------------------------------
1 | """
2 | Hyperactive can perform optimizations of multiple different objective functions
 3 | in parallel. This can be done via multiprocessing, joblib or a custom wrapper function.
 4 | The processes won't communicate with each other.
 5 |
 6 | You can add as many searches as you like to the optimization run (.add_search(...))
 7 | and run each of those searches n times (n_jobs).
8 |
9 | In the example below we are performing 4 searches in parallel:
10 | - model_etc one time
11 | - model_rfc one time
12 | - model_gbc two times
13 |
14 | """
15 | import numpy as np
16 | from sklearn.model_selection import cross_val_score
17 | from sklearn.ensemble import GradientBoostingClassifier
18 | from sklearn.ensemble import RandomForestClassifier
19 | from sklearn.ensemble import ExtraTreesClassifier
20 |
21 | from sklearn.datasets import load_breast_cancer
22 | from hyperactive import Hyperactive
23 |
24 | data = load_breast_cancer()
25 | X, y = data.data, data.target
26 |
27 |
28 | def model_etc(opt):
29 | etc = ExtraTreesClassifier(
30 | n_estimators=opt["n_estimators"],
31 | criterion=opt["criterion"],
32 | max_features=opt["max_features"],
33 | min_samples_split=opt["min_samples_split"],
34 | min_samples_leaf=opt["min_samples_leaf"],
35 | bootstrap=opt["bootstrap"],
36 | )
37 | scores = cross_val_score(etc, X, y, cv=3)
38 |
39 | return scores.mean()
40 |
41 |
42 | def model_rfc(opt):
43 | rfc = RandomForestClassifier(
44 | n_estimators=opt["n_estimators"],
45 | criterion=opt["criterion"],
46 | max_features=opt["max_features"],
47 | min_samples_split=opt["min_samples_split"],
48 | min_samples_leaf=opt["min_samples_leaf"],
49 | bootstrap=opt["bootstrap"],
50 | )
51 | scores = cross_val_score(rfc, X, y, cv=3)
52 |
53 | return scores.mean()
54 |
55 |
56 | def model_gbc(opt):
57 | gbc = GradientBoostingClassifier(
58 | n_estimators=opt["n_estimators"],
59 | learning_rate=opt["learning_rate"],
60 | max_depth=opt["max_depth"],
61 | min_samples_split=opt["min_samples_split"],
62 | min_samples_leaf=opt["min_samples_leaf"],
63 | subsample=opt["subsample"],
64 | max_features=opt["max_features"],
65 | )
66 | scores = cross_val_score(gbc, X, y, cv=3)
67 |
68 | return scores.mean()
69 |
70 |
71 | search_space_etc = {
72 | "n_estimators": list(range(10, 200, 10)),
73 | "criterion": ["gini", "entropy"],
74 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
75 | "min_samples_split": list(range(2, 21)),
76 | "min_samples_leaf": list(range(1, 21)),
77 | "bootstrap": [True, False],
78 | }
79 |
80 |
81 | search_space_rfc = {
82 | "n_estimators": list(range(10, 200, 10)),
83 | "criterion": ["gini", "entropy"],
84 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
85 | "min_samples_split": list(range(2, 21)),
86 | "min_samples_leaf": list(range(1, 21)),
87 | "bootstrap": [True, False],
88 | }
89 |
90 |
91 | search_space_gbc = {
92 | "n_estimators": list(range(10, 200, 10)),
93 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
94 | "max_depth": list(range(1, 11)),
95 | "min_samples_split": list(range(2, 21)),
96 | "min_samples_leaf": list(range(1, 21)),
97 | "subsample": list(np.arange(0.05, 1.01, 0.05)),
98 | "max_features": list(np.arange(0.05, 1.01, 0.05)),
99 | }
100 |
101 |
102 | hyper = Hyperactive()
103 | hyper.add_search(model_etc, search_space_etc, n_iter=50)
104 | hyper.add_search(model_rfc, search_space_rfc, n_iter=50)
105 | hyper.add_search(model_gbc, search_space_gbc, n_iter=50, n_jobs=2)
106 | hyper.run(max_time=5)
107 |
108 | search_data_etc = hyper.search_data(model_etc)
109 | search_data_rfc = hyper.search_data(model_rfc)
110 | search_data_gbc = hyper.search_data(model_gbc)
111 |
112 | print("\n ExtraTreesClassifier search data \n", search_data_etc)
113 | print("\n RandomForestClassifier search data \n", search_data_rfc)
114 | print("\n GradientBoostingClassifier search data \n", search_data_gbc)
115 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/pytorch_example.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import torch
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 | import torch.optim as optim
7 | import torch.utils.data
8 | from torchvision import datasets
9 | from torchvision import transforms
10 |
11 | from hyperactive import Hyperactive
12 |
13 |
14 | """
15 | derived from optuna example:
16 | https://github.com/optuna/optuna/blob/master/examples/pytorch_simple.py
17 | """
18 | DEVICE = torch.device("cpu")
19 | BATCHSIZE = 256
20 | CLASSES = 10
21 | DIR = os.getcwd()
22 | EPOCHS = 10
23 | LOG_INTERVAL = 10
24 | N_TRAIN_EXAMPLES = BATCHSIZE * 30
25 | N_VALID_EXAMPLES = BATCHSIZE * 10
26 |
27 |
28 | # Get the MNIST dataset.
29 | train_loader = torch.utils.data.DataLoader(
30 | datasets.MNIST(DIR, train=True, download=True, transform=transforms.ToTensor()),
31 | batch_size=BATCHSIZE,
32 | shuffle=True,
33 | )
34 | valid_loader = torch.utils.data.DataLoader(
35 | datasets.MNIST(DIR, train=False, transform=transforms.ToTensor()),
36 | batch_size=BATCHSIZE,
37 | shuffle=True,
38 | )
39 |
40 |
41 | def pytorch_cnn(params):
42 | linear0 = params["linear.0"]
43 | linear1 = params["linear.1"]
44 |
45 | layers = []
46 |
47 | in_features = 28 * 28
48 |
49 | layers.append(nn.Linear(in_features, linear0))
50 | layers.append(nn.ReLU())
51 | layers.append(nn.Dropout(0.2))
52 |
53 | layers.append(nn.Linear(linear0, linear1))
54 | layers.append(nn.ReLU())
55 | layers.append(nn.Dropout(0.2))
56 |
57 | layers.append(nn.Linear(linear1, CLASSES))
58 | layers.append(nn.LogSoftmax(dim=1))
59 |
60 |     model = nn.Sequential(*layers).to(DEVICE)
61 |
62 |     # plain Adam optimizer with a fixed learning rate
63 |     optimizer = optim.Adam(model.parameters(), lr=0.01)
64 |
65 | # Training of the model.
66 | for epoch in range(EPOCHS):
67 | model.train()
68 | for batch_idx, (data, target) in enumerate(train_loader):
69 | # Limiting training data for faster epochs.
70 | if batch_idx * BATCHSIZE >= N_TRAIN_EXAMPLES:
71 | break
72 |
73 | data, target = data.view(data.size(0), -1).to(DEVICE), target.to(DEVICE)
74 |
75 | optimizer.zero_grad()
76 | output = model(data)
77 | loss = F.nll_loss(output, target)
78 | loss.backward()
79 | optimizer.step()
80 |
81 | # Validation of the model.
82 | model.eval()
83 | correct = 0
84 | with torch.no_grad():
85 | for batch_idx, (data, target) in enumerate(valid_loader):
86 | # Limiting validation data.
87 | if batch_idx * BATCHSIZE >= N_VALID_EXAMPLES:
88 | break
89 | data, target = data.view(data.size(0), -1).to(DEVICE), target.to(DEVICE)
90 | output = model(data)
91 | # Get the index of the max log-probability.
92 | pred = output.argmax(dim=1, keepdim=True)
93 | correct += pred.eq(target.view_as(pred)).sum().item()
94 |
95 | accuracy = correct / min(len(valid_loader.dataset), N_VALID_EXAMPLES)
96 |
97 | return accuracy
98 |
99 |
100 | search_space = {
101 | "linear.0": list(range(10, 200, 10)),
102 | "linear.1": list(range(10, 200, 10)),
103 | }
104 |
105 |
106 | hyper = Hyperactive()
107 | hyper.add_search(pytorch_cnn, search_space, n_iter=5)
108 | hyper.run()
109 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/rgf_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_breast_cancer
2 | from sklearn.model_selection import cross_val_score
3 | from rgf.sklearn import RGFClassifier
4 |
5 | from hyperactive import Hyperactive
6 |
7 | data = load_breast_cancer()
8 | X, y = data.data, data.target
9 |
10 |
11 | def model(opt):
12 | rgf = RGFClassifier(
13 | max_leaf=opt["max_leaf"],
14 | reg_depth=opt["reg_depth"],
15 | min_samples_leaf=opt["min_samples_leaf"],
16 | algorithm="RGF_Sib",
17 | test_interval=100,
18 | verbose=False,
19 | )
20 | scores = cross_val_score(rgf, X, y, cv=3)
21 |
22 | return scores.mean()
23 |
24 |
25 | search_space = {
26 | "max_leaf": list(range(10, 2000, 10)),
27 | "reg_depth": list(range(1, 21)),
28 | "min_samples_leaf": list(range(1, 21)),
29 | }
30 |
31 | hyper = Hyperactive()
32 | hyper.add_search(model, search_space, n_iter=10)
33 | hyper.run()
34 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/sklearn_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import cross_val_score
2 | from sklearn.ensemble import GradientBoostingRegressor
3 | from sklearn.datasets import load_diabetes
4 | from hyperactive import Hyperactive
5 |
6 | data = load_diabetes()
7 | X, y = data.data, data.target
8 |
9 |
10 | def model(opt):
11 | gbr = GradientBoostingRegressor(
12 | n_estimators=opt["n_estimators"],
13 | max_depth=opt["max_depth"],
14 | min_samples_split=opt["min_samples_split"],
15 | )
16 | scores = cross_val_score(gbr, X, y, cv=3)
17 |
18 | return scores.mean()
19 |
20 |
21 | search_space = {
22 | "n_estimators": list(range(10, 150, 5)),
23 | "max_depth": list(range(2, 12)),
24 | "min_samples_split": list(range(2, 22)),
25 | }
26 |
27 |
28 | hyper = Hyperactive()
29 | hyper.add_search(model, search_space, n_iter=20)
30 | hyper.run()
31 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/tensorflow_example.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 |
3 | from hyperactive import Hyperactive
4 |
5 | mnist = tf.keras.datasets.mnist
6 |
7 | (x_train, y_train), (x_test, y_test) = mnist.load_data()
8 | x_train, x_test = x_train / 255.0, x_test / 255.0
9 |
10 |
11 | def cnn(params):
12 | nn = tf.keras.models.Sequential(
13 | [
14 | tf.keras.layers.Flatten(input_shape=(28, 28)),
15 |             tf.keras.layers.Dense(params["dense_0"], activation="relu"),
16 | tf.keras.layers.Dropout(0.2),
17 | tf.keras.layers.Dense(10),
18 | ]
19 | )
20 | loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
21 |
22 | nn.compile(optimizer="adam", loss=loss_fn, metrics=["accuracy"])
23 | nn.fit(x_train, y_train, epochs=5)
24 | _, score = nn.evaluate(x=x_test, y=y_test)
25 |
26 | return score
27 |
28 |
29 | search_space = {
30 |     "dense_0": list(range(100, 2000, 100)),
31 | }
32 |
33 | hyper = Hyperactive()
34 | hyper.add_search(cnn, search_space, n_iter=5)
35 | hyper.run()
36 |
--------------------------------------------------------------------------------
/examples/tested_and_supported_packages/xgboost_example.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import cross_val_score
2 | from xgboost import XGBClassifier
3 | from sklearn.datasets import load_breast_cancer
4 | from hyperactive import Hyperactive
5 |
6 | data = load_breast_cancer()
7 | X, y = data.data, data.target
8 |
9 |
10 | def model(opt):
11 | xgb = XGBClassifier(
12 | n_estimators=opt["n_estimators"],
13 | max_depth=opt["max_depth"],
14 | learning_rate=opt["learning_rate"],
15 | )
16 | scores = cross_val_score(xgb, X, y, cv=3)
17 |
18 | return scores.mean()
19 |
20 |
21 | search_space = {
22 | "n_estimators": list(range(10, 200, 10)),
23 | "max_depth": list(range(2, 12)),
24 | "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
25 | }
26 |
27 |
28 | hyper = Hyperactive()
29 | hyper.add_search(model, search_space, n_iter=30)
30 | hyper.run()
31 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.setuptools.packages.find]
6 | where = ["src"]
7 |
8 | [project]
9 | name = "hyperactive"
10 | version = "4.8.1"
11 | description = "An optimization and data collection toolbox for convenient and fast prototyping of computationally expensive models."
12 | readme = "README.md"
13 | requires-python = ">=3.9"
14 | license = {file = "LICENSE"}
15 | keywords = ["visualization", "data-science"]
16 | authors = [
17 | {name = "Simon Blanke", email = "simon.blanke@yahoo.com" }
18 | ]
19 | maintainers = [
20 | {name = "Simon Blanke", email = "simon.blanke@yahoo.com" }
21 | ]
22 | classifiers = [
23 | "Programming Language :: Python :: 3",
24 | "Programming Language :: Python :: 3.9",
25 | "Programming Language :: Python :: 3.10",
26 | "Programming Language :: Python :: 3.11",
27 | "Programming Language :: Python :: 3.12",
28 | "Programming Language :: Python :: 3.13",
29 | "License :: OSI Approved :: MIT License",
30 | "Operating System :: OS Independent",
31 | "Topic :: Scientific/Engineering :: Information Analysis",
32 | "Topic :: Scientific/Engineering :: Mathematics",
33 | "Topic :: Software Development :: Libraries :: Python Modules",
34 | "Intended Audience :: Developers",
35 | "Intended Audience :: Information Technology",
36 | "Intended Audience :: Science/Research",
37 | ]
38 |
39 | dependencies = [
40 | "numpy >=1.18.1, <3.0.0",
41 | "tqdm >=4.48.0, <5.0.0",
42 | "pandas <3.0.0",
43 | "gradient-free-optimizers >=1.2.4, <2.0.0",
44 | "scikit-base <1.0.0",
45 | ]
46 |
47 | [project.optional-dependencies]
48 | sklearn-integration = [
49 | "scikit-learn == 1.6.1",
50 | ]
51 | build = [
52 | "setuptools",
53 | "build",
54 | "wheel",
55 | ]
56 | test = [
57 | "pytest == 8.3.5",
58 | "flake8",
59 | "pytest-cov",
60 | "pathos",
61 | ]
62 | all_extras = [
63 | "hyperactive[build]",
64 | "hyperactive[test]",
65 |     "hyperactive[sklearn-integration]",
66 | ]
67 |
68 |
69 | [project.urls]
70 | "Homepage" = "https://github.com/SimonBlanke/Hyperactive"
71 | "Bug Reports" = "https://github.com/SimonBlanke/Hyperactive/issues"
72 | "Source" = "https://github.com/SimonBlanke/Hyperactive/"
73 |
--------------------------------------------------------------------------------
/requirements/requirements-test.in:
--------------------------------------------------------------------------------
1 | pytest == 8.3.5
2 | flake8
3 | pytest-cov
4 | pathos
5 |
--------------------------------------------------------------------------------
/src/hyperactive/__init__.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 | import importlib.metadata
6 |
7 | __version__ = importlib.metadata.version("hyperactive")
8 | __license__ = "MIT"
9 |
10 |
11 | from .hyperactive import Hyperactive
12 |
13 |
14 | __all__ = [
15 | "Hyperactive",
16 | ]
17 |
--------------------------------------------------------------------------------
/src/hyperactive/_registry/__init__.py:
--------------------------------------------------------------------------------
1 | """Hyperactive registry."""
2 |
3 | from hyperactive._registry._lookup import all_objects
4 |
5 | __all__ = ["all_objects"]
6 |
--------------------------------------------------------------------------------
/src/hyperactive/base/__init__.py:
--------------------------------------------------------------------------------
1 | """Base classes for optimizers and experiments."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.base._experiment import BaseExperiment
5 | from hyperactive.base._optimizer import BaseOptimizer
6 |
7 | __all__ = ["BaseExperiment", "BaseOptimizer"]
8 |
--------------------------------------------------------------------------------
/src/hyperactive/base/_experiment.py:
--------------------------------------------------------------------------------
1 | """Base class for experiment."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | import numpy as np
5 | from skbase.base import BaseObject
6 |
7 |
8 | class BaseExperiment(BaseObject):
9 | """Base class for experiment."""
10 |
11 | _tags = {
12 | "object_type": "experiment",
13 | "python_dependencies": None,
14 | "property:randomness": "random", # random or deterministic
15 | # if deterministic, two calls of score will result in the same value
16 | # random = two calls may result in different values; same as "stochastic"
17 | }
18 |
19 | def __init__(self):
20 | super().__init__()
21 |
22 | def __call__(self, **kwargs):
23 | """Score parameters, with kwargs call."""
24 | score, _ = self.score(kwargs)
25 | return score
26 |
27 | @property
28 | def __name__(self):
29 | return type(self).__name__
30 |
31 | def paramnames(self):
32 | """Return the parameter names of the search.
33 |
34 | Returns
35 | -------
36 | list of str
37 | The parameter names of the search parameters.
38 | """
39 | return self._paramnames()
40 |
41 | def _paramnames(self):
42 | """Return the parameter names of the search.
43 |
44 | Returns
45 | -------
46 | list of str
47 | The parameter names of the search parameters.
48 | """
49 | raise NotImplementedError
50 |
51 | def score(self, params):
52 | """Score the parameters.
53 |
54 | Parameters
55 | ----------
56 | params : dict with string keys
57 | Parameters to score.
58 |
59 | Returns
60 | -------
61 | float
62 | The score of the parameters.
63 | dict
64 | Additional metadata about the search.
65 | """
66 | paramnames = self.paramnames()
67 | if not set(params.keys()) <= set(paramnames):
 68 |             raise ValueError(f"unexpected parameters: {set(params) - set(paramnames)}")
69 | res, metadata = self._score(params)
70 | res = np.float64(res)
71 | return res, metadata
72 |
73 | def _score(self, params):
74 | """Score the parameters.
75 |
76 | Parameters
77 | ----------
78 | params : dict with string keys
79 | Parameters to score.
80 |
81 | Returns
82 | -------
83 | float
84 | The score of the parameters.
85 | dict
86 | Additional metadata about the search.
87 | """
88 | raise NotImplementedError
89 |
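 90 | # Example (sketch, not part of the library API): a concrete experiment only
 91 | # needs `_paramnames` and `_score`. The class below is illustrative.
 92 | #
 93 | # class Quadratic(BaseExperiment):
 94 | #     def _paramnames(self):
 95 | #         return ["x"]
 96 | #
 97 | #     def _score(self, params):
 98 | #         return -params["x"] ** 2, {}
 99 | #
100 | # Quadratic()(x=3)  # __call__ routes through score -> np.float64(-9.0)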
--------------------------------------------------------------------------------
/src/hyperactive/base/_optimizer.py:
--------------------------------------------------------------------------------
1 | """Base class for optimizer."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from skbase.base import BaseObject
5 |
6 |
7 | class BaseOptimizer(BaseObject):
8 | """Base class for optimizer."""
9 |
10 | _tags = {
11 | "object_type": "optimizer",
12 | "python_dependencies": None,
13 | # properties of the optimizer
14 | "info:name": None, # str
15 | "info:local_vs_global": "mixed", # "local", "mixed", "global"
16 | "info:explore_vs_exploit": "mixed", # "explore", "exploit", "mixed"
17 | "info:compute": "middle", # "low", "middle", "high"
18 | # see here for explanation of the tags:
19 | # https://simonblanke.github.io/gradient-free-optimizers-documentation/1.5/optimizers/ # noqa: E501
20 | }
21 |
22 | def __init__(self):
23 | super().__init__()
24 | assert hasattr(self, "experiment"), "Optimizer must have an experiment."
25 | search_config = self.get_params()
26 | self._experiment = search_config.pop("experiment", None)
27 |
28 | if self.get_tag("info:name") is None:
29 | self.set_tags(**{"info:name": self.__class__.__name__})
30 |
31 | def get_search_config(self):
32 | """Get the search configuration.
33 |
34 | Returns
35 | -------
36 | dict with str keys
37 | The search configuration dictionary.
38 | """
39 | search_config = self.get_params(deep=False)
40 | search_config.pop("experiment", None)
41 | return search_config
42 |
43 | def get_experiment(self):
44 | """Get the experiment.
45 |
46 | Returns
47 | -------
48 | BaseExperiment
49 | The experiment to optimize parameters for.
50 | """
51 | return self._experiment
52 |
53 | def run(self):
54 | """Run the optimization search process.
55 |
56 | Returns
57 | -------
58 | best_params : dict
59 | The best parameters found during the optimization process.
60 | """
61 | experiment = self.get_experiment()
62 | search_config = self.get_search_config()
63 |
64 | best_params = self._run(experiment, **search_config)
65 | self.best_params_ = best_params
66 | return best_params
67 |
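 68 | # Example (sketch): a concrete optimizer implements `_run`; parameter
 69 | # handling and experiment wiring come from this base class. The class and
 70 | # its parameters below are illustrative, not the actual GFO adapter.
 71 | #
 72 | # class RandomPick(BaseOptimizer):
 73 | #     def __init__(self, search_space, n_iter=10, experiment=None):
 74 | #         self.search_space = search_space
 75 | #         self.n_iter = n_iter
 76 | #         self.experiment = experiment
 77 | #         super().__init__()
 78 | #
 79 | #     def _run(self, experiment, search_space, n_iter):
 80 | #         import random
 81 | #         candidates = [
 82 | #             {k: random.choice(v) for k, v in search_space.items()}
 83 | #             for _ in range(n_iter)
 84 | #         ]
 85 | #         return max(candidates, key=lambda p: experiment.score(p)[0])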
--------------------------------------------------------------------------------
/src/hyperactive/base/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Test base classes for optimizers and experiments."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
--------------------------------------------------------------------------------
/src/hyperactive/base/tests/test_endtoend.py:
--------------------------------------------------------------------------------
1 | """Integration tests for end-to-end usage of optimizers with experiments.
2 |
3 | API unit tests are in TestAllOptimizers and TestAllExperiments.
4 | """
5 | # copyright: hyperactive developers, MIT License (see LICENSE file)
6 |
7 |
8 | def test_endtoend_hillclimbing():
9 | """Test end-to-end usage of HillClimbing optimizer with an experiment."""
10 | # 1. define the experiment
11 | from hyperactive.experiment.integrations import SklearnCvExperiment
12 | from sklearn.datasets import load_iris
13 | from sklearn.svm import SVC
14 | from sklearn.metrics import accuracy_score
15 | from sklearn.model_selection import KFold
16 |
17 | X, y = load_iris(return_X_y=True)
18 |
19 | sklearn_exp = SklearnCvExperiment(
20 | estimator=SVC(),
21 | scoring=accuracy_score,
22 | cv=KFold(n_splits=3, shuffle=True),
23 | X=X,
24 | y=y,
25 | )
26 |
27 | # 2. set up the HillClimbing optimizer
28 | import numpy as np
29 | from hyperactive.opt import HillClimbing
30 |
31 | hillclimbing_config = {
32 | "search_space": {
33 | "C": np.array([0.01, 0.1, 1, 10]),
34 | "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
35 | },
36 | "n_iter": 100,
37 | }
38 | hill_climbing = HillClimbing(**hillclimbing_config, experiment=sklearn_exp)
39 |
40 | # 3. run the HillClimbing optimizer
41 | hill_climbing.run()
42 |
43 | best_params = hill_climbing.best_params_
44 | assert best_params is not None, "Best parameters should not be None"
45 | assert isinstance(best_params, dict), "Best parameters should be a dictionary"
46 | assert "C" in best_params, "Best parameters should contain 'C'"
47 | assert "gamma" in best_params, "Best parameters should contain 'gamma'"
48 |
--------------------------------------------------------------------------------
/src/hyperactive/distribution.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 | from sys import platform
6 | from tqdm import tqdm
7 |
8 | if platform.startswith("linux"):
9 | initializer = tqdm.set_lock
10 | initargs = (tqdm.get_lock(),)
11 | else:
12 | initializer = None
13 | initargs = ()
14 |
15 |
16 | def single_process(process_func, process_infos):
17 | return [process_func(*info) for info in process_infos]
18 |
19 |
20 | def multiprocessing_wrapper(process_func, process_infos, n_processes):
21 | import multiprocessing as mp
22 |
23 | with mp.Pool(
24 | n_processes, initializer=initializer, initargs=initargs
25 | ) as pool:
26 | return pool.map(process_func, process_infos)
27 |
28 |
29 | def pathos_wrapper(process_func, search_processes_paras, n_processes):
30 | import pathos.multiprocessing as pmp
31 |
32 | with pmp.Pool(
33 | n_processes, initializer=initializer, initargs=initargs
34 | ) as pool:
35 | return pool.map(process_func, search_processes_paras)
36 |
37 |
38 | def joblib_wrapper(process_func, search_processes_paras, n_processes):
39 | from joblib import Parallel, delayed
40 |
41 | jobs = [
42 | delayed(process_func)(*info_dict)
43 | for info_dict in search_processes_paras
44 | ]
45 | return Parallel(n_jobs=n_processes)(jobs)
46 |
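 47 | # Note on calling conventions (sketch): joblib_wrapper unpacks each info
 48 | # tuple into process_func(*info), while the multiprocessing/pathos pools
 49 | # pass each item whole -- which is why run_search pairs the latter two
 50 | # with an unpacking proxy. Hypothetical usage:
 51 | #
 52 | #   results = joblib_wrapper(work, [(0, "a"), (1, "b")], 2)
 53 | #   # calls work(0, "a") and work(1, "b") across 2 processes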
--------------------------------------------------------------------------------
/src/hyperactive/experiment/__init__.py:
--------------------------------------------------------------------------------
1 | """Base classes for optimizers and experiments."""
2 |
3 | from hyperactive.base import BaseExperiment
4 |
5 | __all__ = ["BaseExperiment"]
6 |
--------------------------------------------------------------------------------
/src/hyperactive/experiment/integrations/__init__.py:
--------------------------------------------------------------------------------
1 | """Integrations with packages for tuning."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 |
5 | from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
6 |
7 | __all__ = ["SklearnCvExperiment"]
8 |
--------------------------------------------------------------------------------
/src/hyperactive/experiment/toy/__init__.py:
--------------------------------------------------------------------------------
1 | """Toy experiments."""
2 |
3 |
4 | from hyperactive.experiment.toy._ackley import Ackley
5 | from hyperactive.experiment.toy._parabola import Parabola
6 | from hyperactive.experiment.toy._sphere import Sphere
7 |
8 | __all__ = [
9 | "Ackley",
10 | "Parabola",
11 | "Sphere",
12 | ]
13 |
--------------------------------------------------------------------------------
/src/hyperactive/experiment/toy/_parabola.py:
--------------------------------------------------------------------------------
1 | """2D parabola function, common benchmark for optimization algorithms."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.base import BaseExperiment
5 |
6 |
7 | class Parabola(BaseExperiment):
8 | r"""2D parabola, common benchmark for optimization algorithms.
9 |
10 | Parabola parameterized by the formula:
11 |
12 | .. math::
13 | f(x, y) = a * (x^2 + y^2) + b * x + c * y
14 |
15 | where :math:`a`, :math:`b`, and :math:`c` are coefficients which can
16 | be set as parameters.
17 |
18 | The function arguments :math:`x` and :math:`y`
19 | are the input variables of the `score` method,
20 | and are set as `x` and `y` respectively.
21 |
22 | Parameters
23 | ----------
24 | a : float, default=1.0
25 | Coefficient of the parabola.
26 | b : float, default=0.0
27 | Coefficient of the parabola.
28 | c : float, default=0.0
29 | Coefficient of the parabola.
30 |
31 | Example
32 | -------
33 | >>> from hyperactive.experiment.toy import Parabola
34 | >>> parabola = Parabola(a=1.0, b=0.0, c=0.0)
35 | >>> params = {"x": 1, "y": 2}
36 | >>> score, add_info = parabola.score(params)
37 |
 38 |     Quick call returning only the score, via keyword arguments:
39 | >>> score = parabola(x=1, y=2)
40 | """
41 |
42 | _tags = {
43 | "property:randomness": "deterministic", # random or deterministic
44 | # if deterministic, two calls of score will result in the same value
45 | # random = two calls may result in different values; same as "stochastic"
46 | }
47 |
48 | def __init__(self, a=1.0, b=0.0, c=0.0):
49 | self.a = a
50 | self.b = b
51 | self.c = c
52 | super().__init__()
53 |
54 | def _paramnames(self):
55 | return ["x", "y"]
56 |
57 | def _score(self, params):
58 | x = params["x"]
59 | y = params["y"]
60 |
61 | return self.a * (x**2 + y**2) + self.b * x + self.c * y, {}
62 |
63 | @classmethod
64 | def _get_score_params(self):
65 | """Return settings for testing the score function. Used in tests only.
66 |
67 | Returns a list, the i-th element corresponds to self.get_test_params()[i].
68 | It should be a valid call for self.score.
69 |
70 | Returns
71 | -------
72 | list of dict
73 | The parameters to be used for scoring.
74 | """
75 | params0 = {"x": 0, "y": 0}
76 | params1 = {"x": 1, "y": 1}
77 | return [params0, params1]
78 |
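 79 | # Worked example (not part of the API): for f(x, y) = a*(x^2 + y^2)
 80 | # + b*x + c*y with a > 0, setting the gradient to zero gives the unique
 81 | # minimum at x = -b / (2a), y = -c / (2a). With the defaults
 82 | # (a=1, b=0, c=0) the minimum is f(0, 0) = 0.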
--------------------------------------------------------------------------------
/src/hyperactive/integrations/__init__.py:
--------------------------------------------------------------------------------
1 | # copyright: hyperactive developers, MIT License (see LICENSE file)
2 |
3 | from hyperactive.integrations.sklearn import HyperactiveSearchCV, OptCV
4 |
5 | __all__ = [
6 | "HyperactiveSearchCV",
7 | "OptCV",
8 | ]
9 |
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sklearn/__init__.py:
--------------------------------------------------------------------------------
1 | # copyright: hyperactive developers, MIT License (see LICENSE file)
2 |
3 |
4 | from hyperactive.integrations.sklearn.hyperactive_search_cv import HyperactiveSearchCV
5 | from hyperactive.integrations.sklearn.opt_cv import OptCV
6 |
7 | __all__ = [
8 | "HyperactiveSearchCV",
9 | "OptCV",
10 | ]
11 |
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sklearn/best_estimator.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from sklearn.utils.metaestimators import available_if
7 | from sklearn.utils.deprecation import _deprecate_Xt_in_inverse_transform
8 | from sklearn.exceptions import NotFittedError
9 | from sklearn.utils.validation import check_is_fitted
10 |
11 | from .utils import _estimator_has
12 |
13 |
14 | # NOTE Implementations of following methods from:
15 | # https://github.com/scikit-learn/scikit-learn/blob/main/sklearn/model_selection/_search.py
16 | # Tag: 1.5.1
17 | class BestEstimator:
18 |
19 | @available_if(_estimator_has("score_samples"))
20 | def score_samples(self, X):
21 | check_is_fitted(self)
22 | return self.best_estimator_.score_samples(X)
23 |
24 | @available_if(_estimator_has("predict"))
25 | def predict(self, X):
26 | check_is_fitted(self)
27 | return self.best_estimator_.predict(X)
28 |
29 | @available_if(_estimator_has("predict_proba"))
30 | def predict_proba(self, X):
31 | check_is_fitted(self)
32 | return self.best_estimator_.predict_proba(X)
33 |
34 | @available_if(_estimator_has("predict_log_proba"))
35 | def predict_log_proba(self, X):
36 | check_is_fitted(self)
37 | return self.best_estimator_.predict_log_proba(X)
38 |
39 | @available_if(_estimator_has("decision_function"))
40 | def decision_function(self, X):
41 | check_is_fitted(self)
42 | return self.best_estimator_.decision_function(X)
43 |
44 | @available_if(_estimator_has("transform"))
45 | def transform(self, X):
46 | check_is_fitted(self)
47 | return self.best_estimator_.transform(X)
48 |
49 | @available_if(_estimator_has("inverse_transform"))
50 | def inverse_transform(self, X=None, Xt=None):
51 | X = _deprecate_Xt_in_inverse_transform(X, Xt)
52 | check_is_fitted(self)
53 | return self.best_estimator_.inverse_transform(X)
54 |
55 | @property
56 | def classes_(self):
57 | _estimator_has("classes_")(self)
58 | return self.best_estimator_.classes_
59 |
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sklearn/checks.py:
--------------------------------------------------------------------------------
1 | class Checks:
2 | _fit_successful = False
3 |
4 | def verify_fit(function):
5 | def wrapper(self, X, y):
6 | out = function(self, X, y)
7 | self._fit_successful = True
8 | return out
9 |
10 | return wrapper
11 |
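 12 | # Usage note (sketch): `verify_fit` is applied as a decorator at class
 13 | # definition time, so it intentionally takes the undecorated function,
 14 | # not `self`. The subclass below is hypothetical:
 15 | #
 16 | # class MySearchCV(Checks):
 17 | #     @Checks.verify_fit
 18 | #     def fit(self, X, y):
 19 | #         ...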
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sklearn/utils.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from sklearn.utils.validation import (
7 | indexable,
8 | _check_method_params,
9 | check_is_fitted,
10 | )
11 |
12 | # NOTE Implementations of following methods from:
13 | # https://github.com/scikit-learn/scikit-learn/blob/main/sklearn/model_selection/_search.py
14 | # Tag: 1.5.1
15 |
16 |
17 | def _check_refit(search_cv, attr):
18 | if not search_cv.refit:
19 | raise AttributeError(
20 | f"This {type(search_cv).__name__} instance was initialized with "
21 | f"`refit=False`. {attr} is available only after refitting on the best "
22 | "parameters. You can refit an estimator manually using the "
23 | "`best_params_` attribute"
24 | )
25 |
26 |
27 | def _estimator_has(attr):
28 | def check(self):
29 | _check_refit(self, attr)
30 | if hasattr(self, "best_estimator_"):
31 | # raise an AttributeError if `attr` does not exist
32 | getattr(self.best_estimator_, attr)
33 | return True
34 | # raise an AttributeError if `attr` does not exist
35 | getattr(self.estimator, attr)
36 | return True
37 |
38 | return check
39 |
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sktime/__init__.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from .main import HyperactiveSearchCV
7 |
--------------------------------------------------------------------------------
/src/hyperactive/integrations/sktime/main.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | class HyperactiveSearchCV:
7 | def __init__(self) -> None:
8 | pass
9 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/__init__.py:
--------------------------------------------------------------------------------
1 | """Individual optimization algorithms."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.opt.gridsearch import GridSearchSk
5 | from hyperactive.opt.hillclimbing import HillClimbing
6 | from hyperactive.opt.hillclimbing_repulsing import HillClimbingRepulsing
7 | from hyperactive.opt.hillclimbing_stochastic import HillClimbingStochastic
8 |
9 | __all__ = [
10 | "GridSearchSk",
11 | "HillClimbing",
12 | "HillClimbingRepulsing",
13 | "HillClimbingStochastic",
14 | ]
15 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/_adapters/__init__.py:
--------------------------------------------------------------------------------
1 | """Adapters for individual packages."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/gridsearch/__init__.py:
--------------------------------------------------------------------------------
1 | """Grid search with sklearn style grid and backends."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.opt.gridsearch._sk import GridSearchSk
5 |
6 | __all__ = ["GridSearchSk"]
7 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/hillclimbing/__init__.py:
--------------------------------------------------------------------------------
1 | """Hill climbing optimizer."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.opt.hillclimbing._hillclimbing import HillClimbing
5 |
6 | __all__ = ["HillClimbing"]
7 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/hillclimbing_repulsing/__init__.py:
--------------------------------------------------------------------------------
1 | """Hill climbing optimizer."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.opt.hillclimbing_repulsing._hillclimbing_repulsing import (
5 | HillClimbingRepulsing,
6 | )
7 |
8 | __all__ = ["HillClimbingRepulsing"]
9 |
--------------------------------------------------------------------------------
/src/hyperactive/opt/hillclimbing_stochastic/__init__.py:
--------------------------------------------------------------------------------
1 | """Hill climbing optimizer."""
2 | # copyright: hyperactive developers, MIT License (see LICENSE file)
3 |
4 | from hyperactive.opt.hillclimbing_stochastic._hillclimbing_stochastic import (
5 | HillClimbingStochastic,
6 | )
7 |
8 | __all__ = ["HillClimbingStochastic"]
9 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/__init__.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from .optimizers import (
7 | HillClimbingOptimizer,
8 | StochasticHillClimbingOptimizer,
9 | RepulsingHillClimbingOptimizer,
10 | SimulatedAnnealingOptimizer,
11 | DownhillSimplexOptimizer,
12 | RandomSearchOptimizer,
13 | GridSearchOptimizer,
14 | RandomRestartHillClimbingOptimizer,
15 | RandomAnnealingOptimizer,
16 | PowellsMethod,
17 | PatternSearch,
18 | ParallelTemperingOptimizer,
19 | ParticleSwarmOptimizer,
20 | SpiralOptimization,
21 | GeneticAlgorithmOptimizer,
22 | EvolutionStrategyOptimizer,
23 | DifferentialEvolutionOptimizer,
24 | BayesianOptimizer,
25 | LipschitzOptimizer,
26 | DirectAlgorithm,
27 | TreeStructuredParzenEstimators,
28 | ForestOptimizer,
29 | )
30 |
31 |
32 | __all__ = [
33 | "HillClimbingOptimizer",
34 | "StochasticHillClimbingOptimizer",
35 | "RepulsingHillClimbingOptimizer",
36 | "SimulatedAnnealingOptimizer",
37 | "DownhillSimplexOptimizer",
38 | "RandomSearchOptimizer",
39 | "GridSearchOptimizer",
40 | "RandomRestartHillClimbingOptimizer",
41 | "RandomAnnealingOptimizer",
42 | "PowellsMethod",
43 | "PatternSearch",
44 | "ParallelTemperingOptimizer",
45 | "ParticleSwarmOptimizer",
46 | "SpiralOptimization",
47 | "GeneticAlgorithmOptimizer",
48 | "EvolutionStrategyOptimizer",
49 | "DifferentialEvolutionOptimizer",
50 | "BayesianOptimizer",
51 | "LipschitzOptimizer",
52 | "DirectAlgorithm",
53 | "TreeStructuredParzenEstimators",
54 | "ForestOptimizer",
55 | ]
56 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/constraint.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | def gfo2hyper(search_space, para):
7 | values_dict = {}
8 | for key, values in search_space.items():
9 | pos_ = int(para[key])
10 | values_dict[key] = values[pos_]
11 |
12 | return values_dict
13 |
14 |
15 | class Constraint:
16 | def __init__(self, constraint, search_space):
17 | self.constraint = constraint
18 | self.search_space = search_space
19 |
20 | def __call__(self, para):
21 | para = gfo2hyper(self.search_space, para)
22 | return self.constraint(para)
23 |
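 24 | # Worked example (illustrative values): GFO works on integer positions,
 25 | # and gfo2hyper maps them back to search-space values. With
 26 | #   search_space = {"x": [0.1, 1, 10]} and para = {"x": 2}
 27 | # gfo2hyper returns {"x": 10}, which is what the user-supplied
 28 | # constraint function receives.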
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/dictionary.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | class DictClass:
7 | def __init__(self):
8 | self.para_dict = {}
9 |
10 | def __getitem__(self, key):
11 | return self.para_dict[key]
12 |
13 | def keys(self):
14 | return self.para_dict.keys()
15 |
16 | def values(self):
17 | return self.para_dict.values()
18 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/objective_function.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from .dictionary import DictClass
7 |
8 |
9 | def gfo2hyper(search_space, para):
10 | values_dict = {}
 11 |     for key in search_space.keys():
12 | pos_ = int(para[key])
13 | values_dict[key] = search_space[key][pos_]
14 |
15 | return values_dict
16 |
17 |
18 | class ObjectiveFunction(DictClass):
19 | def __init__(self, objective_function, optimizer, callbacks, catch, nth_process):
20 | super().__init__()
21 |
22 | self.objective_function = objective_function
23 | self.optimizer = optimizer
24 | self.callbacks = callbacks
25 | self.catch = catch
26 | self.nth_process = nth_process
27 |
28 | self.nth_iter = 0
29 |
30 | def run_callbacks(self, type_):
31 | if self.callbacks and type_ in self.callbacks:
32 | [callback(self) for callback in self.callbacks[type_]]
33 |
34 | def __call__(self, search_space):
35 | # wrapper for GFOs
36 | def _model(para):
37 | self.nth_iter = len(self.optimizer.pos_l)
38 | para = gfo2hyper(search_space, para)
39 | self.para_dict = para
40 |
41 | try:
42 | self.run_callbacks("before")
43 | results = self.objective_function(self)
44 | self.run_callbacks("after")
45 | except tuple(self.catch.keys()) as e:
46 | results = self.catch[e.__class__]
47 |
48 | return results
49 |
50 | _model.__name__ = self.objective_function.__name__
51 | return _model
52 |
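 53 | # Example (sketch): the `callbacks` dict maps "before"/"after" to lists of
 54 | # callables, and `catch` maps exception types to fallback scores, e.g.
 55 | #
 56 | #   callbacks = {"after": [lambda opt: print(opt.para_dict)]}
 57 | #   catch = {ValueError: float("nan")}
 58 | #
 59 | # Both values above are illustrative, not defaults.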
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/optimizer_attributes.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | class OptimizerAttributes:
7 | def __init__(self):
8 | self.best_para = None
9 | self.best_score = None
10 | self.best_since_iter = None
11 | self.eval_times = None
12 | self.iter_times = None
13 | self.search_data = None
14 | self.random_seed = None
15 |
16 | def _add_result_attributes(
17 | self,
18 | best_para,
19 | best_score,
20 | best_since_iter,
21 | eval_times,
22 | iter_times,
23 | search_data,
24 | random_seed,
25 | ):
26 | self.best_para = best_para
27 | self.best_score = best_score
28 | self.best_since_iter = best_since_iter
29 | self.eval_times = eval_times
30 | self.iter_times = iter_times
31 | self.search_data = search_data
32 | self.random_seed = random_seed
33 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/strategies/__init__.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from .custom_optimization_strategy import CustomOptimizationStrategy
7 |
8 |
9 | __all__ = [
10 | "CustomOptimizationStrategy",
11 | ]
12 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/strategies/custom_optimization_strategy.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 | from .optimization_strategy import BaseOptimizationStrategy
6 |
7 |
8 | class CustomOptimizationStrategy(BaseOptimizationStrategy):
9 | def __init__(self):
10 | super().__init__()
11 |
12 | self.optimizer_setup_l = []
13 | self.duration_sum = 0
14 |
15 | def add_optimizer(self, optimizer, duration=1, early_stopping=None):
16 | self.duration_sum += duration
17 | optimizer_setup = {
18 | "optimizer": optimizer,
19 | "duration": duration,
20 | "early_stopping": early_stopping,
21 | }
22 | self.optimizer_setup_l.append(optimizer_setup)
23 |
--------------------------------------------------------------------------------
/src/hyperactive/optimizers/strategies/optimizer_attributes.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 | import pandas as pd
6 |
7 |
8 | class OptimizerAttributes:
9 | def __init__(self):
10 | self.best_para = None
11 | self.best_score = None
12 | self.best_since_iter = None
13 | self.eval_times = None
14 | self.iter_times = None
15 | self.search_data = None
16 | self.random_seed = None
17 |
18 | def _add_result_attributes(
19 | self,
20 | best_para,
21 | best_score,
22 | best_since_iter,
23 | eval_times,
24 | iter_times,
25 | search_data,
26 | random_seed,
27 | ):
 28 |         # compare before overwriting self.best_score, so that best_para,
 29 |         # best_score and best_since_iter stay in sync; comparing after the
 30 |         # update could never be true and would leave best_since_iter stale
 31 |         if self.best_score is None or best_score > self.best_score:
 32 |             self.best_para = best_para
 33 |             self.best_score = best_score
 34 |             self.best_since_iter = best_since_iter
 35 | 
 36 |         if self.eval_times is None:
 37 |             self.eval_times = eval_times
 38 |         else:
 39 |             self.eval_times = self.eval_times + eval_times
 40 | 
 41 |         if self.iter_times is None:
 42 |             self.iter_times = iter_times
 43 |         else:
 44 |             self.iter_times = self.iter_times + iter_times
 45 | 
 46 |         if self.search_data is None:
 47 |             self.search_data = search_data
 48 |         else:
 49 |             self.search_data = pd.concat(
 50 |                 [self.search_data, search_data], ignore_index=True
 51 |             )
 52 | 
 53 |         if self.random_seed is None:
 54 |             self.random_seed = random_seed
 55 | 
--------------------------------------------------------------------------------
/src/hyperactive/process.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from tqdm import tqdm
7 |
8 |
9 | def _process_(nth_process, optimizer):
10 | if "progress_bar" in optimizer.verbosity:
11 | p_bar = tqdm(
12 | position=nth_process,
13 | total=optimizer.n_iter,
14 | ascii=" ─",
15 | colour="Yellow",
16 | )
17 | else:
18 | p_bar = None
19 |
20 | optimizer.search(nth_process, p_bar)
21 |
22 | if p_bar:
23 | p_bar.colour = "GREEN"
24 | p_bar.refresh()
25 | p_bar.close()
26 |
27 | return {
28 | "nth_process": nth_process,
29 | "best_para": optimizer.best_para,
30 | "best_score": optimizer.best_score,
31 | "best_iter": optimizer.best_since_iter,
32 | "eval_times": optimizer.eval_times,
33 | "iter_times": optimizer.iter_times,
34 | "search_data": optimizer.search_data,
35 | "random_seed": optimizer.random_seed,
36 | }
37 |
--------------------------------------------------------------------------------
/src/hyperactive/results.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | import numpy as np
7 | import pandas as pd
8 |
9 |
10 | class Results:
11 | def __init__(self, results_list, opt_pros):
12 | self.results_list = results_list
13 | self.opt_pros = opt_pros
14 |
15 | self.objFunc2results = {}
16 | self.search_id2results = {}
17 |
18 | def _sort_results_objFunc(self, objective_function):
19 | best_score = -np.inf
20 | best_para = None
 21 |         search_data = None
 22 |         params = None  # stays None if no process matches objective_function
 23 |         search_data_list = []
24 |
25 | for results_ in self.results_list:
26 | nth_process = results_["nth_process"]
27 |
28 | opt = self.opt_pros[nth_process]
29 | objective_function_ = opt.objective_function
30 | search_space_ = opt.s_space()
31 | params = list(search_space_.keys())
32 |
33 | if objective_function_ != objective_function:
34 | continue
35 |
36 | if results_["best_score"] > best_score:
37 | best_score = results_["best_score"]
38 | best_para = results_["best_para"]
39 |
40 | search_data = results_["search_data"]
41 | search_data["eval_times"] = results_["eval_times"]
42 | search_data["iter_times"] = results_["iter_times"]
43 |
44 | search_data_list.append(search_data)
45 |
46 | if len(search_data_list) > 0:
47 | search_data = pd.concat(search_data_list)
48 |
49 | self.objFunc2results[objective_function] = {
50 | "best_para": best_para,
51 | "best_score": best_score,
52 | "search_data": search_data,
53 | "params": params,
54 | }
55 |
56 | def _get_result(self, id_, result_name):
57 | if id_ not in self.objFunc2results:
58 | self._sort_results_objFunc(id_)
59 |
60 | search_data = self.objFunc2results[id_][result_name]
61 |
62 | return search_data
63 |
64 | def best_para(self, id_):
65 | best_para_ = self._get_result(id_, "best_para")
66 |
67 | if best_para_ is not None:
68 | return best_para_
69 |
70 | raise ValueError("objective function name not recognized")
71 |
72 | def best_score(self, id_):
73 | best_score_ = self._get_result(id_, "best_score")
74 |
75 | if best_score_ != -np.inf:
76 | return best_score_
77 |
78 | raise ValueError("objective function name not recognized")
79 |
 80 |     def search_data(self, id_):
 81 |         search_data = self._get_result(id_, "search_data")
 82 | 
 83 |         if search_data is not None:
 84 |             return search_data
 85 | 
 86 |         raise ValueError("objective function name not recognized")
 87 | 
--------------------------------------------------------------------------------
/src/hyperactive/run_search.py:
--------------------------------------------------------------------------------
1 | # Author: Simon Blanke
2 | # Email: simon.blanke@yahoo.com
3 | # License: MIT License
4 |
5 |
6 | from .distribution import (
7 | single_process,
8 | joblib_wrapper,
9 | multiprocessing_wrapper,
10 | pathos_wrapper,
11 | )
12 | from .process import _process_
13 |
14 |
15 | def proxy(args):
16 | return _process_(*args)
17 |
18 |
19 | dist_dict = {
20 | "joblib": (joblib_wrapper, _process_),
21 | "multiprocessing": (multiprocessing_wrapper, proxy),
22 | "pathos": (pathos_wrapper, proxy),
23 | }
24 |
25 |
26 | def _get_distribution(distribution):
27 | if callable(distribution):
28 | return (distribution, _process_), {}
29 |
30 | elif isinstance(distribution, dict):
31 | dist_key = next(iter(distribution))
32 | dist_paras = distribution[dist_key]
33 |
34 | return dist_dict[dist_key], dist_paras
35 |
 36 |     elif isinstance(distribution, str):
 37 |         return dist_dict[distribution], {}
 38 |     raise ValueError(f"unsupported distribution: {distribution!r}")
39 |
40 | def run_search(opt_pros, distribution, n_processes):
41 | process_infos = list(opt_pros.items())
42 |
43 | if n_processes == "auto":
44 | n_processes = len(process_infos)
45 |
46 | if n_processes == 1:
47 | results_list = single_process(_process_, process_infos)
48 | else:
49 | (distribution, process_func), dist_paras = _get_distribution(distribution)
50 |
51 | results_list = distribution(process_func, process_infos, n_processes)
52 |
53 | return results_list
54 |
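 55 | # Usage sketch: `distribution` may be a backend name ("joblib",
 56 | # "multiprocessing", "pathos"), a dict with such a name as its single key,
 57 | # or a custom callable with the same signature as the wrappers in
 58 | # distribution.py, e.g.
 59 | #
 60 | #   run_search(opt_pros, "joblib", n_processes="auto")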
--------------------------------------------------------------------------------
/src/hyperactive/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Object unified API test suite."""
2 |
--------------------------------------------------------------------------------
/src/hyperactive/tests/_config.py:
--------------------------------------------------------------------------------
1 | """Test configs."""
2 |
3 | # list of str, names of estimators to exclude from testing
4 | # WARNING: tests for these estimators will be skipped
5 | EXCLUDE_ESTIMATORS = [
6 | "DummySkipped",
7 | "ClassName", # exclude classes from extension templates
8 | ]
9 |
10 | # dictionary of lists of str, names of tests to exclude from testing
11 | # keys are class names of estimators, values are lists of test names to exclude
12 | # WARNING: tests with these names will be skipped
13 | EXCLUDED_TESTS = {}
14 |
--------------------------------------------------------------------------------
/src/hyperactive/tests/_doctest.py:
--------------------------------------------------------------------------------
1 | """Doctest utilities."""
2 | # copyright: sktime developers, BSD-3-Clause License (see LICENSE file)
3 |
4 | import contextlib
5 | import doctest
6 | import io
7 |
8 |
9 | def run_doctest(
10 | f,
11 | verbose=False,
12 | name=None,
13 | compileflags=None,
14 | optionflags=doctest.ELLIPSIS,
15 | raise_on_error=True,
16 | ):
17 | """Run doctests for a given function or class, and return or raise.
18 |
19 | Parameters
20 | ----------
21 | f : callable
22 | Function or class to run doctests for.
23 | verbose : bool, optional (default=False)
24 | If True, print the results of the doctests.
25 | name : str, optional (default=f.__name__, if available, otherwise "NoName")
26 | Name of the function or class.
27 | compileflags : int, optional (default=None)
28 | Flags to pass to the Python parser.
29 | optionflags : int, optional (default=doctest.ELLIPSIS)
30 | Flags to control the behaviour of the doctest.
31 | raise_on_error : bool, optional (default=True)
32 | If True, raise an exception if the doctests fail.
33 |
34 | Returns
35 | -------
36 | doctest_output : str
37 | Output of the doctests.
38 |
39 | Raises
40 | ------
41 | RuntimeError
42 | If raise_on_error=True and the doctests fail.
43 | """
44 | doctest_output_io = io.StringIO()
45 | with contextlib.redirect_stdout(doctest_output_io):
46 | doctest.run_docstring_examples(
47 | f=f,
48 | globs=globals(),
49 | verbose=verbose,
50 | name=name,
51 | compileflags=compileflags,
52 | optionflags=optionflags,
53 | )
54 | doctest_output = doctest_output_io.getvalue()
55 |
56 | if name is None:
57 | name = f.__name__ if hasattr(f, "__name__") else "NoName"
58 |
59 | if raise_on_error and len(doctest_output) > 0:
60 | raise RuntimeError(
61 | f"Docstring examples failed doctests "
62 | f"for {name}, doctest output: {doctest_output}"
63 | )
64 | return doctest_output
65 |
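 66 | # Example (sketch):
 67 | #
 68 | #   from hyperactive.experiment.toy import Parabola
 69 | #   run_doctest(Parabola)  # raises RuntimeError if docstring examples fail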
--------------------------------------------------------------------------------
/src/hyperactive/tests/test_doctest.py:
--------------------------------------------------------------------------------
1 | # copyright: sktime developers, BSD-3-Clause License (see LICENSE file)
2 | """Doctest checks directed through pytest with conditional skipping."""
3 |
4 | import importlib
5 | import inspect
6 | import pkgutil
7 | from functools import lru_cache
8 |
9 | from hyperactive.tests._doctest import run_doctest
10 |
11 | EXCLUDE_MODULES_STARTING_WITH = ("all", "test")
12 | PKG_NAME = "hyperactive"
13 |
14 |
15 | def _all_functions(module_name):
16 | """Get all functions from a module, including submodules.
17 |
18 | Excludes modules starting with 'all' or 'test'.
19 |
20 | Parameters
21 | ----------
22 | module_name : str
23 | Name of the module.
24 |
25 | Returns
26 | -------
27 | functions_list : list
28 | List of tuples (function_name, function_object).
29 | """
30 | res = _all_functions_cached(module_name)
31 | # copy the result to avoid modifying the cached result
32 | return res.copy()
33 |
34 |
35 | @lru_cache
36 | def _all_functions_cached(module_name):
37 | """Get all functions from a module, including submodules.
38 |
39 | Excludes: modules starting with 'all' or 'test'.
40 |
41 | Parameters
42 | ----------
43 | module_name : str
44 | Name of the module.
47 |
48 | Returns
49 | -------
50 | functions_list : list
51 | List of tuples (function_name, function_object).
52 | """
53 | # Import the package
54 | package = importlib.import_module(module_name)
55 |
56 | # Initialize an empty list to hold all functions
57 | functions_list = []
58 |
59 | # Walk through the package's modules
60 | package_path = package.__path__[0]
61 | for _, modname, _ in pkgutil.walk_packages(
62 | path=[package_path], prefix=package.__name__ + "."
63 | ):
64 | # Skip modules starting with 'all' or 'test'
65 | if modname.split(".")[-1].startswith(EXCLUDE_MODULES_STARTING_WITH):
66 | continue
67 |
68 | # Import the module
69 | module = importlib.import_module(modname)
70 |
71 | # Get all functions from the module
72 | for name, obj in inspect.getmembers(module, inspect.isfunction):
73 | # if function is imported from another module, skip it
74 | if obj.__module__ != module.__name__:
75 | continue
76 | # add the function to the list
77 | functions_list.append((name, obj))
78 |
79 | return functions_list
80 |
81 |
82 | def pytest_generate_tests(metafunc):
83 | """Test parameterization routine for pytest.
84 |
85 | Fixtures parameterized
86 | ----------------------
87 | func : all functions in the package, as returned by _all_functions
88 | """
 89 |     # we assume the "func" argument is present in the test below
90 | funcs_and_names = _all_functions(PKG_NAME)
91 |
92 | if len(funcs_and_names) > 0:
93 | names, funcs = zip(*funcs_and_names)
94 |
95 | metafunc.parametrize("func", funcs, ids=names)
96 | else:
97 | metafunc.parametrize("func", [])
98 |
99 |
100 | def test_all_functions_doctest(func):
101 | """Run doctest for all functions in sktime."""
102 | run_doctest(func, name=f"function {func.__name__}")
103 |
--------------------------------------------------------------------------------
/src/hyperactive/todos.md:
--------------------------------------------------------------------------------
  1 | # todos
  2 | 
  3 | - rename `score` to `objective_function`?
  4 | - rename `_score` to `_objective_function`
  5 | - change the `score` arg from `**params` to `params` (dict)
  6 | 
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/__init__.py
--------------------------------------------------------------------------------
/tests/_local_test_optimization_strategies/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/_local_test_optimization_strategies/__init__.py
--------------------------------------------------------------------------------
/tests/_local_test_optimization_strategies/_parametrize.py:
--------------------------------------------------------------------------------
1 | from hyperactive.optimizers import (
2 | HillClimbingOptimizer,
3 | StochasticHillClimbingOptimizer,
4 | RepulsingHillClimbingOptimizer,
5 | SimulatedAnnealingOptimizer,
6 | DownhillSimplexOptimizer,
7 | RandomSearchOptimizer,
8 | GridSearchOptimizer,
9 | RandomRestartHillClimbingOptimizer,
10 | RandomAnnealingOptimizer,
11 | PowellsMethod,
12 | PatternSearch,
13 | ParallelTemperingOptimizer,
14 | ParticleSwarmOptimizer,
15 | SpiralOptimization,
16 | EvolutionStrategyOptimizer,
17 | BayesianOptimizer,
18 | LipschitzOptimizer,
19 | DirectAlgorithm,
20 | TreeStructuredParzenEstimators,
21 | ForestOptimizer,
22 | )
23 |
24 |
25 | optimizers = (
26 | "Optimizer",
27 | [
28 | (HillClimbingOptimizer),
29 | (StochasticHillClimbingOptimizer),
30 | (RepulsingHillClimbingOptimizer),
31 | (SimulatedAnnealingOptimizer),
32 | (DownhillSimplexOptimizer),
33 | (RandomSearchOptimizer),
34 | (GridSearchOptimizer),
35 | (RandomRestartHillClimbingOptimizer),
36 | (RandomAnnealingOptimizer),
37 | (PowellsMethod),
38 | (PatternSearch),
39 | (ParallelTemperingOptimizer),
40 | (ParticleSwarmOptimizer),
41 | (SpiralOptimization),
42 | (EvolutionStrategyOptimizer),
43 | (BayesianOptimizer),
44 | (LipschitzOptimizer),
45 | (DirectAlgorithm),
46 | (TreeStructuredParzenEstimators),
47 | (ForestOptimizer),
48 | ],
49 | )
50 |
51 |
52 | optimizers_strat = (
53 | "Optimizer_strat",
54 | [
55 | (HillClimbingOptimizer),
56 | (StochasticHillClimbingOptimizer),
57 | (RepulsingHillClimbingOptimizer),
58 | (SimulatedAnnealingOptimizer),
59 | (DownhillSimplexOptimizer),
60 | (RandomSearchOptimizer),
61 | (GridSearchOptimizer),
62 | (RandomRestartHillClimbingOptimizer),
63 | (RandomAnnealingOptimizer),
64 | (PowellsMethod),
65 | (PatternSearch),
66 | (ParallelTemperingOptimizer),
67 | (ParticleSwarmOptimizer),
68 | (SpiralOptimization),
69 | (EvolutionStrategyOptimizer),
70 | (BayesianOptimizer),
71 | (LipschitzOptimizer),
72 | (DirectAlgorithm),
73 | (TreeStructuredParzenEstimators),
74 | (ForestOptimizer),
75 | ],
76 | )
77 |
78 |
79 | optimizers_non_smbo = (
80 | "Optimizer_non_smbo",
81 | [
82 | (HillClimbingOptimizer),
83 | (StochasticHillClimbingOptimizer),
84 | (RepulsingHillClimbingOptimizer),
85 | (SimulatedAnnealingOptimizer),
86 | (DownhillSimplexOptimizer),
87 | (RandomSearchOptimizer),
88 | (GridSearchOptimizer),
89 | (RandomRestartHillClimbingOptimizer),
90 | (RandomAnnealingOptimizer),
91 | (PowellsMethod),
92 | (PatternSearch),
93 | (ParallelTemperingOptimizer),
94 | (ParticleSwarmOptimizer),
95 | (SpiralOptimization),
96 | (EvolutionStrategyOptimizer),
97 | ],
98 | )
99 |
100 |
101 | optimizers_smbo = (
102 | "Optimizer_smbo",
103 | [
104 | (BayesianOptimizer),
105 | (LipschitzOptimizer),
106 | (DirectAlgorithm),
107 | (TreeStructuredParzenEstimators),
108 | (ForestOptimizer),
109 | ],
110 | )
111 |
--------------------------------------------------------------------------------
/tests/_local_test_optimization_strategies/_test_memory_warm_start.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import numpy as np
4 |
5 |
6 | from hyperactive import Hyperactive
7 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
8 | from hyperactive.optimizers import GridSearchOptimizer
9 |
10 | from ._parametrize import optimizers_non_smbo
11 |
12 |
13 | def objective_function(opt):
14 | time.sleep(0.01)
15 | score = -(opt["x1"] * opt["x1"])
16 | return score
17 |
18 |
19 | search_space = {
20 | "x1": list(np.arange(0, 100, 1)),
21 | }
22 |
23 |
24 | def test_memory_Warm_start_0():
25 | optimizer1 = GridSearchOptimizer()
26 | optimizer2 = GridSearchOptimizer()
27 |
28 | opt_strat = CustomOptimizationStrategy()
29 | opt_strat.add_optimizer(optimizer1, duration=0.2)
30 | opt_strat.add_optimizer(optimizer2, duration=0.8)
31 |
32 | n_iter = 1000
33 |
34 | c_time = time.time()
35 |
36 | hyper = Hyperactive()
37 | hyper.add_search(
38 | objective_function,
39 | search_space,
40 | optimizer=opt_strat,
41 | n_iter=n_iter,
42 | memory=True,
43 | )
44 | hyper.run()
45 |
46 | d_time = time.time() - c_time
47 |
48 | search_data = hyper.search_data(objective_function)
49 |
50 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
51 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
52 |
53 | assert len(search_data) == n_iter
54 |
55 | assert len(optimizer1.search_data) == 200
56 | assert len(optimizer2.search_data) == 800
57 |
58 | assert optimizer1.best_score <= optimizer2.best_score
59 |
60 | print("\n d_time", d_time)
61 |
62 | assert d_time < 3
63 |
64 |
65 | def test_memory_Warm_start_1():
66 | optimizer1 = GridSearchOptimizer()
67 | optimizer2 = GridSearchOptimizer()
68 |
69 | opt_strat = CustomOptimizationStrategy()
70 | opt_strat.add_optimizer(optimizer1, duration=0.2)
71 | opt_strat.add_optimizer(optimizer2, duration=0.8)
72 |
73 | n_iter = 100
74 |
75 | search_space = {
76 | "x1": list(np.arange(0, 1, 1)),
77 | }
78 |
79 | c_time = time.time()
80 |
81 | hyper = Hyperactive()
82 | hyper.add_search(
83 | objective_function,
84 | search_space,
85 | optimizer=opt_strat,
86 | n_iter=n_iter,
87 | memory=False,
88 | )
89 | hyper.run()
90 |
91 | d_time = time.time() - c_time
92 |
93 | search_data = hyper.search_data(objective_function)
94 |
95 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
96 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
97 |
98 | assert len(search_data) == n_iter
99 |
100 | assert len(optimizer1.search_data) == 20
101 | assert len(optimizer2.search_data) == 80
102 |
103 | assert optimizer1.best_score <= optimizer2.best_score
104 |
105 | print("\n d_time", d_time)
106 |
107 | assert d_time > 0.95
108 |
109 |
110 |
111 | @pytest.mark.parametrize(*optimizers_non_smbo)
112 | def test_memory_Warm_start_2(Optimizer_non_smbo):
113 | optimizer1 = GridSearchOptimizer()
114 | optimizer2 = Optimizer_non_smbo()
115 |
116 | opt_strat = CustomOptimizationStrategy()
117 | opt_strat.add_optimizer(optimizer1, duration=0.5)
118 | opt_strat.add_optimizer(optimizer2, duration=0.5)
119 |
120 | search_space = {
121 | "x1": list(np.arange(0, 50, 1)),
122 | }
123 |
124 | n_iter = 100
125 |
126 | c_time = time.time()
127 |
128 | hyper = Hyperactive()
129 | hyper.add_search(
130 | objective_function,
131 | search_space,
132 | optimizer=opt_strat,
133 | n_iter=n_iter,
134 | memory=True,
135 | )
136 | hyper.run()
137 |
138 | d_time = time.time() - c_time
139 |
140 | search_data = hyper.search_data(objective_function)
141 |
142 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
143 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
144 |
145 | assert len(search_data) == n_iter
146 |
147 | assert len(optimizer1.search_data) == 50
148 | assert len(optimizer2.search_data) == 50
149 |
150 | assert optimizer1.best_score <= optimizer2.best_score
151 |
152 | print("\n d_time", d_time)
153 |
154 | assert d_time < 0.9
155 |
--------------------------------------------------------------------------------
/tests/_local_test_optimization_strategies/_test_memory_warm_start_smbo.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import numpy as np
4 |
5 |
6 | from hyperactive import Hyperactive
7 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
8 | from hyperactive.optimizers import GridSearchOptimizer
9 |
10 | from ._parametrize import optimizers_smbo
11 |
12 |
13 | def objective_function(opt):
14 | time.sleep(0.01)
15 | score = -(opt["x1"] * opt["x1"])
16 | return score
17 |
18 |
19 | search_space = {
20 | "x1": list(np.arange(0, 100, 1)),
21 | }
22 |
23 |
24 | @pytest.mark.parametrize(*optimizers_smbo)
25 | def test_memory_Warm_start_smbo_0(Optimizer_smbo):
26 | optimizer1 = GridSearchOptimizer()
27 | optimizer2 = Optimizer_smbo()
28 |
29 | opt_strat = CustomOptimizationStrategy()
30 | opt_strat.add_optimizer(optimizer1, duration=0.8)
31 | opt_strat.add_optimizer(optimizer2, duration=0.2)
32 |
33 | n_iter = 100
34 |
35 | hyper = Hyperactive()
36 | hyper.add_search(
37 | objective_function,
38 | search_space,
39 | optimizer=opt_strat,
40 | n_iter=n_iter,
41 | memory=True,
42 | )
43 | hyper.run()
44 |
45 | search_data = hyper.search_data(objective_function)
46 |
47 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
48 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
49 |
50 | assert len(search_data) == n_iter
51 |
52 | assert len(optimizer1.search_data) == 80
53 | assert len(optimizer2.search_data) == 20
54 |
55 | assert optimizer1.best_score <= optimizer2.best_score
56 |
--------------------------------------------------------------------------------
/tests/_local_test_optimization_strategies/_test_strategy_multiprocessing.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers import RandomSearchOptimizer
7 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
8 |
9 | from ._parametrize import optimizers, optimizers_strat
10 |
11 |
12 | def objective_function(opt):
13 | score = -(opt["x1"] * opt["x1"] + opt["x2"] * opt["x2"])
14 | return score
15 |
16 |
17 | search_space = {
18 | "x1": list(np.arange(-3, 3, 1)),
19 | "x2": list(np.arange(-3, 3, 1)),
20 | }
21 |
22 | """
23 | @pytest.mark.parametrize(*optimizers_strat)
24 | def test_strategy_multiprocessing_0(Optimizer_strat):
25 | optimizer1 = RandomSearchOptimizer()
26 | optimizer2 = Optimizer_strat()
27 |
28 | opt_strat = CustomOptimizationStrategy()
29 | opt_strat.add_optimizer(optimizer1, duration=0.5)
30 | opt_strat.add_optimizer(optimizer2, duration=0.5)
31 |
32 | n_iter = 25
33 |
34 | hyper = Hyperactive()
35 | hyper.add_search(
36 | objective_function,
37 | search_space,
38 | optimizer=opt_strat,
39 | n_iter=n_iter,
40 | n_jobs=2,
41 | )
42 | hyper.run()
43 |
44 |
45 | @pytest.mark.parametrize(*optimizers_strat)
46 | def test_strategy_multiprocessing_1(Optimizer_strat):
47 | optimizer1 = RandomSearchOptimizer()
48 | optimizer2 = Optimizer_strat()
49 |
50 | opt_strat = CustomOptimizationStrategy()
51 | opt_strat.add_optimizer(optimizer1, duration=0.5)
52 | opt_strat.add_optimizer(optimizer2, duration=0.5)
53 |
54 | n_iter = 25
55 |
56 | hyper = Hyperactive()
57 | hyper.add_search(
58 | objective_function,
59 | search_space,
60 | optimizer=opt_strat,
61 | n_iter=n_iter,
62 | n_jobs=1,
63 | )
64 | hyper.add_search(
65 | objective_function,
66 | search_space,
67 | optimizer=opt_strat,
68 | n_iter=n_iter,
69 | n_jobs=1,
70 | )
71 | hyper.run()
72 | """
73 |
--------------------------------------------------------------------------------
/tests/_local_test_timings/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/_local_test_timings/__init__.py
--------------------------------------------------------------------------------
/tests/_local_test_timings/_search_space_list.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 |
4 |
5 | def search_space_setup(size=1000):
  6 |     if size < 1000:
  7 |         # fail fast: returning None here would break iteration in callers
  8 |         raise ValueError("size must be at least 1000, some search spaces cannot be created")
9 |
10 | pad_full = list(range(0, size))
11 | pad_cat = list(range(int(size / 3)))
12 | pad_10 = list(range(int(size ** 0.1)))
13 |
14 | search_space_0 = {
15 | "x1": pad_full,
16 | }
17 |
18 | search_space_1 = {
19 | "x1": pad_cat,
20 | "x2": list(range(3)),
21 | }
22 |
23 | search_space_2 = {
24 | "x1": pad_10,
25 | "x2": pad_10,
26 | "x3": pad_10,
27 | "x4": pad_10,
28 | "x5": pad_10,
29 | "x6": pad_10,
30 | "x7": pad_10,
31 | "x8": pad_10,
32 | "x9": pad_10,
33 | "x10": pad_10,
34 | }
35 |
36 | search_space_3 = {
37 | "x1": pad_10,
38 | "x2": pad_10,
39 | "x3": pad_10,
40 | "x4": pad_10,
41 | "x5": pad_10,
42 | "x6": pad_10,
43 | "x7": pad_10,
44 | "x8": pad_10,
45 | "x9": pad_10,
46 | "x10": pad_10,
47 | "x11": [1],
48 | "x12": [1],
49 | "x13": [1],
50 | "x14": [1],
51 | "x15": [1],
52 | "x16": [1],
53 | "x17": [1],
54 | "x18": [1],
55 | "x19": [1],
56 | "x20": [1],
57 | }
58 |
59 | search_space_4 = {
60 | "x1": pad_cat,
61 | "str1": ["0", "1", "2"],
62 | }
63 |
64 | def func1():
65 | pass
66 |
67 | def func2():
68 | pass
69 |
70 | def func3():
71 | pass
72 |
73 | search_space_5 = {
74 | "x1": pad_cat,
75 | "func1": [func1, func2, func3],
76 | }
77 |
78 | class class1:
79 | pass
80 |
81 | class class2:
82 | pass
83 |
84 | class class3:
85 | pass
86 |
87 | def wr_func_1():
88 | return class1
89 |
90 | def wr_func_2():
91 | return class2
92 |
93 | def wr_func_3():
94 | return class3
95 |
96 | search_space_6 = {
97 | "x1": pad_cat,
98 | "class_1": [wr_func_1, wr_func_2, wr_func_3],
99 | }
100 |
101 | class class1:
102 | def __init__(self):
103 | pass
104 |
105 | class class2:
106 | def __init__(self):
107 | pass
108 |
109 | class class3:
110 | def __init__(self):
111 | pass
112 |
113 | def wr_func_1():
114 | return class1()
115 |
116 | def wr_func_2():
117 | return class2()
118 |
119 | def wr_func_3():
120 | return class3()
121 |
122 | search_space_7 = {
123 | "x1": pad_cat,
124 | "class_obj_1": [wr_func_1, wr_func_2, wr_func_3],
125 | }
126 |
127 | def wr_func_1():
128 | return [1, 0, 0]
129 |
130 | def wr_func_2():
131 | return [0, 1, 0]
132 |
133 | def wr_func_3():
134 | return [0, 0, 1]
135 |
136 | search_space_8 = {
137 | "x1": pad_cat,
138 | "list_1": [wr_func_1, wr_func_2, wr_func_3],
139 | }
140 |
141 | def wr_func_1():
142 | return np.array([1, 0, 0])
143 |
144 | def wr_func_2():
145 | return np.array([0, 1, 0])
146 |
147 | def wr_func_3():
148 | return np.array([0, 0, 1])
149 |
150 | search_space_9 = {
151 | "x1": pad_cat,
152 | "array_1": [wr_func_1, wr_func_2, wr_func_3],
153 | }
154 |
155 | def wr_func_1():
156 | return pd.DataFrame(np.array([1, 0, 0]))
157 |
158 | def wr_func_2():
159 | return pd.DataFrame(np.array([0, 1, 0]))
160 |
161 | def wr_func_3():
162 | return pd.DataFrame(np.array([0, 0, 1]))
163 |
164 | search_space_10 = {
165 | "x1": pad_cat,
166 | "df_1": [wr_func_1, wr_func_2, wr_func_3],
167 | }
168 |
169 | search_space_list = [
170 | (search_space_0),
171 | (search_space_1),
172 | (search_space_2),
173 | (search_space_3),
174 | (search_space_4),
175 | (search_space_5),
176 | (search_space_6),
177 | (search_space_7),
178 | (search_space_8),
179 | (search_space_9),
180 | (search_space_10),
181 | ]
182 |
183 | return search_space_list
184 |
--------------------------------------------------------------------------------
/tests/_local_test_timings/_test_warm_start.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import numpy as np
4 | import pandas as pd
5 |
6 | from hyperactive import Hyperactive
7 |
8 | from ._search_space_list import search_space_setup
9 |
10 | search_space_list = search_space_setup()
11 |
12 |
13 | def objective_function(opt):
14 | score = -opt["x1"] * opt["x1"]
15 | return score
16 |
17 |
18 | @pytest.mark.parametrize("search_space", search_space_list)
19 | def test_warm_start_0(search_space):
20 | hyper0 = Hyperactive()
21 | hyper0.add_search(objective_function, search_space, n_iter=20)
22 | hyper0.run()
23 |
24 | best_para0 = hyper0.best_para(objective_function)
25 |
26 | hyper1 = Hyperactive()
27 | hyper1.add_search(
28 | objective_function,
29 | search_space,
30 | n_iter=20,
31 | initialize={"warm_start": [best_para0]},
32 | )
33 | hyper1.run()
34 |
35 |
--------------------------------------------------------------------------------
/tests/_test_examples.py:
--------------------------------------------------------------------------------
1 | import os, glob
2 | import subprocess
3 | from subprocess import DEVNULL, STDOUT
4 |
5 | here = os.path.dirname(os.path.abspath(__file__))
6 |
7 | files0 = glob.glob(here+"/../examples/*/*.py")
8 | files1 = glob.glob(here+"/../examples/*.py")
9 |
10 | files = files0 + files1
11 |
12 | print("run files:", files)
13 |
14 | for file_path in files:
15 | file_name = os.path.basename(file_path)
16 |
17 | try:
18 | print("\033[0;33;40m Testing", file_name, end="...\r")
19 | subprocess.check_call(["python", file_path], stdout=DEVNULL, stderr=STDOUT)
20 | except subprocess.CalledProcessError:
21 | print("\033[0;31;40m Error in", file_name)
22 | else:
23 | print("\033[0;32;40m", file_name, "is correct")
24 | print("\n")
25 |
--------------------------------------------------------------------------------
/tests/integrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/integrations/__init__.py
--------------------------------------------------------------------------------
/tests/integrations/sklearn/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/integrations/sklearn/__init__.py
--------------------------------------------------------------------------------
/tests/integrations/sklearn/test_parametrize_with_checks.py:
--------------------------------------------------------------------------------
1 | from sklearn import svm
2 |
3 | from hyperactive.integrations import HyperactiveSearchCV, OptCV
4 | from hyperactive.opt import GridSearchSk as GridSearch
5 | from hyperactive.optimizers import RandomSearchOptimizer
6 |
7 | from sklearn.model_selection import KFold
8 | from sklearn.utils.estimator_checks import parametrize_with_checks
9 |
10 | svc = svm.SVC()
11 | parameters = {"kernel": ["linear", "rbf"], "C": [1, 10]}
12 | opt = RandomSearchOptimizer()
13 | hyperactivecv = HyperactiveSearchCV(svc, parameters, opt)
14 |
15 | cv = KFold(n_splits=2, shuffle=True, random_state=42)
16 | optcv = OptCV(estimator=svc, optimizer=GridSearch(param_grid=parameters), cv=cv)
17 |
18 | ESTIMATORS = [hyperactivecv, optcv]
19 |
20 |
21 | @parametrize_with_checks(ESTIMATORS)
22 | def test_estimators(estimator, check):
23 | check(estimator)
24 |
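Since parametrize_with_checks only passes if both wrappers implement the scikit-learn estimator contract, everyday usage is the ordinary fit/predict cycle. A short sketch under that assumption (the iris data is just a stand-in):

    from sklearn import svm
    from sklearn.datasets import load_iris
    from sklearn.model_selection import KFold

    from hyperactive.integrations import OptCV
    from hyperactive.opt import GridSearchSk as GridSearch

    X, y = load_iris(return_X_y=True)
    parameters = {"kernel": ["linear", "rbf"], "C": [1, 10]}

    cv = KFold(n_splits=2, shuffle=True, random_state=42)
    optcv = OptCV(estimator=svm.SVC(), optimizer=GridSearch(param_grid=parameters), cv=cv)

    optcv.fit(X, y)            # runs the search over the CV splits
    y_pred = optcv.predict(X)  # uses the best estimator found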
--------------------------------------------------------------------------------
/tests/test_callbacks.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import pytest
3 | import numpy as np
4 | import pandas as pd
5 |
6 | from hyperactive import Hyperactive
7 |
8 |
9 | search_space = {
10 | "x1": list(np.arange(-100, 100, 1)),
11 | }
12 |
13 |
14 | def test_callback_0():
15 | def callback_1(access):
16 | access.stuff1 = 1
17 |
18 | def callback_2(access):
19 | access.stuff2 = 2
20 |
21 | def objective_function(access):
22 | assert access.stuff1 == 1
23 | assert access.stuff2 == 2
24 |
25 | return 0
26 |
27 | hyper = Hyperactive()
28 | hyper.add_search(
29 | objective_function,
30 | search_space,
31 | n_iter=100,
32 | callbacks={"before": [callback_1, callback_2]},
33 | )
34 | hyper.run()
35 |
36 |
37 | def test_callback_1():
38 | def callback_1(access):
39 | access.stuff1 = 1
40 |
41 | def callback_2(access):
42 | access.stuff1 = 2
43 |
44 | def objective_function(access):
45 | assert access.stuff1 == 1
46 |
47 | return 0
48 |
49 | hyper = Hyperactive()
50 | hyper.add_search(
51 | objective_function,
52 | search_space,
53 | n_iter=100,
54 | callbacks={"before": [callback_1], "after": [callback_2]},
55 | )
56 | hyper.run()
57 |
58 |
59 | def test_callback_2():
60 | def callback_1(access):
61 | access.pass_through["stuff1"] = 1
62 |
63 | def objective_function(access):
64 | assert access.pass_through["stuff1"] == 1
65 |
66 | return 0
67 |
68 | hyper = Hyperactive()
69 | hyper.add_search(
70 | objective_function,
71 | search_space,
72 | n_iter=100,
73 | callbacks={"before": [callback_1]},
74 | pass_through={"stuff1": 0},
75 | )
76 | hyper.run()
77 |
78 |
79 | def test_callback_3():
80 | def callback_1(access):
81 | access.pass_through["stuff1"] = 1
82 |
83 | def objective_function(access):
84 | if access.nth_iter == 0:
85 | assert access.pass_through["stuff1"] == 0
86 | else:
87 | assert access.pass_through["stuff1"] == 1
88 |
89 | return 0
90 |
91 | hyper = Hyperactive()
92 | hyper.add_search(
93 | objective_function,
94 | search_space,
95 | n_iter=100,
96 | callbacks={"after": [callback_1]},
97 | pass_through={"stuff1": 0},
98 | )
99 | hyper.run()
100 |
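Taken together, these tests pin down the callback contract: "before" callbacks run ahead of each objective call, "after" callbacks run once the score is computed, and all of them receive the same access object as the objective, so attributes and the pass_through dict are shared. A condensed sketch of that lifecycle:

    import numpy as np
    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 1))}

    def count_calls(access):
        # runs before every objective evaluation
        access.pass_through["n_calls"] += 1

    def objective_function(access):
        return -access["x1"] * access["x1"]

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=20,
        callbacks={"before": [count_calls]},
        pass_through={"n_calls": 0},
    )
    hyper.run()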
--------------------------------------------------------------------------------
/tests/test_catch.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import pytest
3 | import math
4 | import numpy as np
5 | import pandas as pd
6 |
7 | from hyperactive import Hyperactive
8 |
9 |
10 | search_space = {
11 | "x1": list(np.arange(-100, 100, 1)),
12 | }
13 |
14 |
15 | def test_catch_1():
16 | def objective_function(access):
17 | a = 1 + "str"
18 |
19 | return 0
20 |
21 | hyper = Hyperactive()
22 | hyper.add_search(
23 | objective_function,
24 | search_space,
25 | n_iter=100,
26 | catch={TypeError: np.nan},
27 | )
28 | hyper.run()
29 |
30 |
31 | def test_catch_2():
32 | def objective_function(access):
33 | math.sqrt(-10)
34 |
35 | return 0
36 |
37 | hyper = Hyperactive()
38 | hyper.add_search(
39 | objective_function,
40 | search_space,
41 | n_iter=100,
42 | catch={ValueError: np.nan},
43 | )
44 | hyper.run()
45 |
46 |
47 | def test_catch_3():
48 | def objective_function(access):
49 | x = 1 / 0
50 |
51 | return 0
52 |
53 | hyper = Hyperactive()
54 | hyper.add_search(
55 | objective_function,
56 | search_space,
57 | n_iter=100,
58 | catch={ZeroDivisionError: np.nan},
59 | )
60 | hyper.run()
61 |
62 |
63 | def test_catch_all_0():
64 | def objective_function(access):
65 | a = 1 + "str"
66 | math.sqrt(-10)
67 | x = 1 / 0
68 |
69 | return 0
70 |
71 | hyper = Hyperactive()
72 | hyper.add_search(
73 | objective_function,
74 | search_space,
75 | n_iter=100,
76 | catch={
77 | TypeError: np.nan,
78 | ValueError: np.nan,
79 | ZeroDivisionError: np.nan,
80 | },
81 | )
82 | hyper.run()
83 |
84 | nan_ = hyper.search_data(objective_function)["score"].values[0]
85 |
86 | assert math.isnan(nan_)
87 |
88 |
89 | def test_catch_all_1():
90 | def objective_function(access):
91 | a = 1 + "str"
92 | math.sqrt(-10)
93 | x = 1 / 0
94 |
95 | return 0, {"error": False}
96 |
97 | catch_return = (np.nan, {"error": True})
98 |
99 | hyper = Hyperactive()
100 | hyper.add_search(
101 | objective_function,
102 | search_space,
103 | n_iter=100,
104 | catch={
105 | TypeError: catch_return,
106 | ValueError: catch_return,
107 | ZeroDivisionError: catch_return,
108 | },
109 | )
110 | hyper.run()
111 |
112 | nan_ = hyper.search_data(objective_function)["score"].values[0]
113 | error_ = hyper.search_data(objective_function)["error"].values[0]
114 |
115 | assert math.isnan(nan_)
116 | assert error_
117 |
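The catch argument maps exception types to the value recorded in place of a score, and test_catch_all_1 shows the subtle part: the substitute has to mirror the objective's return shape, so an objective returning (score, extra_dict) needs a matching tuple. A minimal sketch of that tuple form:

    import math
    import numpy as np
    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 1))}

    def objective_function(access):
        math.sqrt(-10)  # raises ValueError on every evaluation
        return 0, {"error": False}

    # the fallback mirrors the (score, extra-dict) return shape
    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=10,
        catch={ValueError: (np.nan, {"error": True})},
    )
    hyper.run()
    assert hyper.search_data(objective_function)["error"].values[0]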
--------------------------------------------------------------------------------
/tests/test_constr_opt.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from hyperactive import Hyperactive
4 |
5 |
6 | def test_constr_opt_0():
7 | def objective_function(para):
8 | score = -para["x1"] * para["x1"]
9 | return score
10 |
11 | search_space = {
12 | "x1": list(np.arange(-15, 15, 1)),
13 | }
14 |
15 | def constraint_1(para):
16 | print(" para", para)
17 |
18 | return para["x1"] > -5
19 |
20 | constraints_list = [constraint_1]
21 |
22 | hyper = Hyperactive()
23 | hyper.add_search(
24 | objective_function,
25 | search_space,
26 | n_iter=50,
27 | constraints=constraints_list,
28 | )
29 | hyper.run()
30 |
31 | search_data = hyper.search_data(objective_function)
32 | x0_values = search_data["x1"].values
33 |
34 | print("\n search_data \n", search_data, "\n")
35 |
36 | assert np.all(x0_values > -5)
37 |
38 |
39 | def test_constr_opt_1():
40 | def objective_function(para):
41 | score = -(para["x1"] * para["x1"] + para["x2"] * para["x2"])
42 | return score
43 |
44 | search_space = {
45 | "x1": list(np.arange(-10, 10, 1)),
46 | "x2": list(np.arange(-10, 10, 1)),
47 | }
48 |
49 | def constraint_1(para):
50 | return para["x1"] > -5
51 |
52 | constraints_list = [constraint_1]
53 |
54 | hyper = Hyperactive()
55 | hyper.add_search(
56 | objective_function,
57 | search_space,
58 | n_iter=50,
59 | constraints=constraints_list,
60 | )
61 | hyper.run()
62 |
63 | search_data = hyper.search_data(objective_function)
64 | x0_values = search_data["x1"].values
65 |
66 | print("\n search_data \n", search_data, "\n")
67 |
68 | assert np.all(x0_values > -5)
69 |
70 |
71 | def test_constr_opt_2():
72 | n_iter = 50
73 |
74 | def objective_function(para):
75 | score = -para["x1"] * para["x1"]
76 | return score
77 |
78 | search_space = {
79 | "x1": list(np.arange(-10, 10, 0.1)),
80 | }
81 |
82 | def constraint_1(para):
83 | return para["x1"] > -5
84 |
85 | def constraint_2(para):
86 | return para["x1"] < 5
87 |
88 | constraints_list = [constraint_1, constraint_2]
89 |
90 | hyper = Hyperactive()
91 | hyper.add_search(
92 | objective_function,
93 | search_space,
94 | n_iter=n_iter,
95 | constraints=constraints_list,
96 | )
97 | hyper.run()
98 |
99 | search_data = hyper.search_data(objective_function)
100 | x0_values = search_data["x1"].values
101 |
102 | print("\n search_data \n", search_data, "\n")
103 |
104 | assert np.all(x0_values > -5)
105 | assert np.all(x0_values < 5)
106 |
107 | n_new_positions = 0
108 | n_new_scores = 0
109 |
110 | n_current_positions = 0
111 | n_current_scores = 0
112 |
113 | n_best_positions = 0
114 | n_best_scores = 0
115 |
116 | for hyper_optimizer in hyper.opt_pros.values():
117 | optimizer = hyper_optimizer.gfo_optimizer
118 |
119 | n_new_positions = n_new_positions + len(optimizer.pos_new_list)
120 | n_new_scores = n_new_scores + len(optimizer.score_new_list)
121 |
122 | n_current_positions = n_current_positions + len(optimizer.pos_current_list)
123 | n_current_scores = n_current_scores + len(optimizer.score_current_list)
124 |
125 | n_best_positions = n_best_positions + len(optimizer.pos_best_list)
126 | n_best_scores = n_best_scores + len(optimizer.score_best_list)
127 |
128 | print("\n optimizer", optimizer)
129 | print(" n_new_positions", optimizer.pos_new_list)
130 | print(" n_new_scores", optimizer.score_new_list)
131 |
132 | assert n_new_positions == n_iter
133 | assert n_new_scores == n_iter
134 |
135 | assert n_current_positions == n_current_scores
136 | assert n_current_positions <= n_new_positions
137 |
138 | assert n_best_positions == n_best_scores
139 | assert n_best_positions <= n_new_positions
140 |
141 | assert n_new_positions == n_new_scores
142 |
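Constraints are plain predicates over the parameter dict: a candidate position is only evaluated if every predicate returns True, which is why the bookkeeping above can demand that all n_iter new positions fall inside the feasible region. A compact sketch:

    import numpy as np
    from hyperactive import Hyperactive

    def objective_function(para):
        return -para["x1"] * para["x1"]

    search_space = {"x1": list(np.arange(-10, 10, 0.1))}

    # keep only candidates with -5 < x1 < 5
    constraints = [lambda para: para["x1"] > -5, lambda para: para["x1"] < 5]

    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=50, constraints=constraints)
    hyper.run()

    x1 = hyper.search_data(objective_function)["x1"].values
    assert np.all((x1 > -5) & (x1 < 5))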
--------------------------------------------------------------------------------
/tests/test_empty_output/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/test_empty_output/__init__.py
--------------------------------------------------------------------------------
/tests/test_empty_output/non_verbose.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 |
4 |
5 | def ackley_function(para):
6 | x, y = para["x"], para["y"]
7 |
8 | loss = (
9 | -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
10 | - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
11 | + np.exp(1)
12 | + 20
13 | )
14 |
15 | return -loss
16 |
17 |
18 | search_space = {
19 | "x": list(np.arange(-10, 10, 0.01)),
20 | "y": list(np.arange(-10, 10, 0.01)),
21 | }
22 |
23 |
24 | hyper = Hyperactive(verbosity=False)
25 | hyper.add_search(ackley_function, search_space, n_iter=30, memory=True)
26 | hyper.run()
27 |
--------------------------------------------------------------------------------
/tests/test_empty_output/test_empty_output.py:
--------------------------------------------------------------------------------
1 | import os, sys, subprocess, pytest
2 |
3 |
4 | if sys.platform.startswith("win"):
5 | pytest.skip("skip these tests for windows", allow_module_level=True)
6 |
7 |
8 | here = os.path.dirname(os.path.abspath(__file__))
9 |
10 | verbose_file = os.path.join(here, "verbose.py")
11 | non_verbose_file = os.path.join(here, "non_verbose.py")
12 |
13 |
14 | def _run_subprocess(script):
15 | output = []
16 | process = subprocess.Popen(
17 | [sys.executable, "-u", script],
18 | stdout=subprocess.PIPE,
19 | stderr=subprocess.PIPE,
20 | text=True,
21 | bufsize=1, # Line buffered
22 | env={**os.environ, "PYTHONUNBUFFERED": "1"},
23 | )
24 | # Read output line by line
25 | while True:
26 | line = process.stdout.readline()
27 | if line:
28 | output.append(line)
29 | if not line and process.poll() is not None:
30 | break
31 |
32 | return "".join(output), process.stderr.read()
33 |
34 |
35 | def test_empty_output():
36 | stdout_verb, stderr_verb = _run_subprocess(verbose_file)
37 | stdout_non_verb, stderr_non_verb = _run_subprocess(non_verbose_file)
38 |
39 | print("\n stdout_verb \n", stdout_verb, "\n")
40 | print("\n stderr_verb \n", stderr_verb, "\n")
41 |
42 | print("\n stdout_non_verb \n", stdout_non_verb, "\n")
43 | print("\n stderr_non_verb \n", stderr_non_verb, "\n")
44 |
45 | assert "Results:" in stdout_verb
46 | assert not stdout_non_verb
47 |
--------------------------------------------------------------------------------
/tests/test_empty_output/verbose.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import numpy as np
3 | from hyperactive import Hyperactive
4 |
5 |
6 | def ackley_function(para):
7 | x, y = para["x"], para["y"]
8 |
9 | loss = (
10 | -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
11 | - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
12 | + np.exp(1)
13 | + 20
14 | )
15 |
16 | return -loss
17 |
18 |
19 | search_space = {
20 | "x": list(np.arange(-10, 10, 0.01)),
21 | "y": list(np.arange(-10, 10, 0.01)),
22 | }
23 |
24 |
25 | hyper = Hyperactive()
26 | hyper.add_search(ackley_function, search_space, n_iter=30, memory=True)
27 | hyper.run()
28 |
29 | sys.stdout.flush()
30 |
--------------------------------------------------------------------------------
/tests/test_initializers.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 |
4 |
5 | def objective_function(opt):
6 | score = -opt["x1"] * opt["x1"]
7 | return score
8 |
9 |
10 | search_space = {
11 | "x1": list(np.arange(-100, 101, 1)),
12 | }
13 |
14 |
15 | def test_initialize_warm_start_0():
16 | init = {
17 | "x1": 0,
18 | }
19 |
20 | initialize = {"warm_start": [init]}
21 |
22 | hyper = Hyperactive()
23 | hyper.add_search(
24 | objective_function,
25 | search_space,
26 | n_iter=1,
27 | initialize=initialize,
28 | )
29 | hyper.run()
30 |
31 | assert abs(hyper.best_score(objective_function)) < 0.001
32 |
33 |
34 | def test_initialize_warm_start_1():
35 | search_space = {
36 | "x1": list(np.arange(-10, 10, 1)),
37 | }
38 | init = {
39 | "x1": -10,
40 | }
41 |
42 | initialize = {"warm_start": [init]}
43 |
44 | hyper = Hyperactive()
45 | hyper.add_search(
46 | objective_function,
47 | search_space,
48 | n_iter=1,
49 | initialize=initialize,
50 | )
51 | hyper.run()
52 |
53 | assert hyper.best_para(objective_function) == init
54 |
55 |
56 | def test_initialize_vertices():
57 | initialize = {"vertices": 2}
58 |
59 | hyper = Hyperactive()
60 | hyper.add_search(
61 | objective_function,
62 | search_space,
63 | n_iter=2,
64 | initialize=initialize,
65 | )
66 | hyper.run()
67 |
68 | assert abs(hyper.best_score(objective_function) + 10000) < 0.001
69 |
70 |
71 | def test_initialize_grid_0():
72 | search_space = {
73 | "x1": list(np.arange(-1, 2, 1)),
74 | }
75 | initialize = {"grid": 1}
76 |
77 | hyper = Hyperactive()
78 | hyper.add_search(
79 | objective_function,
80 | search_space,
81 | n_iter=1,
82 | initialize=initialize,
83 | )
84 | hyper.run()
85 |
86 | assert abs(hyper.best_score(objective_function)) < 0.001
87 |
88 |
89 | def test_initialize_grid_1():
90 | search_space = {
91 | "x1": list(np.arange(-2, 3, 1)),
92 | }
93 |
94 | initialize = {"grid": 1}
95 |
96 | hyper = Hyperactive()
97 | hyper.add_search(
98 | objective_function,
99 | search_space,
100 | n_iter=1,
101 | initialize=initialize,
102 | )
103 | hyper.run()
104 |
105 | assert hyper.best_score(objective_function) > -1.001
106 |
107 |
108 | def test_initialize_all_0():
109 | search_space = {
110 | "x1": list(np.arange(-2, 3, 1)),
111 | }
112 |
113 | initialize = {"grid": 100, "vertices": 100, "random": 100}
114 |
115 | hyper = Hyperactive()
116 | hyper.add_search(
117 | objective_function,
118 | search_space,
119 | n_iter=300,
120 | initialize=initialize,
121 | )
122 | hyper.run()
123 |
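The initialize dict mixes named strategies: warm_start takes a list of explicit parameter dicts, while vertices, grid and random take counts of generated starting points, and the counts together occupy the first iterations of the run (test_initialize_all_0 budgets 300 iterations for 100 of each). A sketch combining them, assuming the keys can be mixed freely as the last test suggests:

    import numpy as np
    from hyperactive import Hyperactive

    def objective_function(opt):
        return -opt["x1"] * opt["x1"]

    search_space = {"x1": list(np.arange(-100, 101, 1))}

    initialize = {
        "warm_start": [{"x1": 0}],  # explicit starting parameter dicts
        "vertices": 2,              # corner points of the search space
        "grid": 2,                  # points of a coarse grid
        "random": 2,                # uniformly sampled points
    }

    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=20, initialize=initialize)
    hyper.run()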
--------------------------------------------------------------------------------
/tests/test_issues/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/test_issues/__init__.py
--------------------------------------------------------------------------------
/tests/test_issues/test_issue_25.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 |
4 | from hyperactive import Hyperactive
5 |
6 |
7 | def test_issue_25():
8 | # set a path to save the dataframe
9 | path = "./search_data.csv"
10 | search_space = {
11 | "n_neighbors": list(range(1, 50)),
12 | }
13 |
14 | # get para names from search space + the score
15 | para_names = list(search_space.keys()) + ["score"]
16 |
17 | # init empty pandas dataframe
18 | search_data = pd.DataFrame(columns=para_names)
19 | search_data.to_csv(path, index=False)
20 |
21 | def objective_function(para):
22 | # always return nan to reproduce the nan-score issue
23 | score = np.nan
24 |
25 | # you can access the entire dictionary from "para"
26 | parameter_dict = para.para_dict
27 |
28 | # save the score in the copy of the dictionary
29 | parameter_dict["score"] = score
30 |
31 | # append parameter dictionary to pandas dataframe
32 | search_data = pd.read_csv(path, na_values="nan")
33 | search_data_new = pd.DataFrame(
34 | parameter_dict, columns=para_names, index=[0]
35 | )
36 |
37 | # concat replaces the deprecated DataFrame.append
38 | search_data = pd.concat(
39 | [search_data, search_data_new], ignore_index=True
40 | )
41 |
42 | search_data.to_csv(path, index=False, na_rep="nan")
43 |
44 | return score
45 |
46 | hyper0 = Hyperactive()
47 | hyper0.add_search(objective_function, search_space, n_iter=50)
48 | hyper0.run()
49 |
50 | search_data_0 = pd.read_csv(path, na_values="nan")
51 | """
52 | the second run should be much faster than before,
53 | because Hyperactive already knows most parameters/scores
54 | """
55 | hyper1 = Hyperactive()
56 | hyper1.add_search(
57 | objective_function,
58 | search_space,
59 | n_iter=50,
60 | memory_warm_start=search_data_0,
61 | )
62 | hyper1.run()
63 |
--------------------------------------------------------------------------------
/tests/test_issues/test_issue_29.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_diabetes
2 | from sklearn.tree import DecisionTreeRegressor
3 | from sklearn.model_selection import cross_val_score
4 |
5 | from hyperactive import Hyperactive
6 |
7 |
8 | def test_issue_29():
9 | data = load_diabetes()
10 | X, y = data.data, data.target
11 |
12 | def model(para):
13 | dtr = DecisionTreeRegressor(
14 | min_samples_split=para["min_samples_split"],
15 | max_depth=para["max_depth"],
16 | )
17 | scores = cross_val_score(dtr, X, y, cv=3)
18 |
19 | print(
20 | "Iteration:",
21 | para.optimizer.nth_iter,
22 | " Best score",
23 | para.optimizer.best_score,
24 | )
25 |
26 | return scores.mean()
27 |
28 | search_space = {
29 | "min_samples_split": list(range(2, 12)),
30 | "max_depth": list(range(2, 12)),
31 | }
32 |
33 | hyper = Hyperactive()
34 | hyper.add_search(model, search_space, n_iter=20)
35 | hyper.run()
36 |
--------------------------------------------------------------------------------
/tests/test_issues/test_issue_34.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 |
4 | """ --- test search spaces with mixed int/float types --- """
5 | n_iter = 100
6 |
7 |
8 | def test_mixed_type_search_space_0():
9 | def objective_function(para):
10 | assert isinstance(para["x1"], int)
11 |
12 | return 1
13 |
14 | search_space = {
15 | "x1": list(range(10, 20)),
16 | }
17 |
18 | hyper = Hyperactive()
19 | hyper.add_search(objective_function, search_space, n_iter=n_iter)
20 | hyper.run()
21 |
22 |
23 | def test_mixed_type_search_space_1():
24 | def objective_function(para):
25 | assert isinstance(para["x2"], float)
26 |
27 | return 1
28 |
29 | search_space = {
30 | "x2": list(np.arange(1, 2, 0.1)),
31 | }
32 |
33 | hyper = Hyperactive()
34 | hyper.add_search(objective_function, search_space, n_iter=n_iter)
35 | hyper.run()
36 |
37 |
38 | def test_mixed_type_search_space_2():
39 | def objective_function(para):
40 | assert isinstance(para["x1"], int)
41 | assert isinstance(para["x2"], float)
42 |
43 | return 1
44 |
45 | search_space = {
46 | "x1": list(range(10, 20)),
47 | "x2": list(np.arange(1, 2, 0.1)),
48 | }
49 |
50 | hyper = Hyperactive()
51 | hyper.add_search(objective_function, search_space, n_iter=n_iter)
52 | hyper.run()
53 |
54 |
55 | def test_mixed_type_search_space_3():
56 | def objective_function(para):
57 | assert isinstance(para["x1"], int)
58 | assert isinstance(para["x2"], float)
59 | assert isinstance(para["x3"], float)
60 | assert isinstance(para["x4"], str)
61 |
62 | return 1
63 |
64 | search_space = {
65 | "x1": list(range(10, 20)),
66 | "x2": list(np.arange(1, 2, 0.1)),
67 | "x3": list(np.arange(1, 2, 0.1)),
68 | "x4": ["str1", "str2", "str3"],
69 | }
70 |
71 | hyper = Hyperactive()
72 | hyper.add_search(objective_function, search_space, n_iter=n_iter)
73 | hyper.run()
74 |
--------------------------------------------------------------------------------
/tests/test_max_score.py:
--------------------------------------------------------------------------------
1 | import time
2 | import numpy as np
3 | from sklearn.datasets import load_breast_cancer
4 | from sklearn.model_selection import cross_val_score
5 | from sklearn.tree import DecisionTreeClassifier
6 |
7 | from hyperactive import Hyperactive
8 | from hyperactive.optimizers import (
9 | RandomSearchOptimizer,
10 | HillClimbingOptimizer,
11 | )
12 |
13 |
14 | def objective_function(para):
15 | score = -para["x1"] * para["x1"]
16 | return score
17 |
18 |
19 | search_space = {
20 | "x1": list(np.arange(0, 100000, 0.1)),
21 | }
22 |
23 |
24 | def test_max_score_0():
25 | def objective_function(para):
26 | score = -para["x1"] * para["x1"]
27 | return score
28 |
29 | search_space = {
30 | "x1": list(np.arange(0, 100, 0.1)),
31 | }
32 |
33 | max_score = -9999
34 |
35 | opt = HillClimbingOptimizer(
36 | epsilon=0.01,
37 | rand_rest_p=0,
38 | )
39 |
40 | hyper = Hyperactive()
41 | hyper.add_search(
42 | objective_function,
43 | search_space,
44 | optimizer=opt,
45 | n_iter=100000,
46 | initialize={"warm_start": [{"x1": 99}]},
47 | max_score=max_score,
48 | )
49 | hyper.run()
50 |
51 | print("\n Results head \n", hyper.search_data(objective_function).head())
52 | print("\n Results tail \n", hyper.search_data(objective_function).tail())
53 |
54 | print("\nN iter:", len(hyper.search_data(objective_function)))
55 |
56 | assert -100 > hyper.best_score(objective_function) > max_score
57 |
58 |
59 | def test_max_score_1():
60 | def objective_function(para):
61 | score = -para["x1"] * para["x1"]
62 | time.sleep(0.01)
63 | return score
64 |
65 | search_space = {
66 | "x1": list(np.arange(0, 100, 0.1)),
67 | }
68 |
69 | max_score = -9999
70 |
71 | c_time = time.perf_counter()
72 | hyper = Hyperactive()
73 | hyper.add_search(
74 | objective_function,
75 | search_space,
76 | n_iter=100000,
77 | initialize={"warm_start": [{"x1": 99}]},
78 | max_score=max_score,
79 | )
80 | hyper.run()
81 | diff_time = time.perf_counter() - c_time
82 |
83 | print("\n Results head \n", hyper.search_data(objective_function).head())
84 | print("\n Results tail \n", hyper.search_data(objective_function).tail())
85 |
86 | print("\nN iter:", len(hyper.search_data(objective_function)))
87 |
88 | assert diff_time < 1
89 |
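max_score is a stop criterion, not a target: the run terminates as soon as an evaluation beats the threshold, however much of the n_iter budget remains, which is what keeps test_max_score_1 under its one-second limit. A minimal sketch:

    import numpy as np
    from hyperactive import Hyperactive

    def objective_function(para):
        return -para["x1"] * para["x1"]

    search_space = {"x1": list(np.arange(0, 100, 0.1))}

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=100000,    # nominal budget, mostly unused
        max_score=-9999,  # stop at the first score above this threshold
    )
    hyper.run()
    assert hyper.best_score(objective_function) > -9999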
--------------------------------------------------------------------------------
/tests/test_max_time.py:
--------------------------------------------------------------------------------
1 | import time
2 | import numpy as np
3 | from hyperactive import Hyperactive
4 |
5 |
6 | def objective_function(para):
7 | score = -para["x1"] * para["x1"]
8 | return score
9 |
10 |
11 | search_space = {
12 | "x1": list(np.arange(0, 100000, 1)),
13 | }
14 |
15 |
16 | def test_max_time_0():
17 | c_time1 = time.perf_counter()
18 | hyper = Hyperactive()
19 | hyper.add_search(objective_function, search_space, n_iter=1000000)
20 | hyper.run(max_time=0.1)
21 | diff_time1 = time.perf_counter() - c_time1
22 |
23 | assert diff_time1 < 1
24 |
25 |
26 | def test_max_time_1():
27 | c_time1 = time.perf_counter()
28 | hyper = Hyperactive()
29 | hyper.add_search(objective_function, search_space, n_iter=1000000)
30 | hyper.run(max_time=1)
31 | diff_time1 = time.perf_counter() - c_time1
32 |
33 | assert 0.3 < diff_time1 < 2
34 |
--------------------------------------------------------------------------------
/tests/test_obj_func_arg.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from hyperactive import Hyperactive
3 |
4 |
5 | search_space = {
6 | "x1": list(np.arange(0, 100, 1)),
7 | }
8 |
9 |
10 | def test_argument_0():
11 | def objective_function(para):
12 |
13 | print("\npara.nth_iter", para.nth_iter)
14 | print("nth_iter_local", para.pass_through["nth_iter_local"])
15 |
16 | assert para.nth_iter == para.pass_through["nth_iter_local"]
17 |
18 | para.pass_through["nth_iter_local"] += 1
19 |
20 | return 0
21 |
22 | hyper = Hyperactive()
23 | hyper.add_search(
24 | objective_function,
25 | search_space,
26 | n_iter=100,
27 | pass_through={"nth_iter_local": 0},
28 | memory=False,
29 | )
30 | hyper.run()
31 |
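Besides the counter trick above, pass_through is the supported way to hand shared state into the objective without recomputing it per iteration: the dict is created once and the same object is visible (and mutable) in every call. A sketch with a hypothetical precomputed lookup table:

    import numpy as np
    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(0, 100, 1))}

    # built once, shared by every objective call
    lookup = {i: i * i for i in range(100)}

    def objective_function(para):
        return -para.pass_through["lookup"][int(para["x1"])]

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=50,
        pass_through={"lookup": lookup},
    )
    hyper.run()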
--------------------------------------------------------------------------------
/tests/test_optimization_strategies/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/test_optimization_strategies/__init__.py
--------------------------------------------------------------------------------
/tests/test_optimization_strategies/_parametrize.py:
--------------------------------------------------------------------------------
1 | from hyperactive.optimizers import (
2 | HillClimbingOptimizer,
3 | StochasticHillClimbingOptimizer,
4 | RepulsingHillClimbingOptimizer,
5 | SimulatedAnnealingOptimizer,
6 | DownhillSimplexOptimizer,
7 | RandomSearchOptimizer,
8 | GridSearchOptimizer,
9 | RandomRestartHillClimbingOptimizer,
10 | RandomAnnealingOptimizer,
11 | PowellsMethod,
12 | PatternSearch,
13 | ParallelTemperingOptimizer,
14 | ParticleSwarmOptimizer,
15 | SpiralOptimization,
16 | EvolutionStrategyOptimizer,
17 | BayesianOptimizer,
18 | LipschitzOptimizer,
19 | DirectAlgorithm,
20 | TreeStructuredParzenEstimators,
21 | ForestOptimizer,
22 | )
23 |
24 |
25 | optimizers = (
26 | "Optimizer",
27 | [
28 | (HillClimbingOptimizer),
29 | (StochasticHillClimbingOptimizer),
30 | (RepulsingHillClimbingOptimizer),
31 | (SimulatedAnnealingOptimizer),
32 | (DownhillSimplexOptimizer),
33 | (RandomSearchOptimizer),
34 | (GridSearchOptimizer),
35 | (RandomRestartHillClimbingOptimizer),
36 | (RandomAnnealingOptimizer),
37 | (PowellsMethod),
38 | (PatternSearch),
39 | (ParallelTemperingOptimizer),
40 | (ParticleSwarmOptimizer),
41 | (SpiralOptimization),
42 | (EvolutionStrategyOptimizer),
43 | (BayesianOptimizer),
44 | (LipschitzOptimizer),
45 | (DirectAlgorithm),
46 | (TreeStructuredParzenEstimators),
47 | (ForestOptimizer),
48 | ],
49 | )
50 |
51 |
52 | optimizers_strat = (
53 | "Optimizer_strat",
54 | [
55 | (HillClimbingOptimizer),
56 | (StochasticHillClimbingOptimizer),
57 | (RepulsingHillClimbingOptimizer),
58 | (SimulatedAnnealingOptimizer),
59 | (DownhillSimplexOptimizer),
60 | (RandomSearchOptimizer),
61 | (GridSearchOptimizer),
62 | (RandomRestartHillClimbingOptimizer),
63 | (RandomAnnealingOptimizer),
64 | (PowellsMethod),
65 | (PatternSearch),
66 | (ParallelTemperingOptimizer),
67 | (ParticleSwarmOptimizer),
68 | (SpiralOptimization),
69 | (EvolutionStrategyOptimizer),
70 | (BayesianOptimizer),
71 | (LipschitzOptimizer),
72 | (DirectAlgorithm),
73 | (TreeStructuredParzenEstimators),
74 | (ForestOptimizer),
75 | ],
76 | )
77 |
78 |
79 | optimizers_non_smbo = (
80 | "Optimizer_non_smbo",
81 | [
82 | (HillClimbingOptimizer),
83 | (StochasticHillClimbingOptimizer),
84 | (RepulsingHillClimbingOptimizer),
85 | (SimulatedAnnealingOptimizer),
86 | (DownhillSimplexOptimizer),
87 | (RandomSearchOptimizer),
88 | (GridSearchOptimizer),
89 | (RandomRestartHillClimbingOptimizer),
90 | (RandomAnnealingOptimizer),
91 | (PowellsMethod),
92 | (PatternSearch),
93 | (ParallelTemperingOptimizer),
94 | (ParticleSwarmOptimizer),
95 | (SpiralOptimization),
96 | (EvolutionStrategyOptimizer),
97 | ],
98 | )
99 |
100 |
101 | optimizers_smbo = (
102 | "Optimizer_smbo",
103 | [
104 | (BayesianOptimizer),
105 | (LipschitzOptimizer),
106 | (DirectAlgorithm),
107 | (TreeStructuredParzenEstimators),
108 | (ForestOptimizer),
109 | ],
110 | )
111 |
--------------------------------------------------------------------------------
/tests/test_optimization_strategies/test_early_stopping.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
7 | from hyperactive.optimizers import RandomSearchOptimizer
8 |
9 | from ._parametrize import optimizers
10 |
11 |
12 | n_iter_no_change_parametr = (
13 | "n_iter_no_change",
14 | [
15 | (5),
16 | (10),
17 | (15),
18 | ],
19 | )
20 |
21 |
22 | @pytest.mark.parametrize(*n_iter_no_change_parametr)
23 | @pytest.mark.parametrize(*optimizers)
24 | def test_strategy_early_stopping_0(Optimizer, n_iter_no_change):
25 | def objective_function(para):
26 | score = -para["x1"] * para["x1"]
27 | return score
28 |
29 | search_space = {
30 | "x1": list(np.arange(0, 100, 0.1)),
31 | }
32 |
33 | # stop when the best score stagnates for n_iter_no_change iterations
34 | early_stopping = {
35 | "n_iter_no_change": n_iter_no_change,
36 | }
37 |
38 | optimizer1 = Optimizer()
39 | optimizer2 = RandomSearchOptimizer()
40 |
41 | opt_strat = CustomOptimizationStrategy()
42 | opt_strat.add_optimizer(optimizer1, duration=0.9, early_stopping=early_stopping)
43 | opt_strat.add_optimizer(optimizer2, duration=0.1)
44 |
45 | n_iter = 30
46 |
47 | hyper = Hyperactive()
48 | hyper.add_search(
49 | objective_function,
50 | search_space,
51 | optimizer=opt_strat,
52 | n_iter=n_iter,
53 | initialize={"warm_start": [{"x1": 0}]},
54 | )
55 | hyper.run()
56 |
57 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
58 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
59 |
60 | search_data = optimizer1.search_data
61 | n_performed_iter = len(search_data)
62 |
63 | print("\n n_performed_iter \n", n_performed_iter)
64 | print("\n n_iter_no_change \n", n_iter_no_change)
65 |
66 | assert n_performed_iter == (n_iter_no_change + 1)
67 |
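The arithmetic behind the assert: the warm start places the first optimizer at the optimum, so no later iteration improves the score, and early stopping fires after exactly n_iter_no_change stagnant iterations, leaving n_iter_no_change + 1 rows of search data. The setup in isolation:

    import numpy as np
    from hyperactive import Hyperactive
    from hyperactive.optimizers import HillClimbingOptimizer, RandomSearchOptimizer
    from hyperactive.optimizers.strategies import CustomOptimizationStrategy

    def objective_function(para):
        return -para["x1"] * para["x1"]

    search_space = {"x1": list(np.arange(0, 100, 0.1))}

    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(
        HillClimbingOptimizer(),
        duration=0.9,
        early_stopping={"n_iter_no_change": 5},  # give up after 5 stagnant iterations
    )
    opt_strat.add_optimizer(RandomSearchOptimizer(), duration=0.1)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=opt_strat,
        n_iter=30,
        initialize={"warm_start": [{"x1": 0}]},  # start at the optimum
    )
    hyper.run()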
--------------------------------------------------------------------------------
/tests/test_optimization_strategies/test_search_space_pruning.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import numpy as np
4 |
5 |
6 | from hyperactive import Hyperactive
7 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
8 | from hyperactive.optimizers import GridSearchOptimizer
9 |
10 | from ._parametrize import optimizers_smbo
11 |
12 |
13 | @pytest.mark.parametrize(*optimizers_smbo)
14 | def test_memory_Warm_start_smbo_0(Optimizer_smbo):
15 | def objective_function(opt):
16 | time.sleep(0.01)
17 | score = -(opt["x1"] * opt["x1"])
18 | return score
19 |
20 | search_space = {
21 | "x1": list(np.arange(0, 100, 1)),
22 | }
23 |
24 | optimizer1 = GridSearchOptimizer()
25 | optimizer2 = Optimizer_smbo()
26 |
27 | opt_strat = CustomOptimizationStrategy()
28 |
29 | duration_1 = 0.8
30 | duration_2 = 0.2
31 |
32 | opt_strat.add_optimizer(optimizer1, duration=duration_1)
33 | opt_strat.add_optimizer(optimizer2, duration=duration_2)
34 |
35 | n_iter = 20
36 |
37 | hyper = Hyperactive()
38 | hyper.add_search(
39 | objective_function,
40 | search_space,
41 | optimizer=opt_strat,
42 | n_iter=n_iter,
43 | memory=True,
44 | )
45 | hyper.run()
46 |
47 | search_data = hyper.search_data(objective_function)
48 |
49 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
50 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
51 |
52 | assert len(search_data) == n_iter
53 |
54 | assert len(optimizer1.search_data) == int(n_iter * duration_1)
55 | assert len(optimizer2.search_data) == int(n_iter * duration_2)
56 |
57 | assert optimizer1.best_score <= optimizer2.best_score
58 |
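The duration values are fractions of the shared n_iter budget, so the per-optimizer iteration counts asserted above follow directly:

    n_iter = 20
    duration_1, duration_2 = 0.8, 0.2

    assert int(n_iter * duration_1) == 16  # iterations run by the grid search
    assert int(n_iter * duration_2) == 4   # iterations left for the SMBO optimizer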
--------------------------------------------------------------------------------
/tests/test_optimizers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/test_optimizers/__init__.py
--------------------------------------------------------------------------------
/tests/test_optimizers/_parametrize.py:
--------------------------------------------------------------------------------
1 | from hyperactive.optimizers import (
2 | HillClimbingOptimizer,
3 | StochasticHillClimbingOptimizer,
4 | RepulsingHillClimbingOptimizer,
5 | SimulatedAnnealingOptimizer,
6 | DownhillSimplexOptimizer,
7 | RandomSearchOptimizer,
8 | GridSearchOptimizer,
9 | RandomRestartHillClimbingOptimizer,
10 | RandomAnnealingOptimizer,
11 | PowellsMethod,
12 | PatternSearch,
13 | ParallelTemperingOptimizer,
14 | ParticleSwarmOptimizer,
15 | SpiralOptimization,
16 | GeneticAlgorithmOptimizer,
17 | EvolutionStrategyOptimizer,
18 | DifferentialEvolutionOptimizer,
19 | BayesianOptimizer,
20 | LipschitzOptimizer,
21 | DirectAlgorithm,
22 | TreeStructuredParzenEstimators,
23 | ForestOptimizer,
24 | )
25 |
26 |
27 | optimizers = (
28 | "Optimizer",
29 | [
30 | (HillClimbingOptimizer),
31 | (StochasticHillClimbingOptimizer),
32 | (RepulsingHillClimbingOptimizer),
33 | (SimulatedAnnealingOptimizer),
34 | (DownhillSimplexOptimizer),
35 | (RandomSearchOptimizer),
36 | (GridSearchOptimizer),
37 | (RandomRestartHillClimbingOptimizer),
38 | (RandomAnnealingOptimizer),
39 | (PowellsMethod),
40 | (PatternSearch),
41 | (ParallelTemperingOptimizer),
42 | (ParticleSwarmOptimizer),
43 | (SpiralOptimization),
44 | (GeneticAlgorithmOptimizer),
45 | (EvolutionStrategyOptimizer),
46 | (DifferentialEvolutionOptimizer),
47 | (BayesianOptimizer),
48 | (LipschitzOptimizer),
49 | (DirectAlgorithm),
50 | (TreeStructuredParzenEstimators),
51 | (ForestOptimizer),
52 | ],
53 | )
54 |
--------------------------------------------------------------------------------
/tests/test_optimizers/test_best_results.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
5 | from hyperactive import Hyperactive
6 | from ._parametrize import optimizers
7 |
8 |
9 | def objective_function(opt):
10 | score = -opt["x1"] * opt["x1"]
11 | return score
12 |
13 |
14 | def objective_function_m5(opt):
15 | score = -(opt["x1"] - 5) * (opt["x1"] - 5)
16 | return score
17 |
18 |
19 | def objective_function_p5(opt):
20 | score = -(opt["x1"] + 5) * (opt["x1"] + 5)
21 | return score
22 |
23 |
24 | search_space_0 = {"x1": list(np.arange(-100, 101, 1))}
25 | search_space_1 = {"x1": list(np.arange(0, 101, 1))}
26 | search_space_2 = {"x1": list(np.arange(-100, 1, 1))}
27 |
28 | search_space_3 = {"x1": list(np.arange(-10, 11, 0.1))}
29 | search_space_4 = {"x1": list(np.arange(0, 11, 0.1))}
30 | search_space_5 = {"x1": list(np.arange(-10, 1, 0.1))}
31 |
32 | search_space_6 = {"x1": list(np.arange(-0.0000000003, 0.0000000003, 0.0000000001))}
33 | search_space_7 = {"x1": list(np.arange(0, 0.0000000003, 0.0000000001))}
34 | search_space_8 = {"x1": list(np.arange(-0.0000000003, 0, 0.0000000001))}
35 |
36 | objective_para = (
37 | "objective",
38 | [
39 | (objective_function),
40 | (objective_function_m5),
41 | (objective_function_p5),
42 | ],
43 | )
44 |
45 | search_space_para = (
46 | "search_space",
47 | [
48 | (search_space_0),
49 | (search_space_1),
50 | (search_space_2),
51 | (search_space_3),
52 | (search_space_4),
53 | (search_space_5),
54 | (search_space_6),
55 | (search_space_7),
56 | (search_space_8),
57 | ],
58 | )
59 |
60 |
61 | @pytest.mark.parametrize(*objective_para)
62 | @pytest.mark.parametrize(*search_space_para)
63 | @pytest.mark.parametrize(*optimizers)
64 | def test_best_results_0(Optimizer, search_space, objective):
65 |
66 | objective_function = objective
67 |
68 | initialize = {"vertices": 2}
69 |
70 | hyper = Hyperactive()
71 | hyper.add_search(
72 | objective_function,
73 | search_space,
74 | optimizer=Optimizer(),
75 | n_iter=10,
76 | memory=False,
77 | initialize=initialize,
78 | )
79 | hyper.run()
80 |
81 | assert hyper.best_score(objective_function) == objective_function(
82 | hyper.best_para(objective_function)
83 | )
84 |
85 |
86 | @pytest.mark.parametrize(*objective_para)
87 | @pytest.mark.parametrize(*search_space_para)
88 | @pytest.mark.parametrize(*optimizers)
89 | def test_best_results_1(Optimizer, search_space, objective):
90 |
91 | objective_function = objective
92 |
93 | initialize = {"vertices": 2}
94 |
95 | hyper = Hyperactive()
96 | hyper.add_search(
97 | objective_function,
98 | search_space,
99 | optimizer=Optimizer(),
100 | n_iter=10,
101 | memory=False,
102 | initialize=initialize,
103 | )
104 | hyper.run()
105 |
106 | assert hyper.best_para(objective_function)["x1"] in list(
107 | hyper.search_data(objective_function)["x1"]
108 | )
109 |
--------------------------------------------------------------------------------
/tests/test_optimizers/test_gfo_wrapper.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 | from tqdm import tqdm
5 | from ._parametrize import optimizers
6 | from hyperactive.search_space import SearchSpace
7 |
8 |
9 | def objective_function(opt):
10 | score = -opt["x1"] * opt["x1"]
11 | return score
12 |
13 |
14 | def objective_function_m5(opt):
15 | score = -(opt["x1"] - 5) * (opt["x1"] - 5)
16 | return score
17 |
18 |
19 | def objective_function_p5(opt):
20 | score = -(opt["x1"] + 5) * (opt["x1"] + 5)
21 | return score
22 |
23 |
24 | search_space_0 = {"x1": list(np.arange(-100, 101, 1))}
25 | search_space_1 = {"x1": list(np.arange(0, 101, 1))}
26 | search_space_2 = {"x1": list(np.arange(-100, 1, 1))}
27 |
28 | search_space_3 = {"x1": list(np.arange(-10, 11, 0.1))}
29 | search_space_4 = {"x1": list(np.arange(0, 11, 0.1))}
30 | search_space_5 = {"x1": list(np.arange(-10, 1, 0.1))}
31 |
32 | search_space_6 = {"x1": list(np.arange(-0.0000000003, 0.0000000003, 0.0000000001))}
33 | search_space_7 = {"x1": list(np.arange(0, 0.0000000003, 0.0000000001))}
34 | search_space_8 = {"x1": list(np.arange(-0.0000000003, 0, 0.0000000001))}
35 |
36 | objective_para = (
37 | "objective",
38 | [
39 | (objective_function),
40 | (objective_function_m5),
41 | (objective_function_p5),
42 | ],
43 | )
44 |
45 | search_space_para = (
46 | "search_space",
47 | [
48 | (search_space_0),
49 | (search_space_1),
50 | (search_space_2),
51 | (search_space_3),
52 | (search_space_4),
53 | (search_space_5),
54 | (search_space_6),
55 | (search_space_7),
56 | (search_space_8),
57 | ],
58 | )
59 |
60 |
61 | @pytest.mark.parametrize(*objective_para)
62 | @pytest.mark.parametrize(*search_space_para)
63 | @pytest.mark.parametrize(*optimizers)
64 | def test_gfo_opt_wrapper_0(Optimizer, search_space, objective):
65 |
66 | objective_function = objective
67 |
68 | n_iter = 10
69 | s_space = SearchSpace(search_space)
70 |
71 | initialize = {"vertices": 2}
72 | constraints = []
73 | pass_through = {}
74 | callbacks = None
75 | catch = None
76 | max_score = None
77 | early_stopping = None
78 | random_state = None
79 | memory = None
80 | memory_warm_start = None
81 | verbosity = ["progress_bar", "print_results", "print_times"]
82 |
83 | opt = Optimizer()
84 |
85 | opt.setup_search(
86 | objective_function=objective_function,
87 | s_space=s_space,
88 | n_iter=n_iter,
89 | initialize=initialize,
90 | constraints=constraints,
91 | pass_through=pass_through,
92 | callbacks=callbacks,
93 | catch=catch,
94 | max_score=max_score,
95 | early_stopping=early_stopping,
96 | random_state=random_state,
97 | memory=memory,
98 | memory_warm_start=memory_warm_start,
99 | verbosity=verbosity,
100 | )
101 | opt.max_time = None
102 | opt.search(nth_process=0, p_bar=tqdm(total=n_iter))
103 |
104 | assert opt.best_score == objective_function(opt.best_para)
105 |
--------------------------------------------------------------------------------
/tests/test_optimizers/test_memory.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
5 | from hyperactive import Hyperactive
6 | from ._parametrize import optimizers
7 |
8 |
9 | def objective_function(opt):
10 | score = -opt["x1"] * opt["x1"]
11 | return score
12 |
13 |
14 | search_space = {"x1": list(np.arange(-10, 11, 1))}
15 |
16 |
17 | @pytest.mark.parametrize(*optimizers)
18 | def test_memory_0(Optimizer):
19 | optimizer = Optimizer()
20 |
21 | n_iter = 30
22 |
23 | hyper = Hyperactive()
24 | hyper.add_search(
25 | objective_function,
26 | search_space,
27 | optimizer=optimizer,
28 | n_iter=n_iter,
29 | n_jobs=2,
30 | )
31 | hyper.add_search(
32 | objective_function,
33 | search_space,
34 | optimizer=optimizer,
35 | n_iter=n_iter,
36 | n_jobs=2,
37 | )
38 | hyper.run()
39 |
--------------------------------------------------------------------------------
/tests/test_optimizers/test_optimization_strategies.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 |
4 |
5 | from hyperactive import Hyperactive
6 | from hyperactive.optimizers.strategies import CustomOptimizationStrategy
7 | from hyperactive.optimizers import HillClimbingOptimizer
8 |
9 | from ._parametrize import optimizers
10 |
11 |
12 | def objective_function(opt):
13 | score = -(opt["x1"] * opt["x1"] + opt["x2"] * opt["x2"])
14 | return score
15 |
16 |
17 | search_space = {
18 | "x1": list(np.arange(-3, 3, 1)),
19 | "x2": list(np.arange(-3, 3, 1)),
20 | }
21 |
22 |
23 | @pytest.mark.parametrize(*optimizers)
24 | def test_strategy_combinations_0(Optimizer):
25 | optimizer1 = Optimizer()
26 | optimizer2 = HillClimbingOptimizer()
27 |
28 | opt_strat = CustomOptimizationStrategy()
29 | opt_strat.add_optimizer(optimizer1, duration=0.5)
30 | opt_strat.add_optimizer(optimizer2, duration=0.5)
31 |
32 | n_iter = 4
33 |
34 | hyper = Hyperactive()
35 | hyper.add_search(
36 | objective_function,
37 | search_space,
38 | optimizer=opt_strat,
39 | n_iter=n_iter,
40 | memory=False,
41 | initialize={"random": 1},
42 | )
43 | hyper.run()
44 |
45 | search_data = hyper.search_data(objective_function)
46 |
47 | optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
48 | optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
49 |
50 | assert len(search_data) == n_iter
51 |
52 | assert len(optimizer1.search_data) == 2
53 | assert len(optimizer2.search_data) == 2
54 |
55 | assert optimizer1.best_score <= optimizer2.best_score
56 |
--------------------------------------------------------------------------------
/tests/test_warm_starts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonBlanke/Hyperactive/e6e7ce03f1412fb87b3c55ad19827bc844bd6e16/tests/test_warm_starts/__init__.py
--------------------------------------------------------------------------------
/tests/test_warm_starts/test_memory_warm_start.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest, sys
3 | import numpy as np
4 | import pandas as pd
5 |
6 | from hyperactive import Hyperactive
7 |
8 |
9 | if sys.platform.startswith("win"):
10 | pytest.skip("skip these tests for windows", allow_module_level=True)
11 |
12 |
13 | def func1():
14 | pass
15 |
16 |
17 | def func2():
18 | pass
19 |
20 |
21 | class class1:
22 | def __init__(self):
23 | pass
24 |
25 |
26 | class class2:
27 | def __init__(self):
28 | pass
29 |
30 |
31 | def class_f1():
32 | return class1
33 |
34 |
35 | def class_f2():
36 | return class2
37 |
38 |
39 | def numpy_f1():
40 | return np.array([0, 1])
41 |
42 |
43 | def numpy_f2():
44 | return np.array([1, 0])
45 |
46 |
47 | search_space = {
48 | "x0": list(range(-3, 3)),
49 | "x1": list(np.arange(-1, 1, 0.001)),
50 | "string0": ["str0", "str1"],
51 | "function0": [func1, func2],
52 | "class0": [class_f1, class_f2],
53 | "numpy0": [numpy_f1, numpy_f2],
54 | }
55 |
56 |
57 | def objective_function(opt):
58 | score = -opt["x1"] * opt["x1"]
59 | return score
60 |
61 |
62 | def test_memory_warm_start_0():
63 | hyper0 = Hyperactive()
64 | hyper0.add_search(objective_function, search_space, n_iter=15)
65 | hyper0.run()
66 |
67 | search_data0 = hyper0.search_data(objective_function)
68 |
69 | hyper1 = Hyperactive()
70 | hyper1.add_search(
71 | objective_function,
72 | search_space,
73 | n_iter=15,
74 | memory_warm_start=search_data0,
75 | )
76 | hyper1.run()
77 |
78 |
79 | def test_memory_warm_start_1():
80 | hyper0 = Hyperactive(distribution="pathos")
81 | hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
82 | hyper0.run()
83 |
84 | search_data0 = hyper0.search_data(objective_function)
85 |
86 | hyper1 = Hyperactive()
87 | hyper1.add_search(
88 | objective_function,
89 | search_space,
90 | n_iter=15,
91 | memory_warm_start=search_data0,
92 | )
93 | hyper1.run()
94 |
95 |
96 | def test_memory_warm_start_2():
97 | hyper0 = Hyperactive()
98 | hyper0.add_search(objective_function, search_space, n_iter=15)
99 | hyper0.run()
100 |
101 | search_data0 = hyper0.search_data(objective_function)
102 |
103 | hyper1 = Hyperactive(distribution="pathos")
104 | hyper1.add_search(
105 | objective_function,
106 | search_space,
107 | n_iter=15,
108 | n_jobs=2,
109 | memory_warm_start=search_data0,
110 | )
111 | hyper1.run()
112 |
113 |
114 | def test_memory_warm_start_3():
115 | hyper0 = Hyperactive(distribution="pathos")
116 | hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
117 | hyper0.run()
118 |
119 | search_data0 = hyper0.search_data(objective_function)
120 |
121 | hyper1 = Hyperactive(distribution="pathos")
122 | hyper1.add_search(
123 | objective_function,
124 | search_space,
125 | n_iter=15,
126 | n_jobs=2,
127 | memory_warm_start=search_data0,
128 | )
129 | hyper1.run()
130 |
--------------------------------------------------------------------------------
/tests/test_warm_starts/test_warm_start.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest, sys
3 | import numpy as np
4 | import pandas as pd
5 |
6 | from hyperactive import Hyperactive
7 |
8 |
9 | if sys.platform.startswith("win"):
10 | pytest.skip("skip these tests for windows", allow_module_level=True)
11 |
12 |
13 | def func1():
14 | pass
15 |
16 |
17 | def func2():
18 | pass
19 |
20 |
21 | class class1:
22 | def __init__(self):
23 | pass
24 |
25 |
26 | class class2:
27 | def __init__(self):
28 | pass
29 |
30 |
31 | def class_f1():
32 | return class1
33 |
34 |
35 | def class_f2():
36 | return class2
37 |
38 |
39 | def numpy_f1():
40 | return np.array([0, 1])
41 |
42 |
43 | def numpy_f2():
44 | return np.array([1, 0])
45 |
46 |
47 | search_space = {
48 | "x0": list(range(-3, 3)),
49 | "x1": list(np.arange(-1, 1, 0.001)),
50 | "string0": ["str0", "str1"],
51 | "function0": [func1, func2],
52 | "class0": [class_f1, class_f2],
53 | "numpy0": [numpy_f1, numpy_f2],
54 | }
55 |
56 |
57 | def objective_function(opt):
58 | score = -opt["x1"] * opt["x1"]
59 | return score
60 |
61 |
62 | def test_warm_start_0():
63 | hyper0 = Hyperactive()
64 | hyper0.add_search(objective_function, search_space, n_iter=15)
65 | hyper0.run()
66 |
67 | best_para0 = hyper0.best_para(objective_function)
68 |
69 | hyper1 = Hyperactive()
70 | hyper1.add_search(
71 | objective_function,
72 | search_space,
73 | n_iter=15,
74 | initialize={"warm_start": [best_para0]},
75 | )
76 | hyper1.run()
77 |
78 |
79 | def test_warm_start_1():
80 | hyper0 = Hyperactive(distribution="pathos")
81 | hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
82 | hyper0.run()
83 |
84 | best_para0 = hyper0.best_para(objective_function)
85 |
86 | hyper1 = Hyperactive()
87 | hyper1.add_search(
88 | objective_function,
89 | search_space,
90 | n_iter=15,
91 | initialize={"warm_start": [best_para0]},
92 | )
93 | hyper1.run()
94 |
95 |
96 | def test_warm_start_2():
97 | hyper0 = Hyperactive()
98 | hyper0.add_search(objective_function, search_space, n_iter=15)
99 | hyper0.run()
100 |
101 | best_para0 = hyper0.best_para(objective_function)
102 |
103 | hyper1 = Hyperactive(distribution="pathos")
104 | hyper1.add_search(
105 | objective_function,
106 | search_space,
107 | n_iter=15,
108 | n_jobs=2,
109 | initialize={"warm_start": [best_para0]},
110 | )
111 | hyper1.run()
112 |
113 |
114 | def test_warm_start_3():
115 | hyper0 = Hyperactive(distribution="pathos")
116 | hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
117 | hyper0.run()
118 |
119 | best_para0 = hyper0.best_para(objective_function)
120 |
121 | hyper1 = Hyperactive(distribution="pathos")
122 | hyper1.add_search(
123 | objective_function,
124 | search_space,
125 | n_iter=15,
126 | n_jobs=2,
127 | initialize={"warm_start": [best_para0]},
128 | )
129 | hyper1.run()
130 |
--------------------------------------------------------------------------------
/tests/test_warm_starts/test_warm_start_smbo.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest, sys
3 | import numpy as np
4 | import pandas as pd
5 |
6 | from hyperactive import (
7 | Hyperactive,
8 | )
9 |
10 | from hyperactive.optimizers import (
11 | BayesianOptimizer,
12 | TreeStructuredParzenEstimators,
13 | ForestOptimizer,
14 | )
15 |
16 |
17 | if sys.platform.startswith("win"):
18 | pytest.skip("skip these tests for windows", allow_module_level=True)
19 |
20 |
21 | def func1():
22 | pass
23 |
24 |
25 | def func2():
26 | pass
27 |
28 |
29 | class class1:
30 | def __init__(self):
31 | pass
32 |
33 |
34 | class class2:
35 | def __init__(self):
36 | pass
37 |
38 |
39 | def class_f1():
40 | return class1
41 |
42 |
43 | def class_f2():
44 | return class2
45 |
46 |
47 | def numpy_f1():
48 | return np.array([0, 1])
49 |
50 |
51 | def numpy_f2():
52 | return np.array([1, 0])
53 |
54 |
55 | search_space = {
56 | "x0": list(range(-3, 3)),
57 | "x1": list(np.arange(-1, 1, 0.001)),
58 | "string0": ["str0", "str1"],
59 | "function0": [func1, func2],
60 | "class0": [class_f1, class_f2],
61 | "numpy0": [numpy_f1, numpy_f2],
62 | }
63 |
64 |
65 | def objective_function(opt):
66 | score = -opt["x1"] * opt["x1"]
67 | return score
68 |
69 |
70 | smbo_opts = [
71 | BayesianOptimizer,
72 | TreeStructuredParzenEstimators,
73 | ForestOptimizer,
74 | ]
75 |
76 | initialize = {"random": 1}
77 | n_iter = 3
78 |
79 |
80 | @pytest.mark.parametrize("smbo_opt", smbo_opts)
81 | def test_warm_start_smbo_0(smbo_opt):
82 | hyper0 = Hyperactive()
83 | hyper0.add_search(objective_function, search_space, n_iter=n_iter)
84 | hyper0.run()
85 |
86 | search_data0 = hyper0.search_data(objective_function)
87 | smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
88 |
89 | hyper1 = Hyperactive()
90 | hyper1.add_search(
91 | objective_function,
92 | search_space,
93 | n_iter=n_iter,
94 | optimizer=smbo_opt_,
95 | initialize=initialize,
96 | )
97 | hyper1.run()
98 |
99 |
100 | @pytest.mark.parametrize("smbo_opt", smbo_opts)
101 | def test_warm_start_smbo_1(smbo_opt):
102 | hyper0 = Hyperactive(distribution="pathos")
103 | hyper0.add_search(
104 | objective_function,
105 | search_space,
106 | n_iter=n_iter,
107 | n_jobs=2,
108 | initialize=initialize,
109 | )
110 | hyper0.run()
111 |
112 | search_data0 = hyper0.search_data(objective_function)
113 | smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
114 |
115 | hyper1 = Hyperactive()
116 | hyper1.add_search(
117 | objective_function, search_space, n_iter=n_iter, optimizer=smbo_opt_
118 | )
119 | hyper1.run()
120 |
121 |
122 | @pytest.mark.parametrize("smbo_opt", smbo_opts)
123 | def test_warm_start_smbo_2(smbo_opt):
124 | hyper0 = Hyperactive()
125 | hyper0.add_search(objective_function, search_space, n_iter=n_iter)
126 | hyper0.run()
127 |
128 | search_data0 = hyper0.search_data(objective_function)
129 | smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
130 |
131 | hyper1 = Hyperactive(distribution="joblib")
132 | hyper1.add_search(
133 | objective_function,
134 | search_space,
135 | n_iter=n_iter,
136 | n_jobs=2,
137 | optimizer=smbo_opt_,
138 | initialize=initialize,
139 | )
140 | hyper1.run()
141 |
142 |
143 | @pytest.mark.parametrize("smbo_opt", smbo_opts)
144 | def test_warm_start_smbo_3(smbo_opt):
145 | hyper0 = Hyperactive(distribution="pathos")
146 | hyper0.add_search(objective_function, search_space, n_iter=n_iter, n_jobs=2)
147 | hyper0.run()
148 |
149 | search_data0 = hyper0.search_data(objective_function)
150 | smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
151 |
152 | hyper1 = Hyperactive(distribution="joblib")
153 | hyper1.add_search(
154 | objective_function,
155 | search_space,
156 | n_iter=n_iter,
157 | n_jobs=2,
158 | optimizer=smbo_opt_,
159 | initialize=initialize,
160 | )
161 | hyper1.run()
162 |
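Unlike initialize={"warm_start": ...}, which only sets starting positions, warm_start_smbo is passed to the optimizer itself and seeds its surrogate model with the previous run's search data before the first evaluation. The core pattern, reduced to a single process:

    import numpy as np
    from hyperactive import Hyperactive
    from hyperactive.optimizers import BayesianOptimizer

    def objective_function(opt):
        return -opt["x1"] * opt["x1"]

    search_space = {"x1": list(np.arange(-1, 1, 0.001))}

    hyper0 = Hyperactive()
    hyper0.add_search(objective_function, search_space, n_iter=10)
    hyper0.run()

    # seed the surrogate model with the collected search data
    smbo_opt = BayesianOptimizer(warm_start_smbo=hyper0.search_data(objective_function))

    hyper1 = Hyperactive()
    hyper1.add_search(objective_function, search_space, n_iter=10, optimizer=smbo_opt)
    hyper1.run()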
--------------------------------------------------------------------------------