├── .devcontainer
│   └── devcontainer.json
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── feature_request.md
│   │   └── questions.md
│   └── workflows
│       ├── lint.yml
│       ├── publish.yml
│       ├── test.yml
│       └── testwin.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pylintrc
├── LICENSE
├── Makefile
├── README.md
├── RELEASE.md
├── docs
│   ├── Makefile
│   ├── _static
│   │   └── logo_doc.svg
│   ├── _templates
│   │   ├── class.rst
│   │   ├── package.rst_t
│   │   └── toc.rst_t
│   ├── api.rst
│   ├── api
│   │   ├── tune.api.rst
│   │   ├── tune.concepts.flow.rst
│   │   ├── tune.concepts.rst
│   │   ├── tune.concepts.space.rst
│   │   ├── tune.iterative.rst
│   │   ├── tune.noniterative.rst
│   │   ├── tune.rst
│   │   ├── tune_hyperopt.rst
│   │   ├── tune_notebook.rst
│   │   ├── tune_optuna.rst
│   │   ├── tune_sklearn.rst
│   │   ├── tune_tensorflow.rst
│   │   └── tune_test.rst
│   ├── conf.py
│   ├── index.rst
│   ├── introduction.rst
│   ├── make.bat
│   ├── notebooks
│   │   ├── checkpoint.ipynb
│   │   ├── noniterative.ipynb
│   │   ├── noniterative_objective.ipynb
│   │   ├── noniterative_optimizers.ipynb
│   │   ├── space.ipynb
│   │   └── tune_dataset.ipynb
│   ├── short_tutorials.rst
│   └── top_api.rst
├── images
│   └── logo.svg
├── requirements.txt
├── setup.cfg
├── setup.py
├── tests
│   ├── __init__.py
│   ├── tune
│   │   ├── __init__.py
│   │   ├── _utils
│   │   │   ├── __init__.py
│   │   │   ├── test_collections.py
│   │   │   ├── test_execution.py
│   │   │   ├── test_math.py
│   │   │   └── test_values.py
│   │   ├── api
│   │   │   ├── __init__.py
│   │   │   └── test_factory.py
│   │   ├── concepts
│   │   │   ├── __init__.py
│   │   │   ├── space
│   │   │   │   ├── __init__.py
│   │   │   │   ├── test_parameters.py
│   │   │   │   └── test_space.py
│   │   │   ├── test_checkpoint.py
│   │   │   ├── test_dataset.py
│   │   │   ├── test_flow.py
│   │   │   └── test_logger.py
│   │   ├── iterative
│   │   │   ├── __init__.py
│   │   │   ├── test_asha.py
│   │   │   ├── test_objective.py
│   │   │   ├── test_sha.py
│   │   │   └── test_study.py
│   │   └── noniterative
│   │       ├── __init__.py
│   │       ├── test_convert.py
│   │       ├── test_stopper.py
│   │       └── test_study.py
│   ├── tune_hyperopt
│   │   ├── __init__.py
│   │   └── test_local_optimizer.py
│   ├── tune_notebook
│   │   ├── __init__.py
│   │   └── test_monitors.py
│   ├── tune_optuna
│   │   ├── __init__.py
│   │   └── test_local_optimizer.py
│   ├── tune_sklearn
│   │   ├── __init__.py
│   │   ├── test_objective.py
│   │   ├── test_suggest.py
│   │   └── test_utils.py
│   └── tune_tensorflow
│       ├── __init__.py
│       ├── mock.py
│       ├── test_objective.py
│       ├── test_suggest.py
│       └── test_utils.py
├── tune
│   ├── __init__.py
│   ├── _utils
│   │   ├── __init__.py
│   │   ├── collections.py
│   │   ├── execution.py
│   │   ├── math.py
│   │   ├── serialization.py
│   │   ├── test.py
│   │   └── values.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── factory.py
│   │   ├── optimize.py
│   │   └── suggest.py
│   ├── concepts
│   │   ├── __init__.py
│   │   ├── checkpoint.py
│   │   ├── dataset.py
│   │   ├── flow
│   │   │   ├── __init__.py
│   │   │   ├── judge.py
│   │   │   ├── report.py
│   │   │   └── trial.py
│   │   ├── logger.py
│   │   └── space
│   │       ├── __init__.py
│   │       ├── parameters.py
│   │       └── spaces.py
│   ├── constants.py
│   ├── exceptions.py
│   ├── iterative
│   │   ├── __init__.py
│   │   ├── asha.py
│   │   ├── objective.py
│   │   ├── sha.py
│   │   └── study.py
│   └── noniterative
│       ├── __init__.py
│       ├── convert.py
│       ├── objective.py
│       ├── stopper.py
│       └── study.py
├── tune_hyperopt
│   ├── __init__.py
│   └── optimizer.py
├── tune_mlflow
│   ├── __init__.py
│   └── loggers.py
├── tune_notebook
│   ├── __init__.py
│   └── monitors.py
├── tune_optuna
│   ├── __init__.py
│   └── optimizer.py
├── tune_sklearn
│   ├── __init__.py
│   ├── objective.py
│   ├── suggest.py
│   └── utils.py
├── tune_tensorflow
│   ├── __init__.py
│   ├── objective.py
│   ├── spec.py
│   ├── suggest.py
│   └── utils.py
├── tune_test
│   ├── __init__.py
│   └── local_optmizer.py
├── tune_version
│   └── __init__.py
└── tune_wandb
    ├── __init__.py
    └── loggers.py
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Fugue Development Environment",
3 | "image": "mcr.microsoft.com/vscode/devcontainers/python:3.10",
4 | "customizations": {
5 | "vscode": {
6 | "settings": {
7 | "terminal.integrated.shell.linux": "/bin/bash",
8 | "python.pythonPath": "/usr/local/bin/python",
9 | "python.defaultInterpreterPath": "/usr/local/bin/python",
10 | "editor.defaultFormatter": "ms-python.black-formatter",
11 | "isort.interpreter": [
12 | "/usr/local/bin/python"
13 | ],
14 | "flake8.interpreter": [
15 | "/usr/local/bin/python"
16 | ],
17 | "pylint.interpreter": [
18 | "/usr/local/bin/python"
19 | ],
20 | "black-formatter.interpreter": [
21 | "/usr/local/bin/python"
22 | ]
23 | },
24 | "extensions": [
25 | "ms-python.python",
26 | "ms-python.isort",
27 | "ms-python.flake8",
28 | "ms-python.pylint",
29 | "ms-python.mypy",
30 | "ms-python.black-formatter",
31 | "GitHub.copilot",
32 | "njpwerner.autodocstring"
33 | ]
34 | }
35 | },
36 | "forwardPorts": [
37 | 8888
38 | ],
39 | "postCreateCommand": "make devenv",
40 | "features": {
41 | "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": {},
42 | "ghcr.io/devcontainers/features/java:1": {
43 | "version": "11"
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[BUG]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Minimal Code To Reproduce**
11 |
12 | ```python
13 | ```
14 |
15 | **Describe the bug**
16 | A clear and concise description of what the bug is.
17 |
18 | **Expected behavior**
19 | A clear and concise description of what you expected to happen.
20 |
21 | **Environment (please complete the following information):**
22 | - Backend: pandas/dask/ray?
23 | - Backend version:
24 | - Python version:
25 | - OS: linux/windows
26 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: "[FEATURE]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/questions.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Questions
3 | about: General questions
4 | title: "[QUESTION]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies and run lint checks
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Lint
5 |
6 | on:
7 | push:
8 | branches: [ master ]
9 | pull_request:
10 | branches: [ master ]
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 | strategy:
16 | matrix:
17 | python-version: [3.8]
18 |
19 | steps:
20 | - uses: actions/checkout@v2
21 | - name: Set up Python ${{ matrix.python-version }}
22 | uses: actions/setup-python@v1
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 | - name: Install dependencies
26 | run: make devenv
27 | - name: Lint
28 | run: make lint
29 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: Publish
5 |
6 | on:
7 | release:
8 | types: [created]
9 |
10 | jobs:
11 | deploy:
12 | runs-on: ubuntu-latest
13 |
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Set up Python
17 | uses: actions/setup-python@v1
18 | with:
19 | python-version: '3.8'
20 | - name: Install dependencies
21 | run: make devenv
22 | - name: Test
23 | if: "!github.event.release.prerelease"
24 | run: make test
25 | - name: Build and publish
26 | env:
27 | RELEASE_TAG: ${{ github.event.release.tag_name }}
28 | TWINE_USERNAME: __token__
29 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
30 | run: |
31 | make package
32 | twine upload dist/*
33 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Test
5 |
6 | on:
7 | push:
8 | branches: [ master ]
9 | pull_request:
10 | branches: [ master ]
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 | strategy:
16 | matrix:
17 | python-version: [3.8, 3.9, "3.10", "3.11", "3.12"]
18 |
19 | steps:
20 | - uses: actions/checkout@v2
21 | - name: Set up Python ${{ matrix.python-version }}
22 | uses: actions/setup-python@v1
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 | - name: Install dependencies
26 | run: make devenv
27 | - name: Test
28 | run: make test
29 | - name: "Upload coverage to Codecov"
30 | if: matrix.python-version == '3.10'
31 | uses: codecov/codecov-action@v4
32 | with:
33 | fail_ci_if_error: false
34 | env:
35 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
36 |
--------------------------------------------------------------------------------
/.github/workflows/testwin.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Test Windows
5 |
6 | on:
7 | push:
8 | branches: [ master ]
9 | pull_request:
10 | branches: [ master ]
11 |
12 | jobs:
13 | build:
14 | runs-on: windows-latest
15 | strategy:
16 | matrix:
17 | python-version: [ 3.8, 3.9, "3.10", "3.11", "3.12"]
18 |
19 | steps:
20 | - uses: actions/checkout@v2
21 | - name: Set up Python ${{ matrix.python-version }}
22 | uses: actions/setup-python@v1
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 | - name: Install dependencies
26 | run: make devenv
27 | - name: Test
28 | run: make testcore
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 | pythonenv*
113 |
114 | # Spyder project settings
115 | .spyderproject
116 | .spyproject
117 |
118 | # Rope project settings
119 | .ropeproject
120 |
121 | # mkdocs documentation
122 | /site
123 |
124 | # mypy
125 | .mypy_cache
126 | .dmypy.json
127 | dmypy.json
128 |
129 | # Pyre type checker
130 | .pyre/
131 |
132 | .vscode
133 | tmp
134 |
135 | # Antlr
136 | .antlr
137 |
138 | # dask
139 | dask-worker-space
140 |
141 | # spark
142 | spark-warehouse
143 | =*
144 |
145 | # mlflow
146 | mlruns
147 | mlruns/*
148 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_language_version:
2 | python: python3
3 |
4 | exclude: |
5 | (?x)(
6 | ^tests/|
7 | ^docs/
8 | )
9 | repos:
10 | - repo: https://github.com/pre-commit/pre-commit-hooks
11 | rev: v3.2.0
12 | hooks:
13 | - id: check-ast
14 | - id: check-docstring-first
15 | - id: check-executables-have-shebangs
16 | - id: check-json
17 | - id: check-merge-conflict
18 | - id: check-yaml
19 | - id: debug-statements
20 | - id: end-of-file-fixer
21 | - id: trailing-whitespace
22 | - id: check-vcs-permalinks
23 | - repo: https://github.com/pycqa/flake8
24 | rev: '3.8.3'
25 | hooks:
26 | - id: flake8
27 | types: [python]
28 | additional_dependencies:
29 | - flake8-bugbear
30 | - flake8-builtins
31 | # - flake8-docstrings # TODO: add back!
32 | # - flake8-rst-docstrings
33 | - flake8-comprehensions
34 | - flake8-tidy-imports
35 | - pycodestyle
36 | - repo: https://github.com/pre-commit/mirrors-mypy
37 | rev: v0.971
38 | hooks:
39 | - id: mypy
40 | - repo: https://github.com/pre-commit/mirrors-pylint
41 | rev: v2.6.0
42 | hooks:
43 | - id: pylint
44 | - repo: https://github.com/ambv/black
45 | rev: 22.3.0
46 | hooks:
47 | - id: black
48 | types: [python]
49 | language_version: python3
50 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MESSAGES CONTROL]
2 | disable = C0103,C0114,C0115,C0116,C0122,C0200,C0201,C0302,C0411,C0415,E0401,E0712,E1130,E1136,R0201,R0205,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1718,R1720,R1724,W0102,W0107,W0108,W0201,W0212,W0221,W0223,W0237,W0511,W0613,W0631,W0640,W0703,W0707,W1116
3 | # TODO: R0205: inherits from object, can be safely removed
4 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: help clean dev docs package test
2 |
3 | help:
4 | @echo "The following make targets are available:"
5 | @echo " devenv create venv and install all deps for dev env (assumes python3 cmd exists)"
6 | @echo " dev install all deps for dev env (assumes venv is present)"
7 | @echo " docs create pydocs for all relveant modules (assumes venv is present)"
8 | @echo " package package for pypi"
9 | @echo " test run all tests with coverage (assumes venv is present)"
10 |
11 | devenv:
12 | pip3 install -r requirements.txt
13 | pre-commit install
14 |
15 | dev:
16 | pip3 install -r requirements.txt
17 |
18 | docs:
19 | rm -rf docs/api
20 | rm -rf docs/build
21 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune/
22 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_hyperopt/
23 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_optuna/
24 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_sklearn/
25 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_tensorflow/
26 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_notebook/
27 | sphinx-apidoc --no-toc -f -t=docs/_templates -o docs/api tune_test/
28 | sphinx-build -b html docs/ docs/build/
29 |
30 | lint:
31 | pre-commit run --all-files
32 |
33 | package:
34 | rm -rf dist/*
35 | python3 setup.py sdist
36 | python3 setup.py bdist_wheel
37 |
38 | lab:
39 | mkdir -p tmp
40 | pip install .
41 | jupyter lab --port=8888 --ip=0.0.0.0 --no-browser --allow-root --NotebookApp.token='' --NotebookApp.password='' --NotebookApp.allow_origin='*'
42 |
43 | test:
44 | python3 -bb -m pytest tests
45 |
46 | testcore:
47 | python3 -bb -m pytest tests/tune tests/tune_notebook tests/tune_hyperopt tests/tune_optuna
48 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Tune
2 | 
3 | [Documentation](https://tune.readthedocs.org)
4 | [PyPI](https://pypi.python.org/pypi/tune/)
5 | [Coverage](https://codecov.io/gh/fugue-project/tune)
6 | 
7 | [Slack](https://join.slack.com/t/fugue-project/shared_invite/zt-jl0pcahu-KdlSOgi~fP50TZWmNxdWYQ)
8 | 
9 |
10 | Tune is an abstraction layer for general parameter tuning. It is built on [Fugue](https://github.com/fugue-project/fugue), so it can seamlessly run on any backend supported by Fugue, such as Spark, Dask, or plain local execution.
11 |
12 | ## Installation
13 |
14 | ```bash
15 | pip install tune
16 | ```
17 |
18 | It's recommended to also install Scikit-Learn (for tuning all compatible models) and Hyperopt (to enable [Bayesian Optimization](https://en.wikipedia.org/wiki/Bayesian_optimization)):
19 |
20 | ```bash
21 | pip install tune[hyperopt,sklearn]
22 | ```
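
With these extras installed, continuous expressions such as `Rand` can be searched by a Bayesian optimizer rather than by plain sampling. A minimal sketch, assuming the `HyperoptLocalOptimizer` class from the `tune_hyperopt` package and the `local_optimizer` argument of the suggest API (check the API reference for exact signatures):

```python
from tune import Rand, Space, suggest_for_noniterative_objective
from tune_hyperopt import HyperoptLocalOptimizer  # assumed export

space = Space(x=Rand(-10, 10))  # a continuous search space

# each trial lets hyperopt search the Rand expression for up to 30 iterations
reports = suggest_for_noniterative_objective(
    lambda x: x * x,  # objective: smaller is better by default
    space,
    local_optimizer=HyperoptLocalOptimizer(max_iter=30, seed=0),
)
```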
23 |
24 | ## Quick Start
25 |
26 | To get started quickly, go through these tutorials on Kaggle; a minimal code sketch follows the list:
27 |
28 | 1. [Search Space](https://www.kaggle.com/goodwanghan/tune-tutorials-01-seach-space)
29 | 2. [Non-iterative Problems](https://www.kaggle.com/goodwanghan/tune-tutorials-2-non-iterative-problems), such as Scikit-Learn model tuning
30 | 3. [Iterative Problems](https://www.kaggle.com/goodwanghan/tune-tutorials-3-iterative-problems), such as Keras model tuning
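
If you prefer to see code first, below is a minimal sketch of a non-iterative tuning run, assuming only the top-level `tune` API (`Space`, `Grid`, `suggest_for_noniterative_objective`) introduced in the tutorials above:

```python
from tune import Grid, Space, suggest_for_noniterative_objective

def objective(a: int, b: float) -> float:
    # any plain function returning a numeric metric can be an objective;
    # by default, the smaller the metric the better
    return a ** 2 + b

# a pure grid space: all 3 x 2 combinations are evaluated
space = Space(a=Grid(-1, 0, 1), b=Grid(0.0, 10.0))

reports = suggest_for_noniterative_objective(objective, space, top_n=1)
print(reports[0])  # the best trial's parameters and metric
```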
31 |
32 |
33 | ## Design Philosophy
34 |
35 | Tune does not follow Scikit-Learn's model selection APIs and does not provide a distributed backend for them. **We believe that parameter tuning is a general problem that is not only for machine learning**, so our abstractions are built from the ground up: the lower-level APIs do not assume the objective is a machine learning model, while the higher-level APIs are dedicated to solving specific problems, such as Scikit-Learn compatible model tuning and Keras model tuning.
36 |
37 | Although we didn't base our solution on [HyperOpt](http://hyperopt.github.io/hyperopt/), [Optuna](https://optuna.org/), [Ray Tune](https://docs.ray.io/en/master/tune/index.html) or [Nevergrad](https://github.com/facebookresearch/nevergrad), we are truly inspired by these wonderful solutions and their designs. We have also integrated with many of them for deeper-level optimizations.
38 |
39 | Tuning problems are never easy; here are our goals:
40 |
41 | * Provide the simplest and most intuitive APIs for major tuning cases. We always start from real tuning cases, figure out the minimal requirement for each of them, and then determine the layers of abstraction. Read [this tutorial](https://www.kaggle.com/goodwanghan/tune-tutorials-2-non-iterative-problems) to see how minimal the interfaces can be.
42 | * Be scale agnostic and platform agnostic. We want you to worry less about *distributed computing* and focus on the tuning logic itself. Built on Fugue, Tune lets you develop your tuning process iteratively: you can test with small spaces on a local machine, then switch to larger spaces and run distributedly with no code change (see the sketch after this list). This can effectively save time and cost, and it makes the process fun and rewarding. To run any tuning logic distributedly, you only need the core framework itself (Spark, Dask, etc.); you do not need a database, a queue service or even an embedded cluster.
43 | * Be highly extendable and flexible on the lower levels. For example:
44 |   * you can extend on the Fugue level, for example creating an execution engine for [Prefect](https://www.prefect.io/) to run the tuning jobs as a Prefect workflow
45 |   * you can integrate third-party optimizers and use Tune just as a distributed orchestrator. We have integrated [HyperOpt](http://hyperopt.github.io/hyperopt/), and [Optuna](https://optuna.org/) and [Nevergrad](https://github.com/facebookresearch/nevergrad) are on the way.
46 |   * you can start external instances (e.g. EC2 instances) for different training subtasks to fully utilize your cloud
47 |   * you can combine it with distributed training as long as you have enough compute resources
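
To make the scale-agnostic goal concrete, here is a hedged sketch reusing `objective` and `space` from the Quick Start snippet: the call runs locally by default, and moving to a cluster is a parameter change rather than a rewrite (the `distributed` and `execution_engine` arguments follow `tune.api.suggest`; `spark_session` is assumed to be a live `SparkSession`):

```python
# iterate locally on a small space first
reports = suggest_for_noniterative_objective(objective, space)

# then run the same logic on Spark -- only the engine changes
reports = suggest_for_noniterative_objective(
    objective,
    space,
    distributed=True,
    execution_engine=spark_session,
)
```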
48 |
49 | ## Focuses
50 |
51 | Here are our current focuses:
52 |
53 | * A flexible space design that can describe a hybrid space of grid search, random search and second-level optimizations such as Bayesian optimization (sketched below)
54 | * Integrate with 3rd party tuning frameworks
55 | * Create generalized and distributed versions of [Successive Halving](https://scikit-learn.org/stable/auto_examples/model_selection/plot_successive_halving_iterations.html), [Hyperband](https://arxiv.org/abs/1603.06560) and [Asynchronous Successive Halving](https://arxiv.org/abs/1810.05934).
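
As a sketch of the hybrid space design (assuming the `Space`, `Grid`, `RandInt` expressions, `Space.sample`, and the `+` union operator described in the Space tutorial):

```python
from tune import Grid, RandInt, Space

grid = Space(model="linear", alpha=Grid(0.1, 1.0, 10.0))                # grid search
random = Space(model="forest", depth=RandInt(2, 10)).sample(5, seed=0)  # random search
bayesian = Space(model="forest", depth=RandInt(2, 10))                  # second-level optimization

# one search over the union of heterogeneous sub-spaces
space = grid + random + bayesian
```

The `bayesian` sub-space keeps its expression unresolved, so a local optimizer (for example the HyperOpt integration) can search it within each trial.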
56 |
57 |
58 | ## Collaboration
59 |
60 | We are looking for collaborators. If you are interested, please let us know and join our [Slack channel](https://join.slack.com/t/fugue-project/shared_invite/zt-jl0pcahu-KdlSOgi~fP50TZWmNxdWYQ).
61 |
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release Notes
2 |
3 | ## 0.1.6
4 |
5 | - Support Python 3.12
6 |
7 | ## 0.1.5
8 |
9 | - Refactor `FunctionWrapper`, remove the Fugue constraint
10 |
11 | ## 0.1.3
12 |
13 | - Added Fugue version constraint to avoid breaking changes
14 |
15 | ## 0.1.2
16 |
17 | - [54](https://github.com/fugue-project/tune/pull/54): Integrated with MLflow
18 | - [60](https://github.com/fugue-project/tune/issues/60): Integrated with W&B
19 | - [57](https://github.com/fugue-project/tune/issues/57): Expanded test coverage to Python 3.6 - 3.10, on Linux and Windows
20 | - [58](https://github.com/fugue-project/tune/issues/58): Fixed the Bayesian optimization `min_better` bug
21 |
22 | ## 0.1.0
23 |
24 | - Replace all `pickle` with `cloudpickle`
25 |
26 | ## 0.0.9
27 |
28 | - Major refactoring on Space to use [parameters template](https://github.com/fugue-project/tune/issues/47)
29 | - Add [TransitionChoice](https://github.com/fugue-project/tune/issues/46)
30 | - Enable local optimizers (optuna and hyperopt) to handle [nested data structures](https://github.com/fugue-project/tune/issues/44)
31 |
32 | ## 0.0.8
33 |
34 | - Fixed the [lower bound](https://github.com/fugue-project/tune/issues/43) of Rand expression
35 |
36 | ## 0.0.7
37 |
38 | - [Optuna integration](https://github.com/fugue-project/tune/issues/23) and make Optuna and Hyperopt consistent
39 | - Make test coverage [100%](https://github.com/fugue-project/tune/issues/16)
40 |
41 | ## 0.0.6
42 |
43 | - [Early stopping](https://github.com/fugue-project/tune/issues/22) for non-iterative tuning
44 | - Work on local optimizer [1](https://github.com/fugue-project/tune/issues/18) [2](https://github.com/fugue-project/tune/issues/31)
45 | - Added initialize and finalize [hooks](https://github.com/fugue-project/tune/issues/28) for monitors
46 | - Improved realtime chart [rendering](https://github.com/fugue-project/tune/issues/19)
47 | - Fixed [prerelease](https://github.com/fugue-project/tune/issues/27)
48 | - Fixed report [timeout bug](https://github.com/fugue-project/tune/issues/20)
49 |
50 | ## Before 0.0.6
51 |
52 | - Implemented main features for iterative and non-iterative tuning
53 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/_static/logo_doc.svg:
--------------------------------------------------------------------------------
(SVG markup omitted from this dump; the file contains the documentation logo graphic)
--------------------------------------------------------------------------------
/docs/_templates/class.rst:
--------------------------------------------------------------------------------
1 |
2 | {{ fullname | escape | underline}}
3 |
4 | .. currentmodule:: {{ module }}
5 |
6 | .. autoclass:: {{ objname }}
7 |
8 | {% block attributes %}
9 | {% if attributes %}
10 | .. rubric:: Attributes
11 |
12 | .. autosummary::
13 | :nosignatures:
14 |
15 | {% for item in attributes %}
16 | ~{{ name }}.{{ item }}
17 | {%- endfor %}
18 |
19 | {% endif %}
20 | {% endblock %}
21 |
22 | {% block methods %}
23 | {% if methods %}
24 | .. rubric:: Methods
25 |
26 | .. autosummary::
27 | :nosignatures:
28 | :toctree: methods
29 |
30 | {% for item in methods %}
31 | {%- if item not in inherited_members %}
32 | ~{{ name }}.{{ item }}
33 | {%- endif %}
34 | {%- endfor %}
35 | {% endif %}
36 |
37 | {%- if '__call__' in members %}
38 | ~{{ name }}.__call__
39 | {%- endif %}
40 |
41 | {% endblock %}
42 |
--------------------------------------------------------------------------------
/docs/_templates/package.rst_t:
--------------------------------------------------------------------------------
1 | {%- macro automodule(modname, options) -%}
2 | .. automodule:: {{ modname }}
3 | {%- for option in options %}
4 | :{{ option }}:
5 | {%- endfor %}
6 | {%- endmacro %}
7 |
8 | {%- macro toctree(docnames) -%}
9 | .. toctree::
10 | :hidden:
11 | :maxdepth: {{ maxdepth }}
12 | {% for docname in docnames %}
13 | {{ docname }}
14 | {%- endfor %}
15 | {%- endmacro %}
16 |
17 | {%- if is_namespace %}
18 | {{- [pkgname, ""] | join(" ") | e | heading }}
19 | {% else %}
20 | {{- [pkgname, ""] | join(" ") | e | heading }}
21 | {% endif %}
22 |
23 | {%- if modulefirst and not is_namespace %}
24 | {{ automodule(pkgname, automodule_options) }}
25 | {% endif %}
26 |
27 | {%- if subpackages %}
28 | {{ toctree(subpackages) }}
29 | {% endif %}
30 |
31 | {%- if submodules %}
32 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
33 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
34 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
35 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
36 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
37 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
38 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
39 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
40 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
41 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
42 |
43 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
44 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
45 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
46 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
47 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
48 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
49 |
50 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
51 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
52 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
53 |
54 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
55 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
56 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
57 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
58 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
59 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
60 |
61 | {% if separatemodules %}
62 | {%- else %}
63 | {%- for submodule in submodules %}
64 | {% if show_headings %}
65 | {{- submodule | e | heading(2) }}
66 | {% endif %}
67 | {{ automodule(submodule, automodule_options) }}
68 | {% endfor %}
69 | {%- endif %}
70 | {% endif %}
71 |
--------------------------------------------------------------------------------
/docs/_templates/toc.rst_t:
--------------------------------------------------------------------------------
1 | {{ header | heading }}
2 |
3 | .. toctree::
4 | :maxdepth: {{ maxdepth }}
5 | {% for docname in docnames %}
6 | {{ docname }}
7 | {%- endfor %}
8 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | Complete API Reference
2 | ======================
3 |
4 | .. toctree::
5 |
6 | api/tune
7 | api/tune_hyperopt
8 | api/tune_optuna
9 | api/tune_sklearn
10 | api/tune_tensorflow
11 | api/tune_notebook
12 | api/tune_test
13 |
14 |
15 |
--------------------------------------------------------------------------------
/docs/api/tune.api.rst:
--------------------------------------------------------------------------------
1 | tune.api
2 | =========
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune.api.factory
35 | ----------------
36 |
37 | .. automodule:: tune.api.factory
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune.api.optimize
43 | -----------------
44 |
45 | .. automodule:: tune.api.optimize
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune.api.suggest
51 | ----------------
52 |
53 | .. automodule:: tune.api.suggest
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 |
--------------------------------------------------------------------------------
/docs/api/tune.concepts.flow.rst:
--------------------------------------------------------------------------------
1 | tune.concepts.flow
2 | ===================
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune.concepts.flow.judge
35 | ------------------------
36 |
37 | .. automodule:: tune.concepts.flow.judge
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune.concepts.flow.report
43 | -------------------------
44 |
45 | .. automodule:: tune.concepts.flow.report
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune.concepts.flow.trial
51 | ------------------------
52 |
53 | .. automodule:: tune.concepts.flow.trial
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 |
--------------------------------------------------------------------------------
/docs/api/tune.concepts.rst:
--------------------------------------------------------------------------------
1 | tune.concepts
2 | ==============
3 |
4 | .. toctree::
5 | :hidden:
6 | :maxdepth: 4
7 |
8 | tune.concepts.flow
9 | tune.concepts.space
10 |
11 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
12 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
13 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
14 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
15 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
16 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
17 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
18 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
19 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
20 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
21 |
22 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
23 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
24 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
25 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
26 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
27 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
28 |
29 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
30 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
31 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
32 |
33 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
34 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
35 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
36 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
37 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
38 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
39 |
40 |
41 | tune.concepts.checkpoint
42 | ------------------------
43 |
44 | .. automodule:: tune.concepts.checkpoint
45 | :members:
46 | :undoc-members:
47 | :show-inheritance:
48 |
49 | tune.concepts.dataset
50 | ---------------------
51 |
52 | .. automodule:: tune.concepts.dataset
53 | :members:
54 | :undoc-members:
55 | :show-inheritance:
56 |
57 |
--------------------------------------------------------------------------------
/docs/api/tune.concepts.space.rst:
--------------------------------------------------------------------------------
1 | tune.concepts.space
2 | ====================
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune.concepts.space.parameters
35 | ------------------------------
36 |
37 | .. automodule:: tune.concepts.space.parameters
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune.concepts.space.spaces
43 | --------------------------
44 |
45 | .. automodule:: tune.concepts.space.spaces
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 |
--------------------------------------------------------------------------------
/docs/api/tune.iterative.rst:
--------------------------------------------------------------------------------
1 | tune.iterative
2 | ===============
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune.iterative.asha
35 | -------------------
36 |
37 | .. automodule:: tune.iterative.asha
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune.iterative.objective
43 | ------------------------
44 |
45 | .. automodule:: tune.iterative.objective
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune.iterative.sha
51 | ------------------
52 |
53 | .. automodule:: tune.iterative.sha
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 | tune.iterative.study
59 | --------------------
60 |
61 | .. automodule:: tune.iterative.study
62 | :members:
63 | :undoc-members:
64 | :show-inheritance:
65 |
66 |
--------------------------------------------------------------------------------
/docs/api/tune.noniterative.rst:
--------------------------------------------------------------------------------
1 | tune.noniterative
2 | ==================
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune.noniterative.convert
35 | -------------------------
36 |
37 | .. automodule:: tune.noniterative.convert
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune.noniterative.objective
43 | ---------------------------
44 |
45 | .. automodule:: tune.noniterative.objective
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune.noniterative.stopper
51 | -------------------------
52 |
53 | .. automodule:: tune.noniterative.stopper
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 | tune.noniterative.study
59 | -----------------------
60 |
61 | .. automodule:: tune.noniterative.study
62 | :members:
63 | :undoc-members:
64 | :show-inheritance:
65 |
66 |
--------------------------------------------------------------------------------
/docs/api/tune.rst:
--------------------------------------------------------------------------------
1 | tune
2 | =====
3 |
4 | .. toctree::
5 | :hidden:
6 | :maxdepth: 4
7 |
8 | tune.api
9 | tune.concepts
10 | tune.iterative
11 | tune.noniterative
12 |
13 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
14 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
15 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
16 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
17 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
18 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
19 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
20 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
21 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
22 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
23 |
24 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
25 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
26 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
27 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
28 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
29 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
30 |
31 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
32 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
33 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
34 |
35 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
36 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
37 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
38 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
39 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
40 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
41 |
42 |
43 | tune.constants
44 | --------------
45 |
46 | .. automodule:: tune.constants
47 | :members:
48 | :undoc-members:
49 | :show-inheritance:
50 |
51 | tune.exceptions
52 | ---------------
53 |
54 | .. automodule:: tune.exceptions
55 | :members:
56 | :undoc-members:
57 | :show-inheritance:
58 |
59 |
--------------------------------------------------------------------------------
/docs/api/tune_hyperopt.rst:
--------------------------------------------------------------------------------
1 | tune\_hyperopt
2 | ===============
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_hyperopt.optimizer
35 | ------------------------
36 |
37 | .. automodule:: tune_hyperopt.optimizer
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 |
--------------------------------------------------------------------------------
/docs/api/tune_notebook.rst:
--------------------------------------------------------------------------------
1 | tune\_notebook
2 | ===============
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_notebook.monitors
35 | -----------------------
36 |
37 | .. automodule:: tune_notebook.monitors
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 |
--------------------------------------------------------------------------------
/docs/api/tune_optuna.rst:
--------------------------------------------------------------------------------
1 | tune\_optuna
2 | =============
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_optuna.optimizer
35 | ----------------------
36 |
37 | .. automodule:: tune_optuna.optimizer
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 |
--------------------------------------------------------------------------------
/docs/api/tune_sklearn.rst:
--------------------------------------------------------------------------------
1 | tune\_sklearn
2 | ==============
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_sklearn.objective
35 | -----------------------
36 |
37 | .. automodule:: tune_sklearn.objective
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune\_sklearn.suggest
43 | ---------------------
44 |
45 | .. automodule:: tune_sklearn.suggest
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune\_sklearn.utils
51 | -------------------
52 |
53 | .. automodule:: tune_sklearn.utils
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 |
--------------------------------------------------------------------------------
/docs/api/tune_tensorflow.rst:
--------------------------------------------------------------------------------
1 | tune\_tensorflow
2 | =================
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_tensorflow.objective
35 | --------------------------
36 |
37 | .. automodule:: tune_tensorflow.objective
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | tune\_tensorflow.spec
43 | ---------------------
44 |
45 | .. automodule:: tune_tensorflow.spec
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | tune\_tensorflow.suggest
51 | ------------------------
52 |
53 | .. automodule:: tune_tensorflow.suggest
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 | tune\_tensorflow.utils
59 | ----------------------
60 |
61 | .. automodule:: tune_tensorflow.utils
62 | :members:
63 | :undoc-members:
64 | :show-inheritance:
65 |
66 |
--------------------------------------------------------------------------------
/docs/api/tune_test.rst:
--------------------------------------------------------------------------------
1 | tune\_test
2 | ===========
3 |
4 | .. |SpaceTutorial| replace:: :ref:`Space Tutorial `
5 | .. |DatasetTutorial| replace:: :ref:`TuneDataset Tutorial `
6 | .. |Trial| replace:: :class:`~tune.concepts.flow.trial.Trial`
7 | .. |SortMetric| replace:: :meth:`tune.concepts.flow.report.TrialReport.sort_metric`
8 | .. |TrialObject| replace:: :class:`~tune.concepts.flow.trial.Trial`
9 | .. |TrialReportObject| replace:: :class:`~tune.concepts.flow.report.TrialReport`
10 | .. |NonIterativeObjective| replace:: a simple python function or :class:`~tune.noniterative.objective.NonIterativeObjectiveFunc` compatible object, please read :ref:`Non-Iterative Objective Explained `
11 | .. |NonIterativeOptimizer| replace:: an object that can be converted to :class:`~tune.noniterative.objective.NonIterativeObjectiveLocalOptimizer`, please read :ref:`Non-Iterative Optimizers `
12 | .. |DataFrameLike| replace:: Pandas, Spark, Dask or any dataframe that can be converted to Fugue :class:`~fugue.dataframe.dataframe.DataFrame`
13 | .. |TempPath| replace:: temp path for serialized dataframe partitions. It can be empty if you preset using ``TUNE_OBJECT_FACTORY.``:meth:`~tune.api.factory.TuneObjectFactory.set_temp_path`. For details, read :ref:`TuneDataset Tutorial `
14 |
15 | .. |SchemaLikeObject| replace:: :ref:`Schema like object `
16 | .. |ParamsLikeObject| replace:: :ref:`Parameters like object `
17 | .. |DataFrameLikeObject| replace:: :ref:`DataFrame like object `
18 | .. |DataFramesLikeObject| replace:: :ref:`DataFrames like object `
19 | .. |PartitionLikeObject| replace:: :ref:`Partition like object `
20 | .. |RPCHandlerLikeObject| replace:: :ref:`RPChandler like object `
21 |
22 | .. |ExecutionEngine| replace:: :class:`~fugue.execution.execution_engine.ExecutionEngine`
23 | .. |NativeExecutionEngine| replace:: :class:`~fugue.execution.native_execution_engine.NativeExecutionEngine`
24 | .. |FugueWorkflow| replace:: :class:`~fugue.workflow.workflow.FugueWorkflow`
25 |
26 | .. |ReadJoin| replace:: Read Join tutorials on :ref:`workflow ` and :ref:`engine ` for details
27 | .. |FugueConfig| replace:: :ref:`the Fugue Configuration Tutorial `
28 | .. |PartitionTutorial| replace:: :ref:`the Partition Tutorial `
29 | .. |FugueSQLTutorial| replace:: :ref:`the Fugue SQL Tutorial `
30 | .. |DataFrameTutorial| replace:: :ref:`the DataFrame Tutorial `
31 | .. |ExecutionEngineTutorial| replace:: :ref:`the ExecutionEngine Tutorial `
32 |
33 |
34 | tune\_test.local\_optmizer
35 | --------------------------
36 |
37 | .. automodule:: tune_test.local_optmizer
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | # Configuration file for the Sphinx documentation builder.
3 | #
4 | # This file only contains a selection of the most common options. For a full
5 | # list see the documentation:
6 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
7 |
8 | # -- Path setup --------------------------------------------------------------
9 |
10 | # If extensions (or modules to document with autodoc) are in another directory,
11 | # add these directories to sys.path here. If the directory is relative to the
12 | # documentation root, use os.path.abspath to make it absolute, like shown here.
13 | #
14 | import os
15 | import sys
16 |
17 | sys.path.insert(0, os.path.abspath("../"))
18 |
19 | from tune_version import __version__
20 | import sphinx_rtd_theme
21 |
22 | # -- Project information -----------------------------------------------------
23 |
24 | project = "Tune"
25 | version = __version__
26 | copyright = "2022, Fugue Development Team" # noqa: A001
27 | author = "Han Wang"
28 |
29 |
30 | # -- General configuration ---------------------------------------------------
31 |
32 | # Add any Sphinx extension module names here, as strings. They can be
33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
34 | # ones.
35 | extensions = [
36 | "sphinx.ext.todo",
37 | "sphinx.ext.viewcode",
38 | "sphinx.ext.intersphinx",
39 | "sphinx.ext.autodoc",
40 | "sphinx.ext.autosummary",
41 | "sphinx.ext.autodoc.typehints",
42 | "sphinx_rtd_theme",
43 | "nbsphinx",
44 | ]
45 |
46 | add_module_names = False
47 |
48 | nbsphinx_execute = "never"
49 |
50 | autodoc_typehints = "description"
51 |
52 | # Add any paths that contain templates here, relative to this directory.
53 | templates_path = ["_templates"]
54 |
55 | # The language for content autogenerated by Sphinx. Refer to documentation
56 | # for a list of supported languages.
57 | #
58 | # This is also used if you do content translation via gettext catalogs.
59 | # Usually you set "language" from the command line for these cases.
60 | language = "en"
61 |
62 | # List of patterns, relative to source directory, that match files and
63 | # directories to ignore when looking for source files.
64 | # This pattern also affects html_static_path and html_extra_path.
65 |
66 |
67 | # -- Options for HTML output -------------------------------------------------
68 |
69 | # The theme to use for HTML and HTML Help pages. See the documentation for
70 | # a list of builtin themes.
71 | #
72 | # html_theme = "sphinx_rtd_theme"
73 | html_theme = "furo"
74 |
75 | # Add any paths that contain custom static files (such as style sheets) here,
76 | # relative to this directory. They are copied after the builtin static files,
77 | # so a file named "default.css" will overwrite the builtin "default.css".
78 | html_static_path = ["_static"]
79 | html_logo = "_static/logo_doc.svg"
80 | html_theme_options = {
81 | "logo_only": True,
82 | "display_version": False,
83 | "sidebar_hide_name": True,
84 | "style_nav_header_background": "#264263",
85 | }
86 |
87 | master_doc = "index"
88 |
89 | intersphinx_mapping = {
90 | "triad": ("https://triad.readthedocs.io/en/latest", None),
91 | "fugue": ("https://fugue.readthedocs.io/en/latest", None),
92 | "tutorial": ("https://fugue-tutorials.readthedocs.io/en/latest", None),
93 | "pa": ("https://arrow.apache.org/docs", None),
94 | "fs": ("https://docs.pyfilesystem.org/en/latest", None),
95 | "spark": ("https://spark.apache.org/docs/latest/api/python", None),
96 | "dask": ("https://docs.dask.org/en/latest", None),
97 | "np": ("https://numpy.org/doc/stable", None),
98 | }
99 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. Tune documentation master file, created by
2 | sphinx-quickstart on Sun May 17 21:49:44 2020.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Tune Documentation
7 | ====================
8 |
9 | .. toctree::
10 | :maxdepth: 3
11 |
12 | introduction
13 | top_api
14 | api
15 | short_tutorials
16 |
--------------------------------------------------------------------------------
/docs/introduction.rst:
--------------------------------------------------------------------------------
1 | Introduction
2 | =============
3 |
4 | Tune is an abstraction layer for general parameter tuning.
5 | It is built on `Fugue `_
6 | so it can seamlessly run on any backend supported by Fugue, such as Spark, Dask or local execution.
7 |
8 | Installation
9 | ------------
10 |
11 | .. code-block:: bash
12 |
13 | pip install tune
14 |
15 | It's recommended to also install Scikit-Learn (to tune all compatible models) and Hyperopt
16 | (to enable `Bayesian Optimization `_):
17 |
18 | .. code-block:: bash
19 |
20 | pip install tune[hyperopt,sklearn]
21 |
22 | Quick Start
23 | -----------
24 |
25 | To get started quickly, please go through these tutorials on Kaggle; a minimal sketch of the API follows the list:
26 |
27 | #. `Search Space `_
28 | #. `Non-iterative Problems `_, such as Scikit-Learn model tuning
29 | #. `Iterative Problems `_, such as Keras model tuning
30 |
31 |
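Below is a minimal sketch of a non-iterative tuning run. It assumes
``suggest_for_noniterative_objective`` is exposed at the top level as in the
current codebase and returns a list of trial reports, best first; exact
signatures may vary by version.

.. code-block:: python

    from tune import Grid, Space, suggest_for_noniterative_objective

    # a plain python function can serve as a non-iterative objective;
    # parameters are bound by name and smaller values are better by default
    def objective(a: float, b: float) -> float:
        return a ** 2 + b ** 2

    space = Space(a=Grid(-1, 0, 1), b=Grid(-1, 0, 1))  # 3 x 3 = 9 candidates
    best = suggest_for_noniterative_objective(objective, space, top_n=1)[0]
    print(best.sort_metric, best.trial.params)
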
32 | Design Philosophy
33 | -----------------
34 |
35 | Tune does not follow Scikit-Learn's model selection APIs and does not provide a distributed backend for them. **We believe that parameter tuning is a general problem that is not only for machine learning**, so our abstractions are built from the ground up: the lower-level APIs do not assume the objective is a machine learning model, while the higher-level APIs are dedicated to solving specific problems, such as Scikit-Learn compatible model tuning and Keras model tuning.
36 |
37 | Although we didn't base our solution on any of
38 | `HyperOpt `_,
39 | `Optuna `_,
40 | `Ray Tune `_ or
41 | `Nevergrad `_,
42 | we are truly inspired by these wonderful solutions and their designs. We have also integrated with many of them for deeper-level optimizations.
43 |
44 | Tuning problems are never easy; here are our goals:
45 |
46 | * Provide the simplest and most intuitive APIs for major tuning cases.
47 |   We always start from real tuning cases, figure out the minimal requirements for each of them, and then determine
48 |   the layers of abstraction.
49 |   Read `this tutorial `_
50 |   to see how minimal the interfaces can be.
51 | * Be scale agnostic and platform agnostic. We want you to worry less about *distributed computing*
52 |   and just focus on the tuning logic itself. Built on Fugue,
53 |   Tune lets you develop your tuning process iteratively.
54 |   You can test with small spaces on a local machine, then switch to larger spaces and run
55 |   distributedly with no code change, which can effectively save time and cost and make the process fun and rewarding.
56 |   To run any tuning logic distributedly, you only need the compute framework itself (Spark, Dask, etc.);
57 |   you do not need a database, a queue service or even an embedded cluster.
58 | * Be highly extendable and flexible at the lower levels. For example (see the sketch after this list):
59 |
60 |   * you can extend at the Fugue level, for example creating an execution engine for `Prefect `_ to run the tuning jobs as a Prefect workflow
61 |   * you can integrate third-party optimizers and use Tune just as a distributed orchestrator
62 |   * you can start external instances (e.g. EC2 instances) for different training subtasks to fully utilize your cloud
63 |   * you can combine with distributed training as long as you have enough compute resources
64 |
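As a sketch of this flexibility: the same suggest call can swap in a
third-party level-2 optimizer and switch backends. This assumes
``tune[hyperopt]`` and Spark are installed; ``HyperoptLocalOptimizer`` and the
``execution_engine`` argument are taken from the current codebase, but exact
names and constructor signatures may differ by version.

.. code-block:: python

    from tune import Rand, Space, suggest_for_noniterative_objective
    from tune_hyperopt.optimizer import HyperoptLocalOptimizer

    def objective(a: float) -> float:
        return (a - 0.3) ** 2

    # the Rand expression is left unsampled so the local optimizer explores it
    space = Space(a=Rand(0.0, 1.0))

    reports = suggest_for_noniterative_objective(
        objective,
        space,
        local_optimizer=HyperoptLocalOptimizer(max_iter=30, seed=0),
        execution_engine="spark",  # omit to run locally; no other code change
    )
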
65 | Current Focuses
66 | ---------------
67 |
68 | Here are our current focuses:
69 |
70 | * A flexible space design that can describe a hybrid space of grid search, random search and second-level optimization such as Bayesian optimization (see the sketch after this list)
71 | * Integration with third-party tuning frameworks. We have integrated HyperOpt and Optuna, and Nevergrad is on the way.
72 | * Create generalized and distributed versions of
73 | `Successive Halving `_,
74 | `Hyperband `_ and
75 | `Asynchronous Successive Halving `_.
76 |
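A minimal sketch of such a hybrid space, using only the ``Space`` operators
shown in the short tutorials (the candidate counts in the comments follow
those semantics):

.. code-block:: python

    from tune import Grid, Rand, Space

    grid = Space(a=Grid(1, 2), b=Grid("x", "y"))  # 2 x 2 = 4 deterministic candidates
    rand = Space(c=Rand(0.0, 1.0)).sample(3, 0)   # 3 randomly sampled candidates
    hybrid = grid * rand                          # cross product: 12 candidates
    union = grid + rand                           # union: 4 + 3 = 7 candidates
    print(len(list(hybrid)), len(list(union)))
    # an unsampled Rand/Choice can also be left in the space for a
    # second-level (e.g. Bayesian) optimizer to explore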
77 |
78 | Collaboration
79 | -------------
80 |
81 | We are looking for collaborators; if you are interested,
82 | please let us know.
83 |
84 | Please join our
85 | `Slack channel `_.
86 |
87 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/notebooks/checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "language_info": {
4 | "codemirror_mode": {
5 | "name": "ipython",
6 | "version": 3
7 | },
8 | "file_extension": ".py",
9 | "mimetype": "text/x-python",
10 | "name": "python",
11 | "nbconvert_exporter": "python",
12 | "pygments_lexer": "ipython3",
13 | "version": "3.7.9"
14 | },
15 | "orig_nbformat": 2,
16 | "kernelspec": {
17 | "name": "python3",
18 | "display_name": "Python 3.7.9 64-bit"
19 | },
20 | "interpreter": {
21 | "hash": "4cd7ab41f5fca4b9b44701077e38c5ffd31fe66a6cab21e0214b68d958d0e462"
22 | }
23 | },
24 | "nbformat": 4,
25 | "nbformat_minor": 2,
26 | "cells": [
27 | {
28 | "cell_type": "markdown",
29 | "source": [
30 | "# Checkpoint\n",
31 | "\n",
32 | "Checkpoint is normally constructed and provided to you, but if you are interested, this can give you some details."
33 | ],
34 | "metadata": {}
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 4,
39 | "source": [
40 | "from tune import Checkpoint\n",
41 | "from triad import FileSystem\n",
42 | "\n",
43 | "root = FileSystem()\n",
44 | "fs = root.makedirs(\"/tmp/test\", recreate=True)\n",
45 | "checkpoint = Checkpoint(fs)\n",
46 | "print(len(checkpoint))"
47 | ],
48 | "outputs": [
49 | {
50 | "output_type": "stream",
51 | "name": "stdout",
52 | "text": [
53 | "0\n"
54 | ]
55 | }
56 | ],
57 | "metadata": {}
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 5,
62 | "source": [
63 | "!ls /tmp/test"
64 | ],
65 | "outputs": [],
66 | "metadata": {}
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 6,
71 | "source": [
72 | "with checkpoint.create() as folder:\n",
73 | " folder.writetext(\"a.txt\", \"test\")"
74 | ],
75 | "outputs": [],
76 | "metadata": {}
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": 7,
81 | "source": [
82 | "!ls /tmp/test"
83 | ],
84 | "outputs": [
85 | {
86 | "output_type": "stream",
87 | "name": "stdout",
88 | "text": [
89 | "STATE d9ed2530-20f1-42b3-8818-7fbf1b8eedf3\n"
90 | ]
91 | }
92 | ],
93 | "metadata": {}
94 | },
95 | {
96 | "cell_type": "markdown",
97 | "source": [
98 | "Here is how to create a new checkpoint under `/tmp/test`"
99 | ],
100 | "metadata": {}
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 8,
105 | "source": [
106 | "with checkpoint.create() as folder:\n",
107 | " folder.writetext(\"a.txt\", \"test2\")"
108 | ],
109 | "outputs": [],
110 | "metadata": {}
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": 9,
115 | "source": [
116 | "!ls /tmp/test/*/"
117 | ],
118 | "outputs": [
119 | {
120 | "output_type": "stream",
121 | "name": "stdout",
122 | "text": [
123 | "/tmp/test/8d4e7fed-2a4c-4789-a732-0cb46294e704/:\n",
124 | "a.txt\n",
125 | "\n",
126 | "/tmp/test/d9ed2530-20f1-42b3-8818-7fbf1b8eedf3/:\n",
127 | "a.txt\n"
128 | ]
129 | }
130 | ],
131 | "metadata": {}
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "source": [
136 | "Here is how to get the latest checkpoint folder"
137 | ],
138 | "metadata": {}
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": 10,
143 | "source": [
144 | "print(len(checkpoint))\n",
145 | "print(checkpoint.latest.readtext(\"a.txt\"))"
146 | ],
147 | "outputs": [
148 | {
149 | "output_type": "stream",
150 | "name": "stdout",
151 | "text": [
152 | "2\n",
153 | "test2\n"
154 | ]
155 | }
156 | ],
157 | "metadata": {}
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": null,
162 | "source": [],
163 | "outputs": [],
164 | "metadata": {}
165 | }
166 | ]
167 | }
--------------------------------------------------------------------------------
/docs/short_tutorials.rst:
--------------------------------------------------------------------------------
1 | Short Tutorials
2 | ===============
3 |
4 | .. toctree::
5 |
6 | notebooks/space
7 | notebooks/noniterative
8 | notebooks/noniterative_objective
9 | notebooks/noniterative_optimizers
10 | notebooks/tune_dataset
11 | notebooks/checkpoint
12 |
--------------------------------------------------------------------------------
/images/logo.svg:
--------------------------------------------------------------------------------
(SVG markup stripped during extraction; this file contains the project logo image.)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | .[all]
2 |
3 | # test requirements
4 | pre-commit
5 | black
6 | mypy
7 | flake8
8 | autopep8
9 | pylint
10 | pytest
11 | pytest-cov
12 | pytest-mock
13 | pytest-spark
14 | sphinx>=2.4.0
15 | pandoc
16 | sphinx-rtd-theme
17 | furo==2022.4.7
18 | sphinx-autodoc-typehints
19 | nbsphinx
20 | flask
21 |
22 | fugue[spark,dask]
23 |
24 | # publish to pypi
25 | wheel
26 | twine
27 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
3 |
4 | [tool:pytest]
5 | addopts =
6 | --cov=tune
7 | --cov=tune_sklearn
8 | --cov=tune_tensorflow
9 | --cov=tune_hyperopt
10 | --cov=tune_optuna
11 | --cov-report=term-missing:skip-covered
12 | -vvv
13 | spark_options =
14 | spark.master: local[2]
15 | spark.sql.catalogImplementation: in-memory
16 | spark.sql.shuffle.partitions: 4
17 | spark.default.parallelism: 4
18 | spark.executor.cores: 4
19 | spark.sql.adaptive.enabled: false
20 |
21 | [flake8]
22 | ignore = E24,E203,W503,C408,A003,A005,W504,C407,C405
23 | max-line-length = 88
24 | format = pylint
25 | exclude = .svc,CVS,.bzr,.hg,.git,__pycache__,venv,tests/*,docs/*
26 | max-complexity = 10
27 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from setuptools import find_packages, setup
4 |
5 | from tune_version import __version__
6 |
7 | with open("README.md") as f:
8 | _text = ["# Tune"] + f.read().splitlines()[1:]
9 | LONG_DESCRIPTION = "\n".join(_text)
10 |
11 |
12 | def get_version() -> str:
13 | tag = os.environ.get("RELEASE_TAG", "")
14 | if "dev" in tag.split(".")[-1]:
15 | return tag
16 | if tag != "":
17 | assert tag == __version__, "release tag and version mismatch"
18 | return __version__
19 |
20 |
21 | setup(
22 | name="tune",
23 | version=get_version(),
24 | packages=find_packages(),
25 | description="An abstraction layer for hyper parameter tuning",
26 | long_description=LONG_DESCRIPTION,
27 | long_description_content_type="text/markdown",
28 | license="Apache-2.0",
29 | author="Han Wang",
30 | author_email="goodwanghan@gmail.com",
31 |     keywords="hyper parameter hyperparameter tuning tune tuner optimization",
32 | url="http://github.com/fugue-project/tune",
33 | install_requires=["fugue", "cloudpickle", "triad>=0.8.4", "fs"],
34 | extras_require={
35 | "hyperopt": ["hyperopt"],
36 | "optuna": ["optuna"],
37 | "tensorflow": ["tensorflow"],
38 | "notebook": ["fugue-jupyter", "seaborn"],
39 | "sklearn": ["scikit-learn"],
40 | "mlflow": ["mlflow"],
41 | "all": [
42 | "hyperopt",
43 | "optuna",
44 | "seaborn",
45 | "tensorflow",
46 | "fugue-jupyter",
47 | "scikit-learn",
48 | "mlflow",
49 | ],
50 | },
51 | classifiers=[
52 | # "3 - Alpha", "4 - Beta" or "5 - Production/Stable"
53 | "Development Status :: 3 - Alpha",
54 | "Intended Audience :: Developers",
55 | "Topic :: Software Development :: Libraries :: Python Modules",
56 | "License :: OSI Approved :: Apache Software License",
57 | "Programming Language :: Python :: 3.8",
58 | "Programming Language :: Python :: 3.9",
59 | "Programming Language :: Python :: 3.10",
60 | "Programming Language :: Python :: 3.11",
61 | "Programming Language :: Python :: 3.12",
62 | "Programming Language :: Python :: 3 :: Only",
63 | ],
64 | python_requires=">=3.8",
65 | entry_points={
66 | "tune.plugins": [
67 | "mlflow = tune_mlflow[mlflow]",
68 | "wandb = tune_wandb[wandb]",
69 | "hyperopt = tune_hyperopt[hyperopt]",
70 | "optuna = tune_optuna[optuna]",
71 | "monitor = tune_notebook[notebook]",
72 | ]
73 | },
74 | )
75 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/__init__.py
--------------------------------------------------------------------------------
/tests/tune/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/__init__.py
--------------------------------------------------------------------------------
/tests/tune/_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/_utils/__init__.py
--------------------------------------------------------------------------------
/tests/tune/_utils/test_collections.py:
--------------------------------------------------------------------------------
1 | from tune._utils import _EMPTY_ITER, dict_product, product, safe_iter
2 |
3 |
4 | def test_safe_iter():
5 | assert [1] == list(safe_iter([1]))
6 | assert [1] == list(safe_iter(safe_iter([1])))
7 | assert [None] == list(safe_iter(safe_iter([None])))
8 | assert [1] == list(safe_iter([1], safe=False))
9 |
10 | assert [_EMPTY_ITER] == list(safe_iter([]))
11 | assert [_EMPTY_ITER] == list(safe_iter(safe_iter([])))
12 | assert [] == list(safe_iter([], safe=False))
13 |
14 |
15 | def test_product():
16 | res = list(product([], safe=False))
17 | assert 0 == len(res)
18 | res = list(product([], safe=True))
19 | assert 1 == len(res)
20 |
21 | res = list(product([[]], safe=False))
22 | assert 0 == len(res)
23 | res = list(product([[]], safe=True))
24 | assert [[]] == res
25 |
26 | res = list(product([[], []]))
27 | assert 0 == len(res)
28 | res = list(product([[], []], safe=True))
29 | assert [[]] == res
30 | res = list(product([[], []], safe=True, remove_empty=False))
31 | assert [[None, None]] == res
32 |
33 | for safe in [True, False]:
34 | for remove_empty in [True, False]:
35 | res = list(product([[1], [2]], safe=safe, remove_empty=remove_empty))
36 | assert 1 == len(res)
37 | assert [1, 2] == res[0]
38 |
39 | res = list(product([[1], [2, 3]], safe=safe, remove_empty=remove_empty))
40 | assert [[1, 2], [1, 3]] == res
41 |
42 | res = list(product([[1], []]))
43 | assert [] == res
44 | res = list(product([[None], [], [2, 3]], safe=True, remove_empty=False))
45 | assert [[None, None, 2], [None, None, 3]] == res
46 | res = list(product([[None], [], [2, 3]], safe=True, remove_empty=True))
47 | assert [[None, 2], [None, 3]] == res
48 |
49 | res = list(product([[1], safe_iter([])], safe=False, remove_empty=True))
50 | assert [[1]] == res
51 | res = list(product([[1], safe_iter([])], safe=False, remove_empty=False))
52 | assert [[1, None]] == res
53 |
54 |
55 | def test_dict_product():
56 | res = list(dict_product({}, safe=True))
57 | assert [{}] == res
58 | res = list(dict_product({"a": []}, safe=True))
59 | assert [{}] == res
60 |
61 | res = list(dict_product({}, safe=False))
62 | assert [] == res
63 | res = list(dict_product({"a": []}, safe=False))
64 | assert [] == res
65 |
--------------------------------------------------------------------------------
/tests/tune/_utils/test_execution.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pytest import raises
4 | from triad.utils.convert import to_timedelta
5 | from tune._utils import run_monitored_process
6 | from tune.exceptions import TuneInterrupted
7 |
8 |
9 | def test_run_monitored_process():
10 | # happy case
11 | assert 10 == run_monitored_process(t1, [1], {}, lambda: True, "5sec")
12 | # stop before responding
13 | with raises(TuneInterrupted):
14 | run_monitored_process(t1, [1], dict(wait="20sec"), lambda: True, "0.2sec")
15 |     # no stop requested before responding
16 | assert 10 == run_monitored_process(
17 | t1, [1], dict(wait="1sec"), lambda: False, "0.2sec"
18 | )
19 | with raises(NotImplementedError):
20 | run_monitored_process(t2, [], {}, lambda: True, "5sec")
21 | assert run_monitored_process(t3, [], {}, lambda: True, "5sec") is None
22 |
23 |
24 | def t1(a, wait=None):
25 | if wait is not None:
26 | sleep(to_timedelta(wait).total_seconds())
27 | return a * 10
28 |
29 |
30 | def t2():
31 | raise NotImplementedError
32 |
33 |
34 | def t3():
35 | pass
36 |
--------------------------------------------------------------------------------
/tests/tune/_utils/test_values.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import numpy as np
4 | from tune._utils import normalize_hp
5 |
6 |
7 | def test_normalize_hp():
8 | assert isinstance(np.int64(10), np.int64)
9 | assert 10 == normalize_hp(np.int64(10))
10 | assert not isinstance(normalize_hp(np.int64(10)), np.int64)
11 |
12 | assert json.dumps(dict(a=[0, 1], b=1.1, c="x")) == json.dumps(
13 | normalize_hp(dict(a=[np.int64(0), 1], b=np.float64(1.1), c="x"))
14 | )
15 |
--------------------------------------------------------------------------------
/tests/tune/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/api/__init__.py
--------------------------------------------------------------------------------
/tests/tune/concepts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/concepts/__init__.py
--------------------------------------------------------------------------------
/tests/tune/concepts/space/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/concepts/space/__init__.py
--------------------------------------------------------------------------------
/tests/tune/concepts/space/test_space.py:
--------------------------------------------------------------------------------
1 | from tune import Choice, Grid, Rand, RandInt, Space, TuningParametersTemplate, FuncParam
2 | from pytest import raises
3 |
4 |
5 | def test_single_space():
6 | raises(ValueError, lambda: Space("abc"))
7 | raises(ValueError, lambda: Space(1))
8 | raises(ValueError, lambda: Space(1, 2))
9 |
10 | space = Space(a=1, b=Grid(2, 3, 4))
11 | dicts = list(space)
12 | dicts = list(space) # make sure iteration doesn't alter the object
13 | assert 3 == len(dicts)
14 | assert dict(a=1, b=2) == dicts[0]
15 | assert dict(a=1, b=3) == dicts[1]
16 |
17 | dicts = list(Space(dict(a=Grid(None, "x"), b=Grid(2, 3))))
18 | assert 4 == len(dicts)
19 |
20 | dicts = list(Space(TuningParametersTemplate(dict(a=1, b=[Grid(2, 3), Grid(4, 5)]))))
21 | assert 4 == len(dicts)
22 | assert dict(a=1, b=[2, 4]) == dicts[0]
23 | assert dict(a=1, b=[2, 5]) == dicts[1]
24 | assert dict(a=1, b=[3, 4]) == dicts[2]
25 | assert dict(a=1, b=[3, 5]) == dicts[3]
26 |
27 | dicts = list(Space(a=1, b=dict(x=Grid(2, 3), y=Grid(4, 5))))
28 | assert 4 == len(dicts)
29 | assert dict(a=1, b=dict(x=2, y=4)) == dicts[0]
30 | assert dict(a=1, b=dict(x=2, y=5)) == dicts[1]
31 | assert dict(a=1, b=dict(x=3, y=4)) == dicts[2]
32 | assert dict(a=1, b=dict(x=3, y=5)) == dicts[3]
33 |
34 |
35 | def test_single_space_sample():
36 | assert not Space(a=1).has_stochastic
37 | assert not Space(a=1, b=Grid(1, 2)).has_stochastic
38 | assert Space(a=1, b=[Grid(1, 2), Rand(0.0, 1.0)]).has_stochastic
39 |
40 | dicts = list(Space(a=1, b=Grid(1, 2)).sample(100))
41 | assert 2 == len(dicts)
42 |
43 | dicts = list(Space(a=1, b=RandInt(1, 2)).sample(100))
44 | assert 100 == len(dicts)
45 |
46 | space = Space(a=1, b=[Grid(1, 2), Rand(0.0, 1.0)], c=Choice("a", "b"))
47 | assert list(space.sample(5, 0)) == list(space.sample(5, 0))
48 | assert list(space.sample(5, 0)) != list(space.sample(5, 1))
49 | dicts = list(space.sample(5, 0))
50 | assert 10 == len(dicts)
51 | assert 5 == len(set(d.template["b"][1] for d in dicts))
52 |
53 |
54 | def test_space_with_funct():
55 | s = Space(a=1, b=FuncParam(lambda x, y: x + y, x=Grid(0, 1), y=Grid(3, 4)))
56 | assert [
57 | dict(a=1, b=3),
58 | dict(a=1, b=4),
59 | dict(a=1, b=4),
60 | dict(a=1, b=5),
61 | ] == list(s)
62 |
63 | u = Grid(0, 1)
64 | s = Space(a=u, b=FuncParam(lambda x, y: x + y, x=u, y=u))
65 | assert [
66 | dict(a=0, b=0),
67 | dict(a=1, b=2),
68 | ] == list(s)
69 |
70 |
71 | def test_operators():
72 | s1 = Space(a=1, b=Grid(2, 3))
73 | s2 = Space(c=Grid("a", "b"))
74 | assert [
75 | dict(a=1, b=2, c="a"),
76 | dict(a=1, b=2, c="b"),
77 | dict(a=1, b=3, c="a"),
78 | dict(a=1, b=3, c="b"),
79 | ] == list(s1 * s2)
80 |
81 | assert [
82 | dict(a=1, b=2),
83 | dict(a=1, b=3),
84 | dict(c="a"),
85 | dict(c="b"),
86 | ] == list(s1 + s2)
87 |
88 | assert [
89 | dict(a=1, b=2, c="a"),
90 | dict(a=1, b=2, c="b"),
91 | dict(a=1, b=3, c="a"),
92 | dict(a=1, b=3, c="b"),
93 | ] == list(s1 * (Space(c="a") + Space(c="b")))
94 |
95 | assert [
96 | dict(a=1, b=2),
97 | dict(a=1, b=3),
98 | dict(c="a"),
99 | dict(c="b"),
100 | ] == list(s1 + (Space(c="a") + Space(c="b")))
101 |
102 | s1 = Space(a=1, b=Grid(2, 3))
103 | s2 = Space(c=Grid("a", "b"))
104 | s3 = Space(d=5)
105 | assert (
106 | [
107 | dict(a=1, b=2),
108 | dict(a=1, b=3),
109 | dict(c="a"),
110 | dict(c="b"),
111 | dict(d=5),
112 | ]
113 | == list(sum([s1, s2, s3]))
114 | == list(sum([s1, s2, s3], None))
115 | )
116 |
--------------------------------------------------------------------------------
/tests/tune/concepts/test_checkpoint.py:
--------------------------------------------------------------------------------
1 | from tune import Checkpoint
2 | from triad import FileSystem
3 | from pytest import raises
4 |
5 |
6 | def test_checkpoint(tmpdir):
7 | fs = FileSystem().opendir(str(tmpdir))
8 | cp = Checkpoint(fs)
9 | assert 0 == len(cp)
10 | with raises(AssertionError):
11 | cp.latest
12 | try:
13 | for i in range(4):
14 | with cp.create() as sfs:
15 | sfs.writetext("a.txt", str(i))
16 | if i == 3:
17 | raise Exception
18 | except Exception:
19 | pass
20 | assert 3 == len(cp)
21 | assert "2" == cp.latest.readtext("a.txt")
22 | files = fs.listdir(".")
23 | assert 4 == len(files)
24 | cp2 = Checkpoint(fs)
25 | assert 3 == len(cp2)
26 | assert "2" == cp2.latest.readtext("a.txt")
27 |
--------------------------------------------------------------------------------
/tests/tune/concepts/test_dataset.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from fugue import (
4 | ArrayDataFrame,
5 | DataFrame,
6 | FugueWorkflow,
7 | NativeExecutionEngine,
8 | WorkflowDataFrames,
9 | )
10 | import cloudpickle
11 |
12 | import fugue
13 | from tune._utils.serialization import from_base64
14 |
15 | from tune.constants import (
16 | TUNE_DATASET_DF_PREFIX,
17 | TUNE_DATASET_PARAMS_PREFIX,
18 | TUNE_DATASET_TRIALS,
19 | TUNE_TEMP_PATH,
20 | )
21 | from tune.concepts.dataset import TuneDatasetBuilder, _to_trail_row, TuneDataset
22 | from tune.concepts.space import Grid, Rand
23 | from tune.concepts.space.spaces import Space
24 | from tune.concepts.flow import Trial
25 |
26 |
27 | def test_builder(tmpdir):
28 | space = Space(a=1, b=2, c=Grid(2, 3))
29 | builder = TuneDatasetBuilder(space, str(tmpdir))
30 |
31 | def assert_count(df: DataFrame, n: int, schema=None) -> None:
32 | assert len(df.as_array()) == n
33 | if schema is not None:
34 | assert df.schema == schema
35 |
36 | # test to_space
37 | with FugueWorkflow() as dag:
38 | df = builder.build(dag).data
39 | df.show()
40 | dag.run()
41 |
42 | df1 = ArrayDataFrame([[0, 1], [1, 1], [0, 2]], "a:int,b:int")
43 |
44 | # test single df
45 | with FugueWorkflow() as dag:
46 | builder.add_dfs(WorkflowDataFrames(x=dag.df(df1)))
47 | dataset = builder.build(dag)
48 | assert ["x"] == dataset.dfs
49 | assert [] == dataset.keys
50 | df = dataset.data
51 | df.show()
52 | df.output(
53 | assert_count,
54 | params=dict(n=2, schema=f"__tune_df__x:str,{TUNE_DATASET_TRIALS}:str"),
55 | )
56 | dag.run()
57 |
58 | space = Space(b=Rand(0, 1), a=1, c=Grid(2, 3), d=Grid("a", "b"))
59 | df2 = ArrayDataFrame([[0, 1], [1, 1], [3, 2]], "a:int,bb:int")
60 | df3 = ArrayDataFrame([[10, 1], [11, 1], [10, 2]], "a:int,c:int")
61 | builder = TuneDatasetBuilder(space)
62 | engine = NativeExecutionEngine(conf={TUNE_TEMP_PATH: str(tmpdir)})
63 |
64 | # test multiple dfs, batch_size and config
65 | with FugueWorkflow() as dag:
66 | dfs = WorkflowDataFrames(
67 | a=dag.df(df1).partition_by("a"), b=dag.df(df2).partition_by("a")
68 | )
69 | dataset = (
70 | builder.add_dfs(dfs, "inner").add_df("c", dag.df(df3), "cross").build(dag)
71 | )
72 | assert ["a"] == dataset.keys
73 | assert ["a", "b", "c"] == dataset.dfs
74 | df = dataset.data
75 | df.show()
76 | df.output(
77 | assert_count,
78 | params=dict(
79 | n=8,
80 | schema="a:int,__tune_df__a:str,__tune_df__b:str,"
81 | f"__tune_df__c:str,{TUNE_DATASET_TRIALS}:str",
82 | ),
83 | )
84 |
85 | df = builder.build(dag, batch_size=3).data
86 | df.show()
87 | df.output(
88 | assert_count,
89 | params=dict(
90 | n=4,
91 | schema="a:int,__tune_df__a:str,__tune_df__b:str,"
92 | f"__tune_df__c:str,{TUNE_DATASET_TRIALS}:str",
93 | ),
94 | )
95 | dag.run(engine)
96 |
97 |
98 | def test_dataset(tmpdir):
99 | space = Space(a=Grid(0, 1, 2, 3, 4), b=Grid(5, 6, 7, 8, 9))
100 | builder = TuneDatasetBuilder(space, str(tmpdir))
101 |
102 | dag = FugueWorkflow()
103 | dataset = builder.build(dag)
104 | ds = dataset.split([4, 1], 0)
105 | assert 2 == len(ds)
106 | ds[0].data.yield_dataframe_as("a")
107 | ds[1].data.yield_dataframe_as("b")
108 | res = dag.run()
109 | assert 25 == len(res["a"].as_array()) + len(res["b"].as_array())
110 | assert len(res["b"].as_array()) < 10
111 |
112 |
113 | def test_to_trial_row():
114 | data1 = {
115 | "b": 2,
116 | "a": 1,
117 | TUNE_DATASET_DF_PREFIX + "x": "x",
118 | TUNE_DATASET_PARAMS_PREFIX: cloudpickle.dumps(
119 | [{"b": 10, "a": 11}, {"a": 11, "b": 10}, {"b": 100, "a": 110}],
120 | ),
121 | }
122 | res1 = _to_trail_row(data1, {"m": 1})
123 | trials1 = from_base64(res1[TUNE_DATASET_TRIALS])
124 | assert 3 == len(trials1) # order matters in params
125 | data2 = {
126 | "a": 1,
127 | "b": 2,
128 | TUNE_DATASET_DF_PREFIX + "y": "x",
129 | TUNE_DATASET_PARAMS_PREFIX: cloudpickle.dumps(
130 | [{"b": 10, "a": 11}, {"b": 100, "a": 110}],
131 | ),
132 | }
133 | res2 = _to_trail_row(data2, {"m": 1})
134 | assert TUNE_DATASET_PARAMS_PREFIX not in res2
135 | trials2 = from_base64(res2[TUNE_DATASET_TRIALS])
136 | assert 2 == len(trials2)
137 | assert any(trials2[0].trial_id == x.trial_id for x in trials1)
138 | assert any(trials2[1].trial_id == x.trial_id for x in trials1)
139 |
140 | data3 = {
141 | "a": 10,
142 | "b": 2,
143 | TUNE_DATASET_DF_PREFIX + "y": "x",
144 | TUNE_DATASET_PARAMS_PREFIX: cloudpickle.dumps(
145 | [{"b": 10, "a": 11}, {"b": 100, "a": 110}],
146 | ),
147 | }
148 | res3 = _to_trail_row(data3, {"m": 1})
149 | trials3 = from_base64(res3[TUNE_DATASET_TRIALS])
150 |     assert 2 == len(trials3)
151 | assert not any(trials3[0].trial_id == x.trial_id for x in trials1)
152 | assert not any(trials3[1].trial_id == x.trial_id for x in trials1)
153 |
--------------------------------------------------------------------------------
/tests/tune/concepts/test_logger.py:
--------------------------------------------------------------------------------
1 | from tune.concepts.logger import (
2 | MetricLogger,
3 | get_current_metric_logger,
4 | set_current_metric_logger,
5 | )
6 |
7 |
8 | def test_logger_context():
9 | m1 = MetricLogger()
10 | m2 = MetricLogger()
11 |
12 | with set_current_metric_logger(m1) as mm1:
13 | assert get_current_metric_logger() is m1
14 | assert mm1 is m1
15 | with set_current_metric_logger(m2) as mm2:
16 | assert get_current_metric_logger() is m2
17 | assert mm2 is m2
18 | assert get_current_metric_logger() is m1
19 |
--------------------------------------------------------------------------------
/tests/tune/iterative/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/iterative/__init__.py
--------------------------------------------------------------------------------
/tests/tune/iterative/test_objective.py:
--------------------------------------------------------------------------------
1 | from fs.base import FS as FSBase
2 | from triad import FileSystem
3 | from tune.iterative.objective import (
4 | IterativeObjectiveFunc,
5 | validate_iterative_objective,
6 | )
7 | from tune.concepts.flow import Trial, TrialDecision, TrialJudge, TrialReport, Monitor
8 |
9 |
10 | class F(IterativeObjectiveFunc):
11 | def __init__(self):
12 | self.v = -10
13 | super().__init__()
14 |
15 | def copy(self) -> "F":
16 | return F()
17 |
18 | def initialize(self) -> None:
19 | self.v = 0
20 |
21 | def finalize(self) -> None:
22 | self.v = -10
23 |
24 | def load_checkpoint(self, fs: FSBase) -> None:
25 | self.v = int(fs.readtext("x"))
26 |
27 | def save_checkpoint(self, fs: FSBase) -> None:
28 | fs.writetext("x", str(self.v))
29 |
30 | def run_single_iteration(self) -> TrialReport:
31 | self.v += 1
32 | return TrialReport(self.current_trial, self.v, metadata={"d": 4})
33 |
34 | def generate_sort_metric(self, value: float) -> float:
35 | return -value
36 |
37 |
38 | class J(TrialJudge):
39 | def __init__(self, schedule):
40 | super().__init__()
41 | self.schedule = schedule
42 |
43 | def can_accept(self, trial: Trial) -> bool:
44 | return True
45 |
46 | def get_budget(self, trial: Trial, rung: int) -> float:
47 | return float(self.schedule[rung]) if rung < len(self.schedule) else 0.0
48 |
49 | def judge(self, report: TrialReport) -> TrialDecision:
50 | self.report = report
51 | return TrialDecision(
52 | report,
53 | budget=0, # stop at each rung
54 | should_checkpoint=True,
55 | metadata={"x": 1},
56 | )
57 |
58 |
59 | class M(Monitor):
60 | def __init__(self):
61 | self._reports = []
62 |
63 | def on_report(self, report: TrialReport) -> None:
64 | return self._reports.append(report)
65 |
66 |
67 | def test_objective_func(tmpdir):
68 | fs = FileSystem().opendir(str(tmpdir))
69 | j = J([3, 3, 2])
70 | f = F().copy()
71 | t = Trial("abc", {"a": 1})
72 | f.run(t, judge=j, checkpoint_basedir_fs=fs)
73 | assert -10 == f.v
74 | f.run(t, judge=j, checkpoint_basedir_fs=fs)
75 | assert -10 == f.v
76 | assert 6.0 == j.report.metric
77 | assert -6.0 == j.report.sort_metric
78 | f.run(t, judge=j, checkpoint_basedir_fs=fs)
79 | assert -10 == f.v
80 | assert 8.0 == j.report.metric
81 | assert -8.0 == j.report.sort_metric
82 |
83 |
84 | def test_validator():
85 | m = M()
86 | for cont in [True, False]:
87 | validate_iterative_objective(
88 | F(),
89 | Trial("abc", {"a": 1}),
90 | [3, 3, 2],
91 | lambda reports: [-3.0, -6.0, -8.0] == [x.sort_metric for x in reports],
92 | continuous=cont,
93 | monitor=m,
94 | )
95 | assert 6 == len(m._reports)
96 |
--------------------------------------------------------------------------------
/tests/tune/iterative/test_sha.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Iterable
2 |
3 | from fugue import FugueWorkflow
4 | from tune.concepts.dataset import TuneDatasetBuilder
5 | from tune.iterative.objective import IterativeObjectiveFunc
6 | from tune import optimize_by_sha, optimize_by_hyperband
7 | from tune.concepts.space import Grid, Space
8 | from tune.concepts.flow import TrialReport
9 | from tune.constants import TUNE_REPORT_METRIC
10 |
11 |
12 | class F(IterativeObjectiveFunc):
13 | def __init__(self):
14 | super().__init__()
15 | self._it = 0
16 | self._all = [
17 | [9, 3, 1, 1],
18 | [8, 6, 5, 5],
19 | [8, 5, 4, 3],
20 | [7, 4, 3, 4],
21 | ]
22 |
23 | def save_checkpoint(self, fs):
24 | fs.writetext("x", str(self._it))
25 |
26 | def load_checkpoint(self, fs):
27 | self._it = int(fs.readtext("x"))
28 |
29 | def run_single_iteration(self):
30 | trial = self.current_trial
31 | metric = self._all[trial.params.simple_value["a"]][self._it]
32 | self._it += 1
33 | return TrialReport(trial, metric=metric)
34 |
35 | def copy(self):
36 | return F()
37 |
38 |
39 | def test_sha(tmpdir):
40 | def assert_metric(df: Iterable[Dict[str, Any]], metric: float, ct: int) -> None:
41 | n = 0
42 | for row in df:
43 | assert row[TUNE_REPORT_METRIC] == metric
44 | n += 1
45 | assert n == ct
46 |
47 | space = Space(a=Grid(0, 1, 2, 3))
48 | dag = FugueWorkflow()
49 | dataset = TuneDatasetBuilder(space, str(tmpdir)).build(dag)
50 | obj = F()
51 | res = optimize_by_sha(
52 | obj,
53 | dataset,
54 | plan=[[1.0, 3], [1.0, 2], [1.0, 1], [1.0, 1]],
55 | checkpoint_path=str(tmpdir),
56 | )
57 | res.result().output(assert_metric, dict(metric=4.0, ct=1))
58 |
59 | res = optimize_by_sha(
60 | obj,
61 | dataset,
62 | plan=[[2.0, 2], [1.0, 1], [1.0, 1]],
63 | checkpoint_path=str(tmpdir),
64 | )
65 | res.result().output(assert_metric, dict(metric=1.0, ct=1))
66 | dag.run()
67 |
68 |
69 | def test_hyperband(tmpdir):
70 | def assert_metric(df: Iterable[Dict[str, Any]], metric: float, ct: int) -> None:
71 | n = 0
72 | for row in df:
73 | if metric > 0:
74 | assert row[TUNE_REPORT_METRIC] == metric
75 | n += 1
76 | assert n == ct
77 |
78 | space = Space(a=Grid(0, 1, 2, 3))
79 | dag = FugueWorkflow()
80 | dataset = TuneDatasetBuilder(space, str(tmpdir)).build(dag)
81 | obj = F()
82 | res = optimize_by_hyperband(
83 | obj,
84 | dataset,
85 | plans=[
86 | [[1.0, 3], [1.0, 2], [1.0, 1], [1.0, 1]],
87 | [[2.0, 2], [1.0, 1], [1.0, 1]],
88 | ],
89 | checkpoint_path=str(tmpdir),
90 | )
91 | res.result().output(assert_metric, dict(metric=0.0, ct=2))
92 | res.result(1).output(assert_metric, dict(metric=1.0, ct=1))
93 | dag.run()
94 |
--------------------------------------------------------------------------------
/tests/tune/iterative/test_study.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Iterable
2 | import numpy as np
3 | from fugue.workflow.workflow import FugueWorkflow
4 | from tune import Space, Trial, TrialDecision, TrialReport
5 | from tune.constants import TUNE_REPORT_METRIC
6 | from tune.concepts.dataset import TuneDatasetBuilder
7 | from tune.iterative.objective import IterativeObjectiveFunc
8 | from tune.iterative.study import IterativeStudy
9 | from tune.concepts.flow import TrialJudge
10 |
11 |
12 | def f(x, a, b):
13 | return -np.log(x + 0.01) * a + b
14 |
15 |
16 | class F(IterativeObjectiveFunc):
17 | def __init__(self) -> None:
18 | self.step = 0
19 | super().__init__()
20 |
21 | def copy(self) -> "F":
22 | return F()
23 |
24 | def initialize(self) -> None:
25 | assert self.step == 0 # because of copy
26 |
27 | def run_single_iteration(self) -> TrialReport:
28 | self.step += 1
29 | trial = self.current_trial
30 | return TrialReport(
31 | trial=trial,
32 | metric=f(
33 | self.step,
34 | trial.params.simple_value["a"],
35 | trial.params.simple_value["b"],
36 | ),
37 | )
38 |
39 |
40 | class J(TrialJudge):
41 | def __init__(self, schedule):
42 | super().__init__()
43 | self.schedule = schedule
44 |
45 | def can_accept(self, trial: Trial) -> bool:
46 | return True
47 |
48 | def get_budget(self, trial: Trial, rung: int) -> float:
49 | return float(self.schedule[rung]) if rung < len(self.schedule) else 0.0
50 |
51 | def judge(self, report: TrialReport) -> TrialDecision:
52 | return TrialDecision(
53 | report,
54 | budget=self.get_budget(report.trial, report.rung + 1),
55 | should_checkpoint=False,
56 | metadata={},
57 | )
58 |
59 |
60 | def test_iterative_study(tmpdir):
61 | def assert_metric(df: Iterable[Dict[str, Any]], metric: float) -> None:
62 | for row in df:
63 | assert row[TUNE_REPORT_METRIC] < metric
64 |
65 | study = IterativeStudy(F(), str(tmpdir))
66 | space = sum(
67 | Space(a=a, b=b)
68 | for a, b in [(1.1, 0.2), (0.8, -0.2), (1.2, -0.1), (0.7, 0.3), (1.0, 1.5)]
69 | )
70 | dag = FugueWorkflow()
71 | dataset = TuneDatasetBuilder(space, str(tmpdir)).build(dag)
72 | result = study.optimize(
73 | dataset,
74 | J([1, 2, 3, 4]),
75 | )
76 | result.result(1).show()
77 | result.result(1).output(assert_metric, params=dict(metric=-2.8))
78 |
79 | dag.run()
80 |
--------------------------------------------------------------------------------
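A note on the test above: the `J` judge is the entire control loop of the iterative study. `get_budget` maps a rung index to the next budget, and a budget of 0.0 ends the trial. A minimal, dependency-free sketch of that schedule logic:

schedule = [1, 2, 3, 4]  # budgets per rung, as in J above

def get_budget(rung: int) -> float:
    # one schedule entry per rung; an exhausted schedule means "stop"
    return float(schedule[rung]) if rung < len(schedule) else 0.0

assert get_budget(0) == 1.0
assert get_budget(4) == 0.0  # no budget left, so the trial stops here

--------------------------------------------------------------------------------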
/tests/tune/noniterative/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune/noniterative/__init__.py
--------------------------------------------------------------------------------
/tests/tune/noniterative/test_convert.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Tuple
2 |
3 | from pytest import raises
4 | from tune.exceptions import TuneCompileError
5 | from tune.concepts.flow import Trial, TrialReport
6 | from tune.noniterative.convert import noniterative_objective, to_noniterative_objective
7 | from tune.noniterative.objective import NonIterativeObjectiveFunc
8 |
9 |
10 | def test_to_noniterative_objective():
11 | def f1(a, b: int) -> float:
12 | return a - b
13 |
14 | func = to_noniterative_objective(f1)
15 | assert func.min_better
16 | trial = Trial("abc", dict(b=20, a=10), dict(c=3))
17 | report = func.safe_run(trial)
18 | assert report.trial is trial
19 | assert report.metric == -10
20 | assert report.sort_metric == -10
21 | assert report.params == trial.params
22 | assert report.metadata == {}
23 |
24 | func = to_noniterative_objective("f1", min_better=False)
25 | assert not func.min_better
26 | trial = Trial("abc", dict(b=20, a=10), dict(c=3))
27 | report = func.safe_run(trial)
28 | assert report.trial is trial
29 | assert report.metric == -10
30 | assert report.sort_metric == 10
31 | assert report.params == trial.params
32 | assert report.metadata == {}
33 |
34 | assert -1 == func(1, 2)
35 |
36 | def f2(a, b: int) -> Tuple[float, Dict[str, Any]]:
37 | return a - b, dict(c=5)
38 |
39 | func = to_noniterative_objective(f2)
40 | trial = Trial("abc", dict(b=20, a=10), dict(c=3))
41 | report = func.safe_run(trial)
42 | assert report.trial is trial
43 | assert report.metric == -10
44 | assert report.params == trial.params
45 | assert report.metadata == dict(c=5)
46 |
47 | def f3(t: Trial) -> TrialReport:
48 | return TrialReport(
49 | t,
50 | t.params.simple_value["a"] - t.params.simple_value["b"],
51 | params=dict(a=1),
52 | metadata=dict(c=6),
53 | )
54 |
55 | func = to_noniterative_objective(f3)
56 | trial = Trial("abc", dict(b=20, a=10), dict(c=3))
57 | report = func.safe_run(trial)
58 | assert report.trial is trial
59 | assert report.metric == -10
60 | assert report.params == dict(a=1)
61 | assert report.metadata == dict(c=6)
62 |
63 | class F4(NonIterativeObjectiveFunc):
64 | def run(self, t: Trial) -> TrialReport:
65 | return TrialReport(
66 | t, t.params["a"] - t.params["b"], params=dict(a=1), metadata=dict(c=6)
67 | )
68 |
69 | f4 = F4()
70 | f4_ = to_noniterative_objective(f4)
71 | assert isinstance(f4_, F4)
72 | assert f4 is not f4_
73 |
74 | raises(TuneCompileError, lambda: to_noniterative_objective("abc"))
75 |
76 |
77 | def test_noniterative_objective():
78 | @noniterative_objective
79 | def f1(a, b: int) -> float:
80 | return a - b
81 |
82 | assert isinstance(f1, NonIterativeObjectiveFunc)
83 | trial = Trial("abc", dict(b=20, a=10), dict(c=3))
84 | report = f1.safe_run(trial)
85 | assert report.trial is trial
86 | assert report.metric == -10
87 | assert report.params == trial.params
88 | assert report.metadata == {}
89 |
90 | func = to_noniterative_objective("f1")
91 | report = func.safe_run(trial)
92 | assert report.trial is trial
93 | assert report.metric == -10
94 | assert report.params == trial.params
95 | assert report.metadata == {}
96 |
97 | @noniterative_objective(min_better=False)
98 | def f2(a, b: int) -> float:
99 | return a - b
100 |
101 | assert isinstance(f2, NonIterativeObjectiveFunc)
102 | assert not f2.min_better
103 |
--------------------------------------------------------------------------------
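For reference, a minimal sketch (assuming the tune package is installed) of the decorator form tested above. With min_better=False, `metric` is kept as returned but `sort_metric` is negated, so a smaller sort_metric always means a better trial:

from tune import Trial, noniterative_objective

@noniterative_objective(min_better=False)
def score(a, b) -> float:
    return a + b

report = score.safe_run(Trial("t0", dict(a=4, b=5)))
assert report.metric == 9
assert report.sort_metric == -9  # negated because larger is better

--------------------------------------------------------------------------------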
/tests/tune/noniterative/test_stopper.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from time import sleep
3 |
4 | from pytest import raises
5 | from tune.concepts.flow.report import TrialReport
6 | from tune.concepts.flow.trial import Trial
7 | from tune.noniterative.stopper import (
8 | SimpleNonIterativeStopper,
9 | n_samples,
10 | n_updates,
11 | no_update_period,
12 | small_improvement,
13 | )
14 |
15 |
16 | def test_simple_stopper():
17 | r1 = mr([], 0.2)
18 | r2 = mr(["x"], 0.4)
19 | r3 = mr([], 0.1)
20 | r4 = mr(["x"], 0.5)
21 |
22 | s = MockSimpleStopper(lambda latest, updated, reports: len(reports) >= 1)
23 | assert 0 == len(s.get_reports(r1.trial))
24 | assert s.can_accept(r1.trial)
25 | s.judge(r1)
26 | assert s._last is r1
27 |
28 | assert s.can_accept(r2.trial)
29 | s.judge(r2)
30 | assert s._last is r2
31 |
32 | assert not s.can_accept(r3.trial)
33 | assert not s.can_accept(r4.trial)
34 |
35 |
36 | def test_logical_ops():
37 | r1 = mr([], 0.5)
38 | r2 = mr([], 0.4)
39 | r3 = mr([], 0.3)
40 | r4 = mr([], 0.2)
41 | take_two = MockSimpleStopper(lambda latest, updated, reports: len(reports) >= 2)
42 | ends_small = MockSimpleStopper(
43 | lambda latest, updated, reports: reports[-1].sort_metric <= 0.3
44 | )
45 |
46 | s = take_two & ends_small
47 | assert s.can_accept(r1.trial)
48 | s.judge(r1)
49 | assert s.can_accept(r2.trial)
50 | s.judge(r2)
51 | assert s.can_accept(r3.trial)
52 | s.judge(r3)
53 | assert not s.can_accept(r4.trial)
54 |
55 | with raises(AssertionError):
56 | take_two | ends_small # can't reuse updated stoppers
57 |
58 | take_two = MockSimpleStopper(lambda latest, updated, reports: len(reports) >= 2)
59 | ends_small = MockSimpleStopper(
60 | lambda latest, updated, reports: reports[-1].sort_metric <= 0.3
61 | )
62 |
63 | s = take_two | ends_small
64 | assert s.can_accept(r1.trial)
65 | s.judge(r1)
66 | assert s.can_accept(r2.trial)
67 | s.judge(r2)
68 | assert not s.can_accept(r3.trial)
69 |
70 |
71 | def test_n_samples():
72 | r1 = mr([], 0.1)
73 | r2 = mr([], 0.4)
74 | r3 = mr([], 0.3)
75 |
76 | s = n_samples(2)
77 | assert s.can_accept(r1.trial)
78 | s.judge(r1)
79 | assert s.can_accept(r2.trial)
80 | s.judge(r2)
81 | assert not s.can_accept(r3.trial)
82 |
83 |
84 | def test_n_updates():
85 | r1 = mr([], 0.4)
86 | r2 = mr([], 0.5)
87 | r3 = mr([], 0.3)
88 | r4 = mr([], 0.2)
89 |
90 | s = n_updates(2)
91 | assert s.can_accept(r1.trial)
92 | s.judge(r1)
93 | assert s.can_accept(r2.trial)
94 | s.judge(r2)
95 | assert s.can_accept(r3.trial)
96 | s.judge(r3)  # the second update, so n_updates(2) is satisfied
97 | assert not s.can_accept(r4.trial)
98 |
99 |
100 | def test_no_update_period():
101 | r1 = mr([], 0.1)
102 | r2 = mr([], 0.4)
103 | r3 = mr([], 0.3)
104 | r4 = mr([], 0.3)
105 |
106 | s = no_update_period("0.2sec")
107 | assert s.can_accept(r1.trial)
108 | s.judge(r1)
109 | sleep(0.5)
110 | assert s.can_accept(r2.trial)
111 | s.judge(r2)  # not an update and the timeout was exceeded, so trigger the shutdown
112 | assert not s.can_accept(r3.trial)
113 |
114 | s = no_update_period("0.2sec")
115 | assert s.can_accept(r2.trial)
116 | s.judge(r2)
117 | sleep(0.5)
118 | assert s.can_accept(r3.trial)
119 | s.judge(r3)  # an update will not trigger the shutdown
120 | assert s.can_accept(r1.trial)
121 | s.judge(r1)
122 | assert s.can_accept(r4.trial)
123 | s.judge(r4)
124 |
125 |
126 | def test_small_improvement():
127 | r1 = mr([], 0.5)
128 | r2 = mr([], 0.4)
129 | r22 = mr([], 0.51)
130 | r3 = mr([], 0.39)
131 | r4 = mr([], 0.2)
132 |
133 | s = small_improvement(0.09, 1)
134 | assert s.can_accept(r1.trial)
135 | s.judge(r1)
136 | assert s.can_accept(r2.trial)
137 | s.judge(r2)
138 | assert s.can_accept(r3.trial)
139 | s.judge(r3)
140 | assert not s.can_accept(r4.trial)
141 |
142 | s = small_improvement(0.25, 2)
143 | assert s.can_accept(r1.trial)
144 | s.judge(r1)
145 | assert s.can_accept(r2.trial)
146 | s.judge(r2)
147 | assert s.can_accept(r22.trial)
148 | s.judge(r22) # if not an update, it doesn't count
149 | assert s.can_accept(r3.trial)
150 | s.judge(r3)
151 | assert not s.can_accept(r4.trial)
152 |
153 |
154 | class MockSimpleStopper(SimpleNonIterativeStopper):
155 | def __init__(self, func):
156 | super().__init__(
157 | partition_should_stop=self.partition_should_stop, log_best_only=False
158 | )
159 | self._last = None
160 | self._func = func
161 |
162 | def partition_should_stop(self, latest_report, updated, reports) -> bool:
163 | self._last = latest_report
164 | return self._func(latest_report, updated, reports)
165 |
166 |
167 | def mr(keys, metric):
168 | t = Trial(str(metric), dict(a=1), keys=keys)
169 | return TrialReport(t, metric, log_time=datetime.now())
170 |
--------------------------------------------------------------------------------
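As test_logical_ops shows, stoppers compose with `&` and `|` (and a stopper instance that has already judged reports cannot be reused in a new combination). A small usage sketch, assuming the tune package is installed:

from tune import n_samples, small_improvement

# stop after 100 trials, or once the best sort_metric has improved by less
# than 0.01 for 3 consecutive updates
stopper = n_samples(100) | small_improvement(0.01, 3)

--------------------------------------------------------------------------------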
/tests/tune_hyperopt/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune_hyperopt/__init__.py
--------------------------------------------------------------------------------
/tests/tune_hyperopt/test_local_optimizer.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from hyperopt.early_stop import no_progress_loss
4 | from tune import parse_noniterative_local_optimizer
5 | from tune.noniterative.objective import NonIterativeObjectiveLocalOptimizer
6 | from tune_test.local_optmizer import NonIterativeObjectiveLocalOptimizerTests
7 |
8 | from tune_hyperopt import HyperoptLocalOptimizer
9 |
10 |
11 | class HyperoptLocalOptimizerTests(NonIterativeObjectiveLocalOptimizerTests.Tests):
12 | def make_optimizer(self, **kwargs: Any) -> NonIterativeObjectiveLocalOptimizer:
13 | kwargs = {"seed": 0, "kwargs_func": _add_conf, **kwargs}
14 | return HyperoptLocalOptimizer(**kwargs)
15 |
16 |
17 | def _add_conf(func, trial):
18 | return dict(early_stop_fn=no_progress_loss(50))
19 |
20 |
21 | def test_parse_noniterative_local_optimizer():
22 | o1 = parse_noniterative_local_optimizer("hyperopt")
23 | assert isinstance(o1, HyperoptLocalOptimizer)
24 | assert o1._max_iter == 30
25 |
26 | o2 = parse_noniterative_local_optimizer("hyperopt:10")
27 | assert isinstance(o2, HyperoptLocalOptimizer)
28 | assert o2._max_iter == 10
29 |
--------------------------------------------------------------------------------
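The strings parsed above follow a `name[:max_iter]` mini-syntax. A sketch of the same call with a different (illustrative) iteration count:

from tune import parse_noniterative_local_optimizer

opt = parse_noniterative_local_optimizer("hyperopt:100")
assert opt._max_iter == 100

--------------------------------------------------------------------------------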
/tests/tune_notebook/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune_notebook/__init__.py
--------------------------------------------------------------------------------
/tests/tune_notebook/test_monitors.py:
--------------------------------------------------------------------------------
1 | from tune import Trial, TrialReport
2 |
3 | from tune_notebook import (
4 | NotebookSimpleChart,
5 | NotebookSimpleHist,
6 | NotebookSimpleRungs,
7 | NotebookSimpleTimeSeries,
8 | PrintBest,
9 | )
10 | from tune_notebook.monitors import _ReportBin
11 |
12 |
13 | def test_report_bin():
14 | t1 = Trial("a", dict(a=1, b=2), keys=["x", "y"])
15 | r1 = TrialReport(t1, 0.8, sort_metric=-0.8)
16 | t2 = Trial("b", dict(a=11, b=12), keys=["xx", "y"])
17 | r2 = TrialReport(t2, 0.7, sort_metric=-0.7)
18 | t3 = Trial("c", dict(a=10, b=20), keys=["x", "y"])
19 | r3 = TrialReport(t3, 0.9, sort_metric=-0.9)
20 |
21 | b = _ReportBin()
22 | assert 0 == len(b.records)
23 | assert b.best is None
24 | assert b.on_report(r1)
25 | assert b.on_report(r3)
26 | assert r3 is b.best
27 | assert 2 == len(b.records)
28 |
29 | b = _ReportBin()
30 | assert b.on_report(r3)
31 | assert not b.on_report(r1)
32 | assert r3 is b.best
33 | assert 2 == len(b.records)
34 |
35 | b = _ReportBin(new_best_only=True)
36 | assert b.on_report(r3)
37 | assert not b.on_report(r1)
38 | assert r3 is b.best
39 | assert 1 == len(b.records)
40 |
41 |
42 | def test_print_best():
43 | t1 = Trial("a", dict(a=1, b=2), keys=["x", "y"])
44 | r1 = TrialReport(t1, 0.8, sort_metric=-0.8)
45 | t2 = Trial("b", dict(a=11, b=12), keys=["xx", "y"])
46 | r2 = TrialReport(t2, 0.7, sort_metric=-0.7)
47 | t3 = Trial("c", dict(a=10, b=20), keys=["x", "y"])
48 | r3 = TrialReport(t3, 0.9, sort_metric=-0.9)
49 |
50 | b = PrintBest()
51 | b.on_report(r3)
52 | b.on_report(r2)
53 | b.on_report(r1)
54 |
55 |
56 | def _test_charts():
57 | t1 = Trial("a", dict(a=1, b=2), keys=["x", "y"])
58 | r1 = TrialReport(t1, 0.8, sort_metric=-0.8)
59 | t2 = Trial("b", dict(a=11, b=12), keys=["xx", "y"])
60 | r2 = TrialReport(t2, 0.7, sort_metric=-0.7)
61 | t3 = Trial("c", dict(a=10, b=20), keys=["x", "y"])
62 | r3 = TrialReport(t3, 0.9, sort_metric=-0.9)
63 |
64 | b = NotebookSimpleTimeSeries("1sec")
65 | b.on_report(r3)
66 | b.on_report(r2)
67 | b.on_report(r1)
68 |
69 | b = NotebookSimpleHist("2sec")
70 | b.on_report(r3)
71 | b.on_report(r2)
72 | b.on_report(r1)
73 |
74 | b = NotebookSimpleRungs("3sec")
75 | b.on_report(r3)
76 | b.on_report(r2)
77 | b.on_report(r1)
78 |
--------------------------------------------------------------------------------
/tests/tune_optuna/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune_optuna/__init__.py
--------------------------------------------------------------------------------
/tests/tune_optuna/test_local_optimizer.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from tune import parse_noniterative_local_optimizer
4 | from tune.noniterative.objective import NonIterativeObjectiveLocalOptimizer
5 | from tune_test.local_optmizer import NonIterativeObjectiveLocalOptimizerTests
6 |
7 | from tune_optuna.optimizer import OptunaLocalOptimizer
8 |
9 |
10 | class OptunaLocalOptimizerTests(NonIterativeObjectiveLocalOptimizerTests.Tests):
11 | def make_optimizer(self, **kwargs: Any) -> NonIterativeObjectiveLocalOptimizer:
12 | if "max_iter" in kwargs and len(kwargs) == 1:
13 | return parse_noniterative_local_optimizer(
14 | "optuna:" + str(kwargs["max_iter"])
15 | )
16 | return OptunaLocalOptimizer(**kwargs)
17 |
18 |
19 | def test_parse_noniterative_local_optimizer():
20 | o1 = parse_noniterative_local_optimizer("optuna")
21 | assert isinstance(o1, OptunaLocalOptimizer)
22 | assert o1._max_iter == 30
23 |
24 | o2 = parse_noniterative_local_optimizer("optuna:10")
25 | assert isinstance(o2, OptunaLocalOptimizer)
26 | assert o2._max_iter == 10
27 |
--------------------------------------------------------------------------------
/tests/tune_sklearn/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune_sklearn/__init__.py
--------------------------------------------------------------------------------
/tests/tune_sklearn/test_objective.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | from sklearn.linear_model import LogisticRegression
3 | from tune import RandInt, Trial
4 | from tune.constants import SPACE_MODEL_NAME, TUNE_DATASET_DF_DEFAULT_NAME
5 | from tune.noniterative.objective import validate_noniterative_objective
6 | from tune_hyperopt import HyperoptLocalOptimizer
7 |
8 | from tune_sklearn.objective import SKCVObjective
9 |
10 |
11 | def test_objective(tmpdir):
12 | dfs = load_iris(as_frame=True)
13 | df = dfs["data"]
14 | df["label"] = dfs["target"]
15 | df = df[df.label <= 1]
16 |
17 | t = Trial(
18 | "x",
19 | params={
20 | "max_iter": RandInt(2, 4),
21 | SPACE_MODEL_NAME: "sklearn.linear_model.LogisticRegression",
22 | },
23 | dfs={TUNE_DATASET_DF_DEFAULT_NAME: df},
24 | )
25 | obj = SKCVObjective(scoring="accuracy")
26 | optimizer = HyperoptLocalOptimizer(5, 0)
27 |
28 | def v(report):
29 | print(report)
30 | assert report.sort_metric < 0
31 | assert "cv_scores" in report.metadata
32 | # assert report.trial.params["max_iter"] >= 2
33 |
34 | validate_noniterative_objective(obj, t, v, optimizer=optimizer)
35 |
36 | obj = SKCVObjective(scoring="accuracy", checkpoint_path=str(tmpdir))
37 |
38 | def v2(report):
39 | print(report)
40 | assert report.sort_metric < 0
41 | assert "cv_scores" in report.metadata
42 | assert "checkpoint_path" in report.metadata
43 | # assert report.trial.params["max_iter"] >= 2
44 |
45 | validate_noniterative_objective(obj, t, v2, optimizer=optimizer)
46 |
--------------------------------------------------------------------------------
/tests/tune_sklearn/test_suggest.py:
--------------------------------------------------------------------------------
1 | import fugue.test as ft
2 | from sklearn.datasets import load_diabetes
3 | from sklearn.linear_model import Lasso, LinearRegression
4 |
5 | from tune import TUNE_OBJECT_FACTORY, Grid, Rand
6 | from tune_hyperopt.optimizer import HyperoptLocalOptimizer
7 | from tune_sklearn import sk_space, suggest_sk_models, suggest_sk_models_by_cv
8 |
9 | # from fugue_spark import SparkExecutionEngine
10 |
11 |
12 | @ft.with_backend("dask")
13 | def test_suggest(tmpdir):
14 | TUNE_OBJECT_FACTORY.set_temp_path(str(tmpdir))
15 |
16 | data = load_diabetes(as_frame=True)
17 | alldata = data["data"]
18 | alldata["label"] = data["target"]
19 | alldata = alldata.sample(frac=1, random_state=0)
20 | test = alldata.iloc[:20]
21 | train = alldata.iloc[20:]
22 |
23 | space1 = sk_space(LinearRegression, fit_intercept=Grid(True, False))
24 | space2 = sk_space(
25 | Lasso, fit_intercept=Grid(True, False), alpha=Rand(0.5, 1)
26 | ).sample(3, 0)
27 | space3 = sk_space(Lasso, fit_intercept=Grid(True, False), alpha=Rand(0.5, 1))
28 | result = suggest_sk_models(
29 | space1 + space3,
30 | train,
31 | test,
32 | "neg_mean_absolute_error",
33 | top_n=0,
34 | distributed=False,
35 | local_optimizer=HyperoptLocalOptimizer(max_iter=10, seed=0),
36 | execution_engine="native",
37 | )
38 | assert 4 == len(result)
39 | assert 50 > result[0].sort_metric
40 |
41 | result = suggest_sk_models(
42 | space1 + space2,
43 | train,
44 | test,
45 | "neg_mean_absolute_error",
46 | top_n=0,
47 | partition_keys=["sex"],
48 | temp_path=str(tmpdir),
49 | save_model=True,
50 | execution_engine="native",
51 | )
52 | assert 16 == len(result)
53 | assert 50 > result[0].sort_metric
54 |
55 | result = suggest_sk_models(
56 | space1 + space2,
57 | train,
58 | test,
59 | "neg_mean_absolute_error",
60 | top_n=1,
61 | partition_keys=["sex"],
62 | execution_engine="dask",
63 | )
64 | assert 2 == len(result)
65 | assert 50 > result[0].sort_metric
66 |
67 |
68 | @ft.with_backend("dask")
69 | def test_suggest_cv(tmpdir):
70 | TUNE_OBJECT_FACTORY.set_temp_path(str(tmpdir))
71 |
72 | data = load_diabetes(as_frame=True)
73 | train = data["data"]
74 | train["label"] = data["target"]
75 |
76 | space1 = sk_space(LinearRegression, fit_intercept=Grid(True, False))
77 | space2 = sk_space(
78 | Lasso, fit_intercept=Grid(True, False), alpha=Rand(0.5, 1)
79 | ).sample(3, 0)
80 | space3 = sk_space(Lasso, fit_intercept=Grid(True, False), alpha=Rand(0.5, 1))
81 | result = suggest_sk_models_by_cv(
82 | space1 + space3,
83 | train,
84 | "neg_mean_absolute_error",
85 | top_n=0,
86 | distributed=False,
87 | local_optimizer=HyperoptLocalOptimizer(max_iter=10, seed=0),
88 | execution_engine="native",
89 | )
90 | assert 4 == len(result)
91 | assert 50 > result[0].sort_metric
92 |
93 | result = suggest_sk_models_by_cv(
94 | space1 + space2,
95 | train,
96 | "neg_mean_absolute_error",
97 | top_n=0,
98 | partition_keys=["sex"],
99 | temp_path=str(tmpdir),
100 | save_model=True,
101 | execution_engine="native",
102 | )
103 | assert 16 == len(result)
104 | assert 50 > result[0].sort_metric
105 |
106 | result = suggest_sk_models_by_cv(
107 | space1 + space2,
108 | train,
109 | "neg_mean_absolute_error",
110 | top_n=1,
111 | partition_keys=["sex"],
112 | execution_engine="dask",
113 | )
114 | assert 2 == len(result)
115 | assert 50 > result[0].sort_metric
116 |
117 | # TODO: figure out why the following unit test is so slow
118 |
119 | # result = suggest_sk_models_by_cv(
120 | # space1 + space2,
121 | # train,
122 | # "neg_mean_absolute_error",
123 | # top_n=1,
124 | # partition_keys=["sex"],
125 | # execution_engine=SparkExecutionEngine,
126 | # execution_engine_conf={
127 | # "spark.sql.shuffle.partitions": 4,
128 | # "spark.default.parallelism": 4,
129 | # "spark.executor.cores": 4,
130 | # "spark.sql.adaptive.enabled": "false",
131 | # },
132 | # )
133 | # assert 2 == len(result)
134 | # assert 50 > result[0].sort_metric
135 |
--------------------------------------------------------------------------------
/tests/tune_sklearn/test_utils.py:
--------------------------------------------------------------------------------
1 | from pytest import raises
2 | from tune import Grid
3 | from tune.constants import SPACE_MODEL_NAME
4 |
5 | from tune_sklearn.utils import sk_space, to_sk_model, to_sk_model_expr
6 |
7 |
8 | def test_conversion():
9 | model = to_sk_model("sklearn.linear_model.LinearRegression")
10 | expr = to_sk_model_expr(model)
11 | assert "sklearn.linear_model._base.LinearRegression" == expr
12 | expr = to_sk_model_expr("sklearn.linear_model.LinearRegression")
13 | assert "sklearn.linear_model._base.LinearRegression" == expr
14 |
15 | raises(TypeError, lambda: to_sk_model("int"))
16 |
17 |
18 | def test_sk_space():
19 | rows = list(
20 | sk_space(
21 | "sklearn.linear_model.LinearRegression", fit_intercept=Grid(True, False)
22 | )
23 | )
24 | assert 2 == len(rows)
25 | assert "sklearn.linear_model._base.LinearRegression" == rows[0].simple_value[SPACE_MODEL_NAME]
26 |
--------------------------------------------------------------------------------
/tests/tune_tensorflow/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tests/tune_tensorflow/__init__.py
--------------------------------------------------------------------------------
/tests/tune_tensorflow/mock.py:
--------------------------------------------------------------------------------
1 | from tensorflow.keras import layers, models
2 | from tensorflow.keras.datasets import boston_housing
3 |
4 | from tune_tensorflow import KerasTrainingSpec
5 |
6 |
7 | class MockSpec(KerasTrainingSpec):
8 | def __init__(self, params, dfs):
9 | super().__init__(params, dfs)
10 | (self.train_data, self.train_targets), (
11 | self.test_data,
12 | self.test_targets,
13 | ) = boston_housing.load_data()
14 |
15 | def get_model(self):
16 | l1, l2 = self.params.simple_value["l1"], self.params.simple_value["l2"]
17 | model = models.Sequential()
18 | model.add(
19 | layers.Dense(l1, activation="relu", input_shape=(self.train_data.shape[1],))
20 | )
21 | model.add(layers.Dense(l2, activation="relu"))
22 | model.add(layers.Dense(1))
23 | return model
24 |
25 | def get_compile_params(self):
26 | return dict(optimizer="rmsprop", loss="mse", metrics=["mae"])
27 |
28 | def get_fit_params(self):
29 | return [self.train_data, self.train_targets], dict(
30 | validation_data=(self.test_data, self.test_targets), shuffle=True
31 | )
32 |
33 | def get_fit_metric(self, history):
34 | return float(history.history["val_mae"][-1])
35 |
36 | def generate_sort_metric(self, metric: float) -> float:
37 | return metric
38 |
--------------------------------------------------------------------------------
/tests/tune_tensorflow/test_objective.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | from tests.tune_tensorflow.mock import MockSpec
3 | from tune import Trial, validate_iterative_objective
4 |
5 | from tune_tensorflow import keras_space
6 | from tune_tensorflow.objective import KerasObjective
7 | from tune_tensorflow.utils import _TYPE_DICT
8 |
9 |
10 | def test_spec():
11 | spec = MockSpec(dict(l1=16, l2=16), {})
12 | metric = spec.compute_sort_metric(epochs=10)
13 | assert metric < 15
14 |
15 | spec = MockSpec(dict(l1=16, l2=16), {"x": pd.DataFrame([[0]], columns=["a"])})
16 | metric = spec.compute_sort_metric(epochs=10)
17 | assert metric < 15
18 | assert isinstance(spec.dfs["x"], pd.DataFrame)
19 |
20 |
21 | def test_objective():
22 | def validate(reports):
23 | assert reports[-1].metric < 15
24 |
25 | space = keras_space(MockSpec, l1=16, l2=16)
26 | obj = KerasObjective(_TYPE_DICT)
27 |
28 | for cont in [True, False]:
29 | validate_iterative_objective(
30 | obj,
31 | Trial("a", params=list(space)[0]),
32 | budgets=[3, 3, 4],
33 | continuous=cont,
34 | validator=validate,
35 | )
36 |
--------------------------------------------------------------------------------
/tests/tune_tensorflow/test_suggest.py:
--------------------------------------------------------------------------------
1 | from pytest import raises
2 | from tune import TUNE_OBJECT_FACTORY, RandInt
3 | from tune.exceptions import TuneCompileError
4 |
5 | from tune_tensorflow import (
6 | keras_space,
7 | suggest_keras_models_by_continuous_asha,
8 | suggest_keras_models_by_hyperband,
9 | suggest_keras_models_by_sha,
10 | )
11 |
12 | from tests.tune_tensorflow.mock import MockSpec
13 |
14 |
15 | def test_sha(tmpdir):
16 | TUNE_OBJECT_FACTORY.set_temp_path(str(tmpdir))
17 |
18 | space = keras_space(MockSpec, l1=RandInt(8, 16), l2=RandInt(8, 24))
19 | with raises(TuneCompileError):
20 | suggest_keras_models_by_sha(space, plan=[(2.0, 4), (4.0, 2)])
21 |
22 | space = space.sample(6, 0)
23 | reports = suggest_keras_models_by_sha(space, plan=[(2.0, 4), (4.0, 2)], top_n=2)
24 | for r in reports:
25 | print(r)
26 | assert 2 == len(reports)
27 |
28 |
29 | def test_hyperband(tmpdir):
30 | TUNE_OBJECT_FACTORY.set_temp_path(str(tmpdir))
31 |
32 | space = keras_space(MockSpec, l1=RandInt(8, 16), l2=RandInt(8, 24))
33 | with raises(TuneCompileError):
34 | suggest_keras_models_by_hyperband(
35 | space,
36 | plans=[
37 | [(2.0, 4), (4.0, 2)],
38 | [(4.0, 2), (2.0, 4)],
39 | ],
40 | )
41 |
42 | space = space.sample(10, 0)
43 | reports = suggest_keras_models_by_hyperband(
44 | space,
45 | plans=[
46 | [(2.0, 4), (4.0, 2)],
47 | [(4.0, 2), (2.0, 4)],
48 | ],
49 | top_n=2,
50 | )
51 | for r in reports:
52 | print(r)
53 | assert 2 == len(reports)
54 |
55 |
56 | def test_asha(tmpdir):
57 | TUNE_OBJECT_FACTORY.set_temp_path(str(tmpdir))
58 |
59 | space = keras_space(MockSpec, l1=RandInt(8, 16), l2=RandInt(8, 24))
60 | with raises(TuneCompileError):
61 | suggest_keras_models_by_continuous_asha(space, plan=[(2.0, 4), (4.0, 2)])
62 |
63 | space = space.sample(6, 0)
64 | reports = suggest_keras_models_by_continuous_asha(
65 | space, plan=[(2.0, 4), (4.0, 2)], top_n=2
66 | )
67 | for r in reports:
68 | print(r)
69 | assert 2 == len(reports)
70 |
--------------------------------------------------------------------------------
/tests/tune_tensorflow/test_utils.py:
--------------------------------------------------------------------------------
1 | from tests.tune_tensorflow.mock import MockSpec
2 | from tune_tensorflow.utils import (
3 | extract_keras_spec,
4 | to_keras_spec,
5 | to_keras_spec_expr,
6 | keras_space,
7 | )
8 |
9 |
10 | def test_keras_space():
11 | space = keras_space(MockSpec, a=1, b=2)
12 | spec = extract_keras_spec(list(space)[0], {})
13 | assert spec == MockSpec
14 | spec = extract_keras_spec(
15 | list(space)[0], {to_keras_spec_expr(MockSpec): "dummy"}
16 | )
17 | assert "dummy" == spec
18 |
19 |
20 | def test_to_keras_spec():
21 | expr = to_keras_spec_expr(MockSpec)
22 | assert to_keras_spec(expr) == MockSpec
23 |
24 | expr = to_keras_spec_expr(to_keras_spec_expr(MockSpec))
25 | assert to_keras_spec(expr) == MockSpec
--------------------------------------------------------------------------------
/tune/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune_version import __version__
4 |
5 | from tune.api.factory import (
6 | TUNE_OBJECT_FACTORY,
7 | parse_iterative_objective,
8 | parse_logger,
9 | parse_monitor,
10 | parse_noniterative_local_optimizer,
11 | parse_noniterative_objective,
12 | parse_noniterative_stopper,
13 | )
14 | from tune.api.optimize import (
15 | optimize_by_continuous_asha,
16 | optimize_by_hyperband,
17 | optimize_by_sha,
18 | optimize_noniterative,
19 | )
20 | from tune.api.suggest import (
21 | suggest_by_continuous_asha,
22 | suggest_by_hyperband,
23 | suggest_by_sha,
24 | suggest_for_noniterative_objective,
25 | )
26 | from tune.concepts.checkpoint import Checkpoint
27 | from tune.concepts.dataset import StudyResult, TuneDataset, TuneDatasetBuilder
28 | from tune.concepts.flow import (
29 | Monitor,
30 | Trial,
31 | TrialDecision,
32 | TrialReport,
33 | TrialReportLogger,
34 | )
35 | from tune.concepts.logger import MetricLogger
36 | from tune.concepts.space import (
37 | Choice,
38 | FuncParam,
39 | Grid,
40 | Rand,
41 | RandInt,
42 | Space,
43 | StochasticExpression,
44 | TransitionChoice,
45 | TuningParametersTemplate,
46 | )
47 | from tune.iterative.objective import (
48 | IterativeObjectiveFunc,
49 | validate_iterative_objective,
50 | )
51 | from tune.noniterative.convert import noniterative_objective, to_noniterative_objective
52 | from tune.noniterative.objective import (
53 | NonIterativeObjectiveFunc,
54 | NonIterativeObjectiveLocalOptimizer,
55 | validate_noniterative_objective,
56 | )
57 | from tune.noniterative.stopper import (
58 | NonIterativeStopper,
59 | n_samples,
60 | n_updates,
61 | no_update_period,
62 | small_improvement,
63 | )
64 |
--------------------------------------------------------------------------------
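The exports above compose into a few lines of user code. A minimal end-to-end sketch (assuming tune and a local Fugue backend are installed; the temp path and top_n are illustrative):

from tune import TUNE_OBJECT_FACTORY, Grid, Rand, Space
from tune import suggest_for_noniterative_objective

TUNE_OBJECT_FACTORY.set_temp_path("/tmp/tune")  # hypothetical scratch path

def objective(a: int, b: float) -> float:
    return a * a + b  # smaller is better by default

space = Space(a=Grid(-1, 0, 1), b=Rand(0.5, 1)).sample(3, 0)
report = suggest_for_noniterative_objective(objective, space, top_n=1)[0]
print(report.sort_metric)

--------------------------------------------------------------------------------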
/tune/_utils/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune._utils.collections import _EMPTY_ITER, dict_product, product, safe_iter
4 | from tune._utils.execution import run_monitored_process
5 | from tune._utils.math import (
6 | adjust_high,
7 | normal_to_continuous,
8 | normal_to_discrete,
9 | normal_to_integers,
10 | uniform_to_choice,
11 | uniform_to_continuous,
12 | uniform_to_discrete,
13 | uniform_to_integers,
14 | )
15 | from tune._utils.test import assert_close
16 | from tune._utils.values import normalize_hp
17 | from tune._utils.serialization import to_base64, from_base64
18 |
--------------------------------------------------------------------------------
/tune/_utils/collections.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | from typing import Any, Dict, Iterable, List, Tuple
3 |
4 |
5 | def dict_product(
6 | d: Dict[str, Iterable[Any]], safe: bool = True
7 | ) -> Iterable[Dict[str, Any]]:
8 | keys = d.keys()
9 | arrays = list(d.values())
10 | if len(arrays) == 0:
11 | if safe:
12 | yield {}
13 | return
14 | for element in _safe_product(arrays, safe):
15 | yield {k: v for k, v in zip(keys, element) if v is not _EMPTY_ITER}
16 |
17 |
18 | def product(
19 | arrays: List[Iterable[Any]], safe: bool = False, remove_empty: bool = True
20 | ) -> Iterable[List[Any]]:
21 | if len(arrays) == 0:
22 | if safe:
23 | yield []
24 | return
25 | if remove_empty:
26 | for x in _safe_product(arrays, safe):
27 | yield [xx for xx in x if xx is not _EMPTY_ITER]
28 | else:
29 | for x in _safe_product(arrays, safe):
30 | yield [None if xx is _EMPTY_ITER else xx for xx in x]
31 |
32 |
33 | def safe_iter(it: Iterable[Any], safe: bool = True) -> Iterable[Any]:
34 | if not safe:
35 | yield from it
36 | else:
37 | n = 0
38 | for x in it:
39 | yield x
40 | n += 1
41 | if n == 0:
42 | yield _EMPTY_ITER
43 |
44 |
45 | def _safe_product(arrays: List[Iterable[Any]], safe: bool = True) -> Iterable[Tuple]:
46 | if not safe:
47 | yield from itertools.product(*arrays)
48 | else:
49 | arr = [safe_iter(t) for t in arrays]
50 | yield from itertools.product(*arr)
51 |
52 |
53 | class _EmptyIter:
54 | pass
55 |
56 |
57 | _EMPTY_ITER = _EmptyIter()
58 |
--------------------------------------------------------------------------------
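A sketch of the `safe` semantics above: with safe=True, an empty input iterable contributes a placeholder (_EMPTY_ITER) instead of annihilating the whole cartesian product, and the placeholder is filtered out again on the way out:

from tune._utils.collections import dict_product, product

assert list(dict_product({"a": [1, 2], "b": ["x"]})) == [
    {"a": 1, "b": "x"},
    {"a": 2, "b": "x"},
]
# the empty iterable is skipped rather than producing zero combinations
assert list(product([[1, 2], []], safe=True, remove_empty=True)) == [[1], [2]]

--------------------------------------------------------------------------------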
/tune/_utils/execution.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Pool
2 | from typing import Any, Callable, Dict, List, TypeVar
3 |
4 | import cloudpickle
5 | from triad.utils.convert import to_timedelta
6 | from tune.constants import TUNE_STOPPER_DEFAULT_CHECK_INTERVAL
7 | from tune.exceptions import TuneInterrupted
8 |
9 | T = TypeVar("T")
10 |
11 |
12 | def run_monitored_process(
13 | target: Callable[..., T],
14 | args: List[Any],
15 | kwargs: Dict[str, Any],
16 | stop_checker: Callable[[], bool],
17 | interval: Any = TUNE_STOPPER_DEFAULT_CHECK_INTERVAL,
18 | ) -> T:
19 | interval_sec = to_timedelta(interval).total_seconds()
20 | blob = cloudpickle.dumps((target, args, kwargs))
21 | pool = Pool(1)
22 | result = pool.map_async(_run_target, [blob])
23 | while True:
24 | result.wait(interval_sec)
25 | if result.ready():
26 | pool.close()
27 | pool.join()
28 | res = result.get(timeout=1)[0]
29 | return cloudpickle.loads(res)
30 | if stop_checker():
31 | pool.terminate()
32 | pool.join()
33 | raise TuneInterrupted()
34 |
35 |
36 | def _run_target(blob: Any):
37 | tp = cloudpickle.loads(blob)
38 | return cloudpickle.dumps(tp[0](*tp[1], **tp[2]))
39 |
--------------------------------------------------------------------------------
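run_monitored_process executes the target in a single-worker Pool and polls stop_checker every `interval`; a True result terminates the worker and raises TuneInterrupted. A minimal sketch (on spawn-based platforms this belongs under an `if __name__ == "__main__":` guard):

from tune._utils.execution import run_monitored_process

def add(a, b):
    return a + b

# completes normally because the stop checker never fires
assert 3 == run_monitored_process(
    add, [1], dict(b=2), stop_checker=lambda: False, interval="1sec"
)

--------------------------------------------------------------------------------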
/tune/_utils/math.py:
--------------------------------------------------------------------------------
1 | from typing import Any, List, Optional, Union
2 |
3 | import numpy as np
4 |
5 | _IGNORABLE_ERROR = 1e-8
6 |
7 |
8 | def uniform_to_continuous(
9 | value: Any, low: float, high: float, log: bool = False, base: Optional[float] = None
10 | ) -> Any:
11 | if low >= high:
12 | return low if np.isscalar(value) else np.full(np.shape(value), low)
13 | if not log:
14 | return value * (high - low) + low
15 | if base is None:
16 | ll, lh = np.log(low), np.log(high)
17 | return np.exp(value * (lh - ll) + ll)
18 | else:
19 | b = np.log(base)
20 | ll, lh = np.log(low) / b, np.log(high) / b
21 | return np.power(base, value * (lh - ll) + ll)
22 |
23 |
24 | def uniform_to_discrete(
25 | value: Any,
26 | low: float,
27 | high: float,
28 | q: float,
29 | log: bool = False,
30 | include_high: bool = True,
31 | base: Optional[float] = None,
32 | ) -> Any:
33 | if low >= high:
34 | return low if np.isscalar(value) else np.full(np.shape(value), low)
35 | _high = adjust_high(low, high, q, include_high=include_high)
36 | _value = uniform_to_continuous(value, low, _high, log=log, base=base)
37 | return np.floor((_value - low) / q) * q + low
38 |
39 |
40 | def uniform_to_integers(
41 | value: Any,
42 | low: int,
43 | high: int,
44 | q: int = 1,
45 | log: bool = False,
46 | include_high: bool = True,
47 | base: Optional[float] = None,
48 | ) -> Union[int, List[int]]:
49 | res = np.round(
50 | uniform_to_discrete(
51 | value, low, high, q=q, log=log, include_high=include_high, base=base
52 | )
53 | )
54 | if np.isscalar(res):
55 | return int(res)
56 | return [int(x) for x in res]
57 |
58 |
59 | def uniform_to_choice(
60 | value: Any,
61 | choices: List[Any],
62 | log: bool = False,
63 | base: Optional[float] = None,
64 | ) -> Any:
65 | idx = uniform_to_integers(
66 | value, 1, len(choices), log=log, include_high=True, base=base
67 | )
68 | if isinstance(idx, int):
69 | return choices[idx - 1]
70 | return [choices[x - 1] for x in idx]
71 |
72 |
73 | def normal_to_continuous(value: Any, mean: float, sigma: float) -> Any:
74 | return value * sigma + mean
75 |
76 |
77 | def normal_to_discrete(value: Any, mean: float, sigma: float, q: float) -> Any:
78 | return np.round(value * sigma / q) * q + mean
79 |
80 |
81 | def normal_to_integers(
82 | value: Any, mean: int, sigma: float, q: int = 1
83 | ) -> Union[int, List[int]]:
84 | res = normal_to_discrete(value, mean=mean, sigma=sigma, q=q)
85 | if np.isscalar(res):
86 | return int(res)
87 | return [int(x) for x in res]
88 |
89 |
90 | def adjust_high(low: float, high: float, q: float, include_high: bool):
91 | _high = low + np.floor((high - low) / q + _IGNORABLE_ERROR) * q
92 | if abs(_high - high) < _IGNORABLE_ERROR:
93 | if include_high:
94 | _high = high + q
95 | else:
96 | _high += q
97 | return _high
98 |
--------------------------------------------------------------------------------
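These helpers map uniform (or normal) samples onto tuning domains. Two worked cases that follow directly from the formulas above:

from tune._utils.math import uniform_to_continuous, uniform_to_integers

# log scale: the midpoint of [0, 1) lands on the geometric mean of [1, 100]
assert abs(uniform_to_continuous(0.5, 1.0, 100.0, log=True) - 10.0) < 1e-9

# include_high=True extends the grid by one step so `high` itself is reachable
assert uniform_to_integers(0.999, 1, 5) == 5

--------------------------------------------------------------------------------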
/tune/_utils/serialization.py:
--------------------------------------------------------------------------------
1 | import cloudpickle
2 | import base64
3 | from typing import Any
4 |
5 |
6 | def to_base64(obj: Any) -> str:
7 | """Convert an object to base64 string
8 |
9 | :param obj: a picklable object
10 | :return: a base64 string
11 | """
12 | return base64.b64encode(cloudpickle.dumps(obj)).decode("ascii")
13 |
14 |
15 | def from_base64(data: str) -> Any:
16 | """Convert back to an object from a serialized base64 string
17 |
18 | :param data: base64 string
19 | :return: the unpickled data object
20 | """
21 | return cloudpickle.loads(base64.b64decode(data.encode("ascii"))) # type: ignore
22 |
--------------------------------------------------------------------------------
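A round-trip sketch: the base64 form is ASCII-safe, which is what allows pickled objects (such as reports) to travel inside string columns of a distributed dataframe:

from tune._utils.serialization import from_base64, to_base64

payload = {"lr": 0.1, "layers": [64, 32]}
blob = to_base64(payload)
assert isinstance(blob, str)
assert from_base64(blob) == payload

--------------------------------------------------------------------------------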
/tune/_utils/test.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable, Any, List
2 |
3 |
4 | def assert_close(seq1: Iterable[Any], seq2: Iterable[Any], error: float = 1e-5):
5 | def dedup(seq: List[Any]) -> Iterable[Any]:
6 | last: Any = None
7 | for x in sorted(seq):
8 | if last is None:
9 | last = x
10 | yield x
11 | elif abs(x - last) < error:
12 | continue
13 | else:
14 | last = x
15 | yield x
16 |
17 | _s1, _s2 = list(dedup(seq1)), list(dedup(seq2)) # type: ignore
18 | assert len(_s1) == len(_s2), f"{_s1},{_s2}"
19 | for x, y in zip(_s1, _s2):
20 | assert abs(x - y) < error, f"{_s1},{_s2}"
21 |
--------------------------------------------------------------------------------
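assert_close sorts and de-duplicates each sequence within `error` before comparing element-wise, so it is order-insensitive and tolerant of near-duplicates:

from tune._utils.test import assert_close

assert_close([2.0, 1.0, 1.0 + 1e-9], [1.0, 2.0])  # passes

--------------------------------------------------------------------------------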
/tune/_utils/values.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | import numpy as np
4 |
5 |
6 | def normalize_hp(hp: Any) -> Any:
7 | if isinstance(hp, str):
8 | return hp
9 | elif isinstance(hp, list):
10 | return [normalize_hp(x) for x in hp]
11 | elif isinstance(hp, dict):
12 | return {k: normalize_hp(v) for k, v in hp.items()}
13 | elif isinstance(hp, np.generic):
14 | return hp.item()
15 | else:
16 | return hp
17 |
--------------------------------------------------------------------------------
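normalize_hp recursively converts numpy scalars into plain Python values so tuned parameters serialize cleanly:

import numpy as np

from tune._utils.values import normalize_hp

out = normalize_hp({"a": np.int64(3), "b": [np.float64(0.5)]})
assert out == {"a": 3, "b": [0.5]}
assert type(out["a"]) is int and type(out["b"][0]) is float

--------------------------------------------------------------------------------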
/tune/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tune/api/__init__.py
--------------------------------------------------------------------------------
/tune/api/optimize.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Any, Callable, List, Optional, Tuple
3 | from uuid import uuid4
4 |
5 | from triad import FileSystem
6 | from tune.api.factory import (
7 | TUNE_OBJECT_FACTORY,
8 | parse_iterative_objective,
9 | parse_monitor,
10 | parse_noniterative_local_optimizer,
11 | parse_noniterative_objective,
12 | parse_noniterative_stopper,
13 | )
14 | from tune.concepts.dataset import StudyResult, TuneDataset
15 | from tune.concepts.flow import TrialReport
16 | from tune.iterative.asha import ASHAJudge, RungHeap
17 | from tune.iterative.sha import _NonIterativeObjectiveWrapper
18 | from tune.iterative.study import IterativeStudy
19 | from tune.noniterative.study import NonIterativeStudy
20 |
21 |
22 | def optimize_noniterative(
23 | objective: Any,
24 | dataset: TuneDataset,
25 | optimizer: Any = None,
26 | distributed: Optional[bool] = None,
27 | logger: Any = None,
28 | monitor: Any = None,
29 | stopper: Any = None,
30 | stop_check_interval: Any = None,
31 | ) -> StudyResult:
32 | _objective = parse_noniterative_objective(objective)
33 | _optimizer = parse_noniterative_local_optimizer(optimizer)
34 | _stopper = parse_noniterative_stopper(stopper)
35 | _monitor = parse_monitor(monitor)
36 | study = NonIterativeStudy(_objective, _optimizer)
37 | return study.optimize(
38 | dataset,
39 | distributed=distributed,
40 | monitor=_monitor,
41 | stopper=_stopper,
42 | stop_check_interval=stop_check_interval,
43 | logger=logger,
44 | )
45 |
46 |
47 | def optimize_by_sha(
48 | objective: Any,
49 | dataset: TuneDataset,
50 | plan: List[Tuple[float, int]],
51 | checkpoint_path: str = "",
52 | distributed: Optional[bool] = None,
53 | monitor: Any = None,
54 | ) -> StudyResult:
55 | _objective = parse_iterative_objective(objective)
56 | _monitor = parse_monitor(monitor)
57 | checkpoint_path = TUNE_OBJECT_FACTORY.get_path_or_temp(checkpoint_path)
58 | path = os.path.join(checkpoint_path, str(uuid4()))
59 | for budget, keep in plan:
60 | obj = _NonIterativeObjectiveWrapper(
61 | _objective, checkpoint_path=path, budget=budget
62 | )
63 | result = optimize_noniterative(
64 | obj, dataset, distributed=distributed, monitor=_monitor
65 | )
66 | dataset = result.next_tune_dataset(keep)
67 | return result
68 |
69 |
70 | def optimize_by_hyperband(
71 | objective: Any,
72 | dataset: TuneDataset,
73 | plans: List[List[Tuple[float, int]]],
74 | checkpoint_path: str = "",
75 | distributed: Optional[bool] = None,
76 | monitor: Any = None,
77 | ) -> StudyResult:
78 | _monitor = parse_monitor(monitor)
79 | weights = [float(p[0][1]) for p in plans]
80 | datasets = dataset.split(weights, seed=0)
81 | result: Any = None
82 | for d, plan in zip(datasets, plans):
83 | r = optimize_by_sha(
84 | objective=objective,
85 | dataset=d,
86 | plan=plan,
87 | checkpoint_path=checkpoint_path,
88 | distributed=distributed,
89 | monitor=_monitor,
90 | )
91 | if result is None:
92 | result = r
93 | else:
94 | result.union_with(r)
95 | return result
96 |
97 |
98 | def optimize_by_continuous_asha(
99 | objective: Any,
100 | dataset: TuneDataset,
101 | plan: List[Tuple[float, int]],
102 | checkpoint_path: str = "",
103 | always_checkpoint: bool = False,
104 | study_early_stop: Optional[Callable[[List[Any], List[RungHeap]], bool]] = None,
105 | trial_early_stop: Optional[
106 | Callable[[TrialReport, List[TrialReport], List[RungHeap]], bool]
107 | ] = None,
108 | monitor: Any = None,
109 | ) -> StudyResult:
110 | _objective = parse_iterative_objective(objective)
111 | _monitor = parse_monitor(monitor)
112 | checkpoint_path = TUNE_OBJECT_FACTORY.get_path_or_temp(checkpoint_path)
113 | judge = ASHAJudge(
114 | schedule=plan,
115 | always_checkpoint=always_checkpoint,
116 | study_early_stop=study_early_stop,
117 | trial_early_stop=trial_early_stop,
118 | monitor=_monitor,
119 | )
120 | path = os.path.join(checkpoint_path, str(uuid4()))
121 | FileSystem().makedirs(path, recreate=True)
122 | study = IterativeStudy(_objective, checkpoint_path=path)
123 | return study.optimize(dataset, judge=judge)
124 |
--------------------------------------------------------------------------------
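The plan arguments encode successive halving: each (budget, keep) pair gives every surviving trial `budget` more iterations and then keeps the best `keep` of them, while optimize_by_hyperband runs several such plans on dataset splits weighted by each plan's initial `keep`. A small sketch of those semantics:

plan = [(2.0, 2), (1.0, 1)]  # 2 iters for all, keep 2; then 1 more iter, keep 1
plans = [[(1.0, 3), (1.0, 1)], [(2.0, 2), (1.0, 1)]]
weights = [float(p[0][1]) for p in plans]
assert weights == [3.0, 2.0]  # proportions passed to dataset.split

--------------------------------------------------------------------------------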
/tune/concepts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tune/concepts/__init__.py
--------------------------------------------------------------------------------
/tune/concepts/checkpoint.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Any, List
3 | from uuid import uuid4
4 |
5 | from fs.base import FS as FSBase
6 | from triad import assert_or_throw
7 |
8 | _CHECKPOINT_STATE_FILE = "STATE"
9 |
10 |
11 | class Checkpoint:
12 | """An abstraction for tuning checkpoint
13 |
14 | :param fs: the file system
15 |
16 | .. attention::
17 |
18 | Normally you don't need to create a checkpoint by yourself,
19 | please read :ref:`Checkpoint Tutorial `
20 | if you want to understand how it works.
21 | """
22 |
23 | def __init__(self, fs: FSBase):
24 | self._fs = fs
25 | try:
26 | self._iterations: List[str] = json.loads(
27 | fs.readtext(_CHECKPOINT_STATE_FILE)
28 | )
29 | except Exception:
30 | self._iterations = []
31 |
32 | def __len__(self) -> int:
33 | """Count of the current iterations"""
34 | return len(self._iterations)
35 |
36 | @property
37 | def latest(self) -> FSBase:
38 | """latest checkpoint folder
39 |
40 | :raises AssertionError: if there was no checkpoint
41 | """
42 | assert_or_throw(len(self) > 0, "checkpoint history is empty")
43 | return self._fs.opendir(self._iterations[-1])
44 |
45 | def create(self) -> "NewCheckpoint":
46 | """Create a new checkpoint"""
47 | return NewCheckpoint(self)
48 |
49 |
50 | class NewCheckpoint:
51 | """A helper class for adding new checkpoints
52 |
53 | :param checkpoint: the parent checkpoint
54 |
55 | .. attention::
56 |
57 | Do not construct this class directly, please read
58 | :ref:`Checkpoint Tutorial `
59 | for details
60 | """
61 |
62 | def __init__(self, checkpoint: Checkpoint):
63 | self._parent = checkpoint
64 | self._name = str(uuid4())
65 |
66 | def __enter__(self) -> FSBase:
67 | return self._parent._fs.makedir(self._name)
68 |
69 | def __exit__(self, exc_type: Any, exc_value: Any, exc_traceback: Any) -> None:
70 | if exc_type is not None:
71 | try:
72 | self._parent._fs.removetree(self._name)
73 | except Exception: # pragma: no cover
74 | pass
75 | else:
76 | new_iterations = self._parent._iterations + [self._name]
77 | self._parent._fs.writetext(
78 | _CHECKPOINT_STATE_FILE, json.dumps(new_iterations)
79 | )
80 | self._parent._iterations = new_iterations
81 |
--------------------------------------------------------------------------------
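A usage sketch (assuming the pyfilesystem package behind FSBase is available; the path is illustrative): a new iteration folder is committed to the STATE file only when the `with` block exits cleanly, otherwise it is removed:

from fs.osfs import OSFS

from tune import Checkpoint

ckpt = Checkpoint(OSFS("/tmp/tune_ckpt", create=True))
with ckpt.create() as folder:
    folder.writetext("weights.json", "{}")
assert len(ckpt) == 1
latest = ckpt.latest  # the folder that was just committed

--------------------------------------------------------------------------------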
/tune/concepts/flow/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune.concepts.flow.judge import (
4 | Monitor,
5 | NoOpTrailJudge,
6 | RemoteTrialJudge,
7 | TrialCallback,
8 | TrialDecision,
9 | TrialJudge,
10 | )
11 | from tune.concepts.flow.report import TrialReport, TrialReportHeap, TrialReportLogger
12 | from tune.concepts.flow.trial import Trial
13 |
--------------------------------------------------------------------------------
/tune/concepts/flow/judge.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Callable, Dict, Optional
2 |
3 | from tune.concepts.flow.report import TrialReport
4 | from tune.concepts.flow.trial import Trial
5 |
6 |
7 | class TrialDecision:
8 | def __init__(
9 | self,
10 | report: TrialReport,
11 | budget: float,
12 | should_checkpoint: bool,
13 | reason: str = "",
14 | metadata: Optional[Dict[str, Any]] = None,
15 | ):
16 | self._report = report
17 | self._budget = budget
18 | self._should_checkpoint = should_checkpoint
19 | self._reason = reason
20 | self._metadata = metadata or {}
21 |
22 | def __repr__(self) -> str:
23 | return repr(
24 | dict(
25 | report=self._report,
26 | budget=self._budget,
27 | should_checkpoint=self._should_checkpoint,
28 | reason=self._reason,
29 | metadata=self._metadata,
30 | )
31 | )
32 |
33 | def __copy__(self) -> "TrialDecision":
34 | return self
35 |
36 | def __deepcopy__(self, memo: Any) -> "TrialDecision":
37 | return self
38 |
39 | @property
40 | def report(self) -> TrialReport:
41 | return self._report
42 |
43 | @property
44 | def trial(self) -> Trial:
45 | return self.report.trial
46 |
47 | @property
48 | def trial_id(self) -> str:
49 | return self.trial.trial_id
50 |
51 | @property
52 | def budget(self) -> float:
53 | return self._budget
54 |
55 | @property
56 | def should_stop(self) -> bool:
57 | return self.budget <= 0
58 |
59 | @property
60 | def should_checkpoint(self) -> bool:
61 | return self._should_checkpoint
62 |
63 | @property
64 | def reason(self) -> str:
65 | return self._reason
66 |
67 | @property
68 | def metadata(self) -> Dict[str, Any]:
69 | return self._metadata
70 |
71 |
72 | class TrialJudge:
73 | def __init__(self, monitor: Optional["Monitor"] = None):
74 | self.reset_monitor(monitor)
75 |
76 | @property
77 | def monitor(self) -> "Monitor":
78 | assert self._trial_judge_monitor is not None
79 | return self._trial_judge_monitor
80 |
81 | def reset_monitor(self, monitor: Optional["Monitor"] = None) -> None:
82 | self._trial_judge_monitor = monitor or Monitor()
83 |
84 | def can_accept(self, trial: Trial) -> bool: # pragma: no cover
85 | raise NotImplementedError
86 |
87 | def get_budget(self, trial: Trial, rung: int) -> float: # pragma: no cover
88 | raise NotImplementedError
89 |
90 | def judge(self, report: TrialReport) -> TrialDecision: # pragma: no cover
91 | raise NotImplementedError
92 |
93 |
94 | class RemoteTrialJudge(TrialJudge):
95 | def __init__(self, entrypoint: Callable[[str, Dict[str, Any]], Any]):
96 | super().__init__()
97 | self._entrypoint = entrypoint
98 | self._report: Optional[TrialReport] = None
99 |
100 | @property
101 | def report(self) -> Optional[TrialReport]:
102 | return self._report
103 |
104 | def can_accept(self, trial: Trial) -> bool:
105 | return self._entrypoint("can_accept", dict(trial=trial))
106 |
107 | def judge(self, report: TrialReport) -> TrialDecision:
108 | self._report = report
109 | return self._entrypoint("judge", dict(report=report))
110 |
111 | def get_budget(self, trial: Trial, rung: int) -> float:
112 | return self._entrypoint("get_budget", dict(trial=trial, rung=rung))
113 |
114 |
115 | class NoOpTrailJudge(TrialJudge):
116 | def can_accept(self, trial: Trial) -> bool: # pragma: no cover
117 | return True
118 |
119 | def get_budget(self, trial: Trial, rung: int) -> float: # pragma: no cover
120 | return 0.0
121 |
122 | def judge(self, report: TrialReport) -> TrialDecision: # pragma: no cover
123 | self.monitor.on_report(report)
124 | return TrialDecision(report, 0.0, False)
125 |
126 |
127 | class TrialCallback:
128 | def __init__(self, judge: TrialJudge):
129 | self._judge = judge
130 |
131 | def entrypoint(self, name, kwargs: Dict[str, Any]) -> Any:
132 | if name == "can_accept":
133 | return self._judge.can_accept(kwargs["trial"])
134 | if name == "judge":
135 | return self._judge.judge(kwargs["report"])
136 | if name == "get_budget":
137 | return self._judge.get_budget(kwargs["trial"], kwargs["rung"])
138 | raise NotImplementedError # pragma: no cover
139 |
140 |
141 | class Monitor:
142 | def __init__(self):
143 | self._judge: Optional[TrialJudge] = None
144 |
145 | def on_report(self, report: TrialReport) -> None: # pragma: no cover
146 | pass
147 |
148 | def on_get_budget(
149 | self, trial: Trial, rung: int, budget: float
150 | ) -> None: # pragma: no cover
151 | pass
152 |
153 | def on_judge(self, decision: TrialDecision) -> None: # pragma: no cover
154 | pass
155 |
156 | def initialize(self) -> None: # pragma: no cover
157 | pass
158 |
159 | def finalize(self) -> None: # pragma: no cover
160 | pass
161 |
--------------------------------------------------------------------------------
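RemoteTrialJudge and TrialCallback are the two halves of a cross-worker bridge: the callback wraps the real judge on the driver, and the remote side forwards can_accept/judge/get_budget through a plain (name, kwargs) entrypoint. A local sketch of the round trip:

from tune.concepts.flow import (
    NoOpTrailJudge,
    RemoteTrialJudge,
    Trial,
    TrialCallback,
)

callback = TrialCallback(NoOpTrailJudge())      # driver side
remote = RemoteTrialJudge(callback.entrypoint)  # worker side
assert remote.can_accept(Trial("t0", dict(a=1)))

--------------------------------------------------------------------------------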
/tune/concepts/flow/trial.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List, Optional
2 |
3 | from tune.concepts.space import to_template
4 | from tune.concepts.space.parameters import TuningParametersTemplate
5 |
6 |
7 | class Trial:
8 | """The input data collection for running an objective.
9 | It is immutable.
10 |
11 | :param trial_id: the unique id for a trial
12 | :param params: parameters for tuning, an object convertible to
13 | ``TuningParametersTemplate`` by
14 | :func:`~tune.concepts.space.parameters.to_template`
15 | :param metadata: metadata for tuning, defaults to None. It is set
16 | during the construction of :class:`~.tune.concepts.dataset.TuneDataset`
17 | :param keys: partitions keys of the
18 | :class:`~.tune.concepts.dataset.TuneDataset`, defaults to None
19 | :param dfs: dataframes extracted from
20 | :class:`~.tune.concepts.dataset.TuneDataset`, defaults to None
21 |
22 | .. attention::
23 |
24 | This class is not for users to construct directly.
25 | Use :class:`~tune.concepts.space.spaces.Space` instead.
26 |
27 | """
28 |
29 | def __init__(
30 | self,
31 | trial_id: str,
32 | params: Any,
33 | metadata: Optional[Dict[str, Any]] = None,
34 | keys: Optional[List[str]] = None,
35 | dfs: Optional[Dict[str, Any]] = None,
36 | ):
37 | self._trial_id = trial_id
38 | self._params = to_template(params)
39 | self._metadata = metadata or {}
40 | self._keys = keys or []
41 | self._dfs = dfs or {}
42 |
43 | def copy(self) -> "Trial":
44 | """Copy the current object.
45 |
46 | :return: the copied object
47 |
48 | .. note::
49 | This is a shallow copy, but it is also used by `__deepcopy__`
50 | of this object, because we disable deep copying
51 | of Trial.
52 | """
53 | return Trial(
54 | trial_id=self._trial_id,
55 | params=self._params,
56 | metadata=self._metadata,
57 | keys=self._keys,
58 | dfs=self._dfs,
59 | )
60 |
61 | def __repr__(self) -> str:
62 | return repr(
63 | dict(
64 | trial_id=self._trial_id,
65 | params=self._params,
66 | metadata=self._metadata,
67 | keys=self._keys,
68 | )
69 | )
70 |
71 | def __copy__(self) -> "Trial":
72 | """Shallow copy"""
73 | return self.copy()
74 |
75 | def __deepcopy__(self, memo: Any) -> "Trial":
76 | """(Enforced) shallow copy"""
77 | return self.copy()
78 |
79 | @property
80 | def trial_id(self) -> str:
81 | """The unique id of this trial"""
82 | return self._trial_id
83 |
84 | @property
85 | def params(self) -> TuningParametersTemplate:
86 | """Parameters for tuning"""
87 | return self._params
88 |
89 | @property
90 | def keys(self) -> List[str]:
91 | """Partitions keys of the
92 | :class:`~.tune.concepts.dataset.TuneDataset`
93 | """
94 | return self._keys
95 |
96 | @property
97 | def dfs(self) -> Dict[str, Any]:
98 | """Dataframes extracted from
99 | :class:`~.tune.concepts.dataset.TuneDataset`
100 | """
101 | return self._dfs
102 |
103 | def with_dfs(self, dfs: Dict[str, Any]) -> "Trial":
104 | """Set dataframes for the trial, a new Trial object will
105 | be constructed and with the new ``dfs``
106 |
107 | :param dfs: dataframes to attach to the trial
108 |
109 | """
110 | if len(dfs) == 0 and len(self.dfs) == 0:
111 | return self
112 | t = self.copy()
113 | t._dfs = dfs
114 | return t
115 |
116 | def with_params(self, params: Any) -> "Trial":
117 | """Set parameters for the trial, a new Trial object will
118 | be constructed and with the new ``params``
119 |
120 | :param params: parameters for tuning
121 | """
122 | t = self.copy()
123 | t._params = to_template(params)
124 | return t
125 |
126 | @property
127 | def metadata(self) -> Dict[str, Any]:
128 | """Metadata of the trial"""
129 | return self._metadata
130 |
--------------------------------------------------------------------------------
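Trial is immutable, so the with_* methods return modified copies. A small sketch:

from tune import Trial

t1 = Trial("t0", dict(a=1, b=2))
t2 = t1.with_params(dict(a=10, b=20))
assert t1.params.simple_value == dict(a=1, b=2)  # t1 is unchanged
assert t2.params.simple_value == dict(a=10, b=20)
assert t2.trial_id == t1.trial_id  # identity carries over

--------------------------------------------------------------------------------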
/tune/concepts/space/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune.concepts.space.parameters import (
4 | Choice,
5 | FuncParam,
6 | Grid,
7 | NormalRand,
8 | NormalRandInt,
9 | Rand,
10 | RandInt,
11 | StochasticExpression,
12 | TransitionChoice,
13 | TuningParametersTemplate,
14 | to_template,
15 | )
16 | from tune.concepts.space.spaces import Space
17 |
--------------------------------------------------------------------------------
/tune/constants.py:
--------------------------------------------------------------------------------
1 | TUNE_TEMP_PATH = "tune.temp.path"
2 | TUNE_PREFIX = "__tune_"
3 | TUNE_DATASET_DF_PREFIX = TUNE_PREFIX + "df__"
4 | TUNE_DATASET_PARAMS_PREFIX = TUNE_PREFIX + "params__"
5 | TUNE_DATASET_TRIALS = TUNE_PREFIX + "trials__"
6 |
7 | TUNE_REPORT_ID = TUNE_PREFIX + "trail_id__"
8 | TUNE_REPORT_METRIC = TUNE_PREFIX + "metric__"
9 | TUNE_REPORT = TUNE_PREFIX + "report__"
10 |
11 | TUNE_REPORT_ADD_SCHEMA = (
12 | f"{TUNE_REPORT_ID}:str,{TUNE_REPORT_METRIC}:double,{TUNE_REPORT}:str"
13 | )
14 |
15 | TUNE_DATASET_DF_DEFAULT_NAME = TUNE_PREFIX + "_df_"
16 | TUNE_DATASET_VALIDATION_DF_DEFAULT_NAME = TUNE_DATASET_DF_DEFAULT_NAME + "_validation_"
17 |
18 | TUNE_STOPPER_DEFAULT_CHECK_INTERVAL = "60sec"
19 |
20 | SPACE_PARAM_PREFIX = "__space_"
21 | SPACE_MODEL_NAME = SPACE_PARAM_PREFIX + "_model"
22 |
--------------------------------------------------------------------------------
/tune/exceptions.py:
--------------------------------------------------------------------------------
1 | from fugue.exceptions import FugueWorkflowCompileError, FugueWorkflowRuntimeError
2 |
3 |
4 | class TuneCompileError(FugueWorkflowCompileError):
5 | pass
6 |
7 |
8 | class TuneRuntimeError(FugueWorkflowRuntimeError):
9 | pass
10 |
11 |
12 | class TuneInterrupted(TuneRuntimeError):
13 | pass
14 |
--------------------------------------------------------------------------------
/tune/iterative/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
--------------------------------------------------------------------------------
/tune/iterative/sha.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from triad import FileSystem
4 | from tune.iterative.objective import IterativeObjectiveFunc
5 | from tune.noniterative.objective import NonIterativeObjectiveFunc
6 | from tune.concepts.flow import Trial, TrialDecision, TrialJudge, TrialReport
7 |
8 |
9 | class _NonIterativeObjectiveWrapper(NonIterativeObjectiveFunc):
10 | def __init__(
11 | self,
12 | func: IterativeObjectiveFunc,
13 | checkpoint_path: str,
14 | budget: float,
15 | ):
16 | super().__init__()
17 | self._budget = budget
18 | self._func = func
19 | self._checkpoint_path = checkpoint_path
20 |
21 | def generate_sort_metric(self, value: float) -> float:
22 | return self._func.generate_sort_metric(value)
23 |
24 | def run(self, trial: Trial) -> TrialReport: # pragma: no cover
25 | judge = _NonIterativeJudgeWrapper(self._budget)
26 | base_fs = FileSystem()
27 | fs = base_fs.makedirs(self._checkpoint_path, recreate=True)
28 | self._func = self._func.copy()
29 | self._func.run(trial, judge=judge, checkpoint_basedir_fs=fs)
30 | return judge.report
31 |
32 |
33 | class _NonIterativeJudgeWrapper(TrialJudge):
34 | def __init__(self, budget: float):
35 | super().__init__()
36 | self._report: Optional[TrialReport] = None
37 | self._budget = budget
38 |
39 | @property
40 | def report(self) -> TrialReport:
41 | assert self._report is not None
42 | return self._report
43 |
44 | def can_accept(self, trial: Trial) -> bool:
45 | return True
46 |
47 | def get_budget(self, trial: Trial, rung: int):
48 | return self._budget
49 |
50 | def judge(self, report: TrialReport) -> TrialDecision:
51 | self._report = report
52 | return TrialDecision(report, budget=0.0, should_checkpoint=True, metadata={})
53 |
--------------------------------------------------------------------------------
/tune/iterative/study.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Callable, Dict, Iterable
2 |
3 | from triad import FileSystem
4 | from tune.constants import TUNE_REPORT_ADD_SCHEMA
5 | from tune.concepts.dataset import StudyResult, TuneDataset, _get_trials_from_row
6 | from tune.iterative.objective import IterativeObjectiveFunc
7 | from tune.concepts.flow import RemoteTrialJudge, TrialCallback, TrialJudge
8 |
9 |
10 | class IterativeStudy:
11 | def __init__(self, objective: IterativeObjectiveFunc, checkpoint_path: str):
12 | self._objective = objective
13 | self._checkpoint_path = checkpoint_path
14 |
15 | def optimize(self, dataset: TuneDataset, judge: TrialJudge) -> StudyResult:
16 | callback = TrialCallback(judge)
17 |
18 | res = dataset.data.per_row().transform(
19 | self._compute,
20 | schema=f"*,{TUNE_REPORT_ADD_SCHEMA}",
21 | callback=callback.entrypoint,
22 | )
23 |
24 | return StudyResult(dataset=dataset, result=res)
25 |
26 | def _compute(
27 | self,
28 | df: Iterable[Dict[str, Any]],
29 | entrypoint: Callable[[str, Dict[str, Any]], Any],
30 | ) -> Iterable[Dict[str, Any]]:
31 | fs = FileSystem()
32 | ck_fs = fs.makedirs(self._checkpoint_path, recreate=True)
33 | for row in df:
34 | for trial in _get_trials_from_row(row):
35 | rjudge = RemoteTrialJudge(entrypoint)
36 | self._objective.copy().run(trial, rjudge, ck_fs)
37 | if rjudge.report is not None:
38 | yield rjudge.report.fill_dict(dict(row))
39 |
--------------------------------------------------------------------------------
/tune/noniterative/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
--------------------------------------------------------------------------------
/tune/noniterative/convert.py:
--------------------------------------------------------------------------------
1 | import copy
2 | from typing import Any, Callable, Dict, Optional, Tuple, no_type_check
3 |
4 | from fugue._utils.interfaceless import is_class_method
5 | from triad import assert_or_throw
6 | from triad.collections.function_wrapper import (
7 | AnnotatedParam,
8 | FunctionWrapper,
9 | function_wrapper,
10 | )
11 | from triad.utils.convert import get_caller_global_local_vars, to_function
12 |
13 | from tune.concepts.flow import Trial, TrialReport
14 | from tune.exceptions import TuneCompileError
15 | from tune.noniterative.objective import NonIterativeObjectiveFunc
16 |
17 |
18 | def noniterative_objective(
19 | func: Optional[Callable] = None, min_better: bool = True
20 | ) -> Callable[[Any], NonIterativeObjectiveFunc]:
21 | def deco(func: Callable) -> NonIterativeObjectiveFunc:
22 | assert_or_throw(
23 | not is_class_method(func),
24 | NotImplementedError(
25 | "non_iterative_objective decorator can't be used on class methods"
26 | ),
27 | )
28 | return _NonIterativeObjectiveFuncWrapper.from_func(func, min_better)
29 |
30 | if func is None:
31 | return deco
32 | else:
33 | return deco(func) # type: ignore
34 |
35 |
36 | def to_noniterative_objective(
37 | obj: Any,
38 | min_better: bool = True,
39 | global_vars: Optional[Dict[str, Any]] = None,
40 | local_vars: Optional[Dict[str, Any]] = None,
41 | ) -> NonIterativeObjectiveFunc:
42 | if isinstance(obj, NonIterativeObjectiveFunc):
43 | return copy.copy(obj)
44 | global_vars, local_vars = get_caller_global_local_vars(global_vars, local_vars)
45 | try:
46 | f = to_function(obj, global_vars=global_vars, local_vars=local_vars)
47 | # this handles a string expression that resolves to a decorated function
48 | if isinstance(f, NonIterativeObjectiveFunc):
49 | return copy.copy(f)
50 | # this handles plain functions without the decorator
51 | return _NonIterativeObjectiveFuncWrapper.from_func(f, min_better)
52 | except Exception as e:
53 | exp = e  # keep the error object alive beyond the except block
54 | raise TuneCompileError(f"{obj} is not a valid tunable function", exp)
55 |
56 |
57 | class _NonIterativeObjectiveFuncWrapper(NonIterativeObjectiveFunc):
58 | def __init__(self, min_better: bool):
59 | self._min_better = min_better
60 |
61 | @property
62 | def min_better(self) -> bool:
63 | return self._min_better
64 |
65 | def generate_sort_metric(self, value: float) -> float:
66 | return float(value) if self._min_better else -float(value)
67 |
68 | @no_type_check
69 | def run(self, trial: Trial) -> TrialReport:
70 | if self._orig_input:
71 | result = self._func(trial)
72 | else:
73 | result = self._func(**trial.params.simple_value, **trial.dfs)
74 | return self._output_f(result, trial)
75 |
76 | @no_type_check
77 | def __call__(self, *args: Any, **kwargs: Any) -> Any:
78 | return self._func(*args, **kwargs)
79 |
80 | @no_type_check
81 | @staticmethod
82 | def from_func(
83 | func: Callable, min_better: bool
84 | ) -> "_NonIterativeObjectiveFuncWrapper":
85 | f = _NonIterativeObjectiveFuncWrapper(min_better=min_better)
86 | w = _NonIterativeObjectiveWrapper(func)
87 | f._func = w._func
88 | f._orig_input = w._orig_input
89 | f._output_f = w._rt.to_report
90 | return f
91 |
92 |
93 | @function_wrapper(None)
94 | class _NonIterativeObjectiveWrapper(FunctionWrapper):
95 | def __init__(self, func: Callable):
96 | super().__init__(func, ".*", "^[r12]$")
97 | param = self._params.get_value_by_index(0)
98 | self._orig_input = isinstance(param, _TrialParam)
99 | self._orig_output = isinstance(self._rt, _RawReportParam)
100 |
101 |
102 | class _ReportParam(AnnotatedParam):
103 | def to_report(self, v: Any, trial: Trial) -> TrialReport:
104 | raise NotImplementedError # pragma: no cover
105 |
106 |
107 | @_NonIterativeObjectiveWrapper.annotated_param(TrialReport, "r")
108 | class _RawReportParam(_ReportParam):
109 | def to_report(self, v: Any, trial: Trial) -> TrialReport:
110 | return v
111 |
112 |
113 | @_NonIterativeObjectiveWrapper.annotated_param(float, "1")
114 | class _MetricParam(_ReportParam):
115 | def to_report(self, v: Any, trial: Trial) -> TrialReport:
116 | return TrialReport(trial, metric=float(v), params=trial.params, metadata={})
117 |
118 |
119 | @_NonIterativeObjectiveWrapper.annotated_param(Tuple[float, Dict[str, Any]], "2")
120 | class _MetricMetadataParam(_ReportParam):
121 | def to_report(self, v: Any, trial: Trial) -> TrialReport:
122 | return TrialReport(
123 | trial, metric=float(v[0]), params=trial.params, metadata=v[1]
124 | )
125 |
126 |
127 | @_NonIterativeObjectiveWrapper.annotated_param(Trial, "t")
128 | class _TrialParam(AnnotatedParam):
129 | pass
130 |
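A sketch of the registered return forms above ("r" is a full ``TrialReport``, "1" a bare metric, "2" a metric plus metadata); the objective functions are hypothetical:

from typing import Any, Dict, Tuple

from tune.noniterative.convert import noniterative_objective, to_noniterative_objective

@noniterative_objective
def f1(a: float, b: float) -> float:  # "1": bare metric
    return a * a + b * b

@noniterative_objective(min_better=False)
def f2(a: float, b: float) -> Tuple[float, Dict[str, Any]]:  # "2": metric + metadata
    return a + b, {"note": "hypothetical"}

def f3(a: float, b: float) -> float:
    return a * b

f3_obj = to_noniterative_objective(f3)  # plain functions work without the decorator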
--------------------------------------------------------------------------------
/tune/noniterative/objective.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Callable, Optional
2 |
3 | from tune._utils import run_monitored_process
4 | from tune.concepts.flow import Trial, TrialReport
5 | from tune.concepts.logger import make_logger, set_current_metric_logger
6 | from tune.constants import TUNE_STOPPER_DEFAULT_CHECK_INTERVAL
7 |
8 |
9 | class NonIterativeObjectiveFunc:
10 | def generate_sort_metric(self, value: float) -> float: # pragma: no cover
11 | return value
12 |
13 | def run(self, trial: Trial) -> TrialReport: # pragma: no cover
14 | raise NotImplementedError
15 |
16 | def safe_run(self, trial: Trial) -> TrialReport:
17 | report = self.run(trial)
18 | return report.with_sort_metric(self.generate_sort_metric(report.metric))
19 |
20 |
21 | class NonIterativeObjectiveLocalOptimizer:
22 | @property
23 | def distributable(self) -> bool:
24 | return True
25 |
26 | def run(
27 | self, func: NonIterativeObjectiveFunc, trial: Trial, logger: Any
28 | ) -> TrialReport:
29 | # TODO: how to utilize execution_engine?
30 | if logger is None:
31 | report = func.safe_run(trial)
32 | else:
33 | with make_logger(logger) as p_logger:
34 | with set_current_metric_logger(
35 | p_logger.create_child(
36 | name=trial.trial_id[:5] + "-" + p_logger.unique_id,
37 | description=repr(trial),
38 | )
39 | ) as c_logger:
40 | report = func.safe_run(trial)
41 | c_logger.log_report(
42 | report, log_params=True, extract_metrics=True, log_metadata=True
43 | )
44 | return report
45 |
46 | def run_monitored_process(
47 | self,
48 | func: NonIterativeObjectiveFunc,
49 | trial: Trial,
50 | stop_checker: Callable[[], bool],
51 | logger: Any,
52 | interval: Any = TUNE_STOPPER_DEFAULT_CHECK_INTERVAL,
53 | ) -> TrialReport:
54 | return run_monitored_process(
55 | self.run,
56 | [func, trial],
57 | {"logger": logger},
58 | stop_checker=stop_checker,
59 | interval=interval,
60 | )
61 |
62 |
63 | def validate_noniterative_objective(
64 | func: NonIterativeObjectiveFunc,
65 | trial: Trial,
66 | validator: Callable[[TrialReport], None],
67 | optimizer: Optional[NonIterativeObjectiveLocalOptimizer] = None,
68 | logger: Any = None,
69 | ) -> None:
70 | _optimizer = optimizer or NonIterativeObjectiveLocalOptimizer()
71 | validator(
72 | _optimizer.run_monitored_process(
73 | func, trial, lambda: False, interval="1sec", logger=logger
74 | )
75 | )
76 |
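A small end-to-end sketch (hypothetical objective): ``safe_run`` attaches the sort metric, and per the code above the local optimizer simply delegates to it when ``logger`` is ``None``.

from tune.concepts.flow import Trial
from tune.noniterative.convert import to_noniterative_objective
from tune.noniterative.objective import NonIterativeObjectiveLocalOptimizer

def objective(a: float, b: float) -> float:
    return a * a + b * b

func = to_noniterative_objective(objective)
trial = Trial("t0", {"a": 1.0, "b": 2.0})
report = NonIterativeObjectiveLocalOptimizer().run(func, trial, logger=None)
assert report.metric == 5.0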
--------------------------------------------------------------------------------
/tune_hyperopt/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune_hyperopt.optimizer import HyperoptLocalOptimizer
4 | from tune import parse_noniterative_local_optimizer
5 |
6 |
7 | @parse_noniterative_local_optimizer.candidate(
8 | lambda obj: isinstance(obj, str)
9 | and (obj == "hyperopt" or obj.startswith("hyperopt:"))
10 | )
11 | def _parse_optimizer(obj: str) -> HyperoptLocalOptimizer:
12 | p = obj.split(":", 1)
13 | max_iter = 30 if len(p) == 1 else int(p[1])
14 | return HyperoptLocalOptimizer(max_iter=max_iter, seed=0)
15 |
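The candidate above makes string expressions parseable; a quick sketch of the convention it implements:

from tune import parse_noniterative_local_optimizer
import tune_hyperopt  # importing registers the "hyperopt" candidate

opt1 = parse_noniterative_local_optimizer("hyperopt")      # max_iter=30
opt2 = parse_noniterative_local_optimizer("hyperopt:100")  # max_iter=100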
--------------------------------------------------------------------------------
/tune_mlflow/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from .loggers import get_or_create_experiment, get_or_create_run
4 |
--------------------------------------------------------------------------------
/tune_notebook/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune_notebook.monitors import (
4 | NotebookSimpleChart,
5 | NotebookSimpleHist,
6 | NotebookSimpleRungs,
7 | NotebookSimpleTimeSeries,
8 | PrintBest,
9 | )
10 |
--------------------------------------------------------------------------------
/tune_optuna/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune_optuna.optimizer import OptunaLocalOptimizer
4 | from tune import parse_noniterative_local_optimizer
5 |
6 |
7 | @parse_noniterative_local_optimizer.candidate(
8 | lambda obj: isinstance(obj, str) and (obj == "optuna" or obj.startswith("optuna:"))
9 | )
10 | def _parse_optimizer(obj: str) -> OptunaLocalOptimizer:
11 | p = obj.split(":", 1)
12 | max_iter = 30 if len(p) == 1 else int(p[1])
13 | return OptunaLocalOptimizer(max_iter=max_iter)
14 |
--------------------------------------------------------------------------------
/tune_optuna/optimizer.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Callable, Dict, List, Optional
2 |
3 | import optuna
4 | from optuna.study import Study
5 | from triad import SerializableRLock
6 | from tune import (
7 | Choice,
8 | NonIterativeObjectiveFunc,
9 | NonIterativeObjectiveLocalOptimizer,
10 | Rand,
11 | RandInt,
12 | TransitionChoice,
13 | Trial,
14 | TrialReport,
15 | )
16 | from tune._utils.math import _IGNORABLE_ERROR, uniform_to_discrete, uniform_to_integers
17 | from tune.concepts.logger import make_logger, set_current_metric_logger
18 | from tune.concepts.space import TuningParametersTemplate
19 |
20 |
21 | class OptunaLocalOptimizer(NonIterativeObjectiveLocalOptimizer):
22 | def __init__(
23 | self, max_iter: int, create_study: Optional[Callable[[], Study]] = None
24 | ):
25 | self._max_iter = max_iter
26 | self._create_study = create_study or optuna.create_study
27 |
28 | def run(
29 | self, func: NonIterativeObjectiveFunc, trial: Trial, logger: Any
30 | ) -> TrialReport:
31 | template = trial.params
32 | if template.empty:
33 | tmp = NonIterativeObjectiveLocalOptimizer()
34 | return tmp.run(func, trial, logger=logger)
35 | lock = SerializableRLock()
36 | best_report: List[TrialReport] = []
37 |
38 | with make_logger(logger) as p_logger:
39 | with set_current_metric_logger(
40 | p_logger.create_child(
41 | name=trial.trial_id[:5] + "-" + p_logger.unique_id,
42 | description=repr(trial),
43 | )
44 | ) as c_logger:
45 |
46 | def obj(otrial: optuna.trial.Trial) -> float:
47 | with set_current_metric_logger(
48 | c_logger.create_child(is_step=True)
49 | ) as s_logger:
50 | params = template.fill_dict(_convert(otrial, template))
51 | report = func.safe_run(trial.with_params(params))
52 | with lock:
53 | if len(best_report) == 0:
54 | best_report.append(report)
55 | elif report.sort_metric < best_report[0].sort_metric:
56 | best_report[0] = report
57 | s_logger.log_report(best_report[0])
58 | return report.sort_metric
59 |
60 | study = self._create_study()
61 | study.optimize(obj, n_trials=self._max_iter)
62 | assert 1 == len(best_report)
63 | report = best_report[0]
64 | c_logger.log_params(report.trial.params.simple_value)
65 | c_logger.log_metrics({"OBJECTIVE_METRIC": report.metric})
66 | nm = {
67 | k: v
68 | for k, v in report.metadata.items()
69 | if isinstance(v, (int, float))
70 | }
71 | c_logger.log_metrics(nm)
72 | c_logger.log_metadata(report.metadata)
73 | return report
74 |
75 |
76 | def _convert(
77 | trial: optuna.trial.Trial, template: TuningParametersTemplate
78 | ) -> Dict[str, Any]:
79 | result: Dict[str, Any] = {}
80 | for k, v in template.params_dict.items():
81 | if isinstance(v, RandInt):
82 | if v.log and v.q is not None:
83 | value = trial.suggest_float(name=k, low=0, high=1.0)
84 | result[k] = uniform_to_integers(
85 | value,
86 | low=v.low,
87 | high=v.high,
88 | q=v.q, # type: ignore
89 | log=True,
90 | include_high=v.include_high,
91 | )
92 | else:
93 | _high: Any = v.high if v.include_high else v.high - 1
94 | result[k] = trial.suggest_int(
95 | name=k, low=v.low, high=_high, step=v.q, log=v.log
96 | )
97 | elif isinstance(v, Rand):
98 | if v.log and v.q is not None:
99 | value = trial.suggest_float(name=k, low=0, high=1.0)
100 | result[k] = uniform_to_discrete(
101 | value,
102 | low=v.low,
103 | high=v.high,
104 | q=v.q,
105 | log=True,
106 | include_high=v.include_high,
107 | )
108 | else:
109 | _high = v.high
110 | if v.q is not None and not v.include_high:
111 | _high -= _IGNORABLE_ERROR
112 | result[k] = trial.suggest_float(
113 | name=k, low=v.low, high=_high, step=v.q, log=v.log
114 | )
115 | elif isinstance(v, TransitionChoice):
116 | result[k] = v.values[
117 | trial.suggest_int(name=k, low=0, high=len(v.values) - 1)
118 | ]
119 | elif isinstance(v, Choice):
120 | result[k] = trial.suggest_categorical(name=k, choices=v.values)
121 | else: # pragma: no cover
122 | raise NotImplementedError
123 | return result
124 |
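A usage sketch under stated assumptions: the objective and tuning expressions are hypothetical, and ``logger=None`` is assumed to be handled by ``make_logger`` (the code above passes the logger through unconditionally).

from tune import Choice, Rand
from tune.concepts.flow import Trial
from tune.noniterative.convert import to_noniterative_objective
from tune_optuna.optimizer import OptunaLocalOptimizer

def objective(a: float, b: str) -> float:
    return a * a + (0.0 if b == "x" else 1.0)

func = to_noniterative_objective(objective)
trial = Trial("t0", {"a": Rand(-1.0, 1.0), "b": Choice("x", "y")})
report = OptunaLocalOptimizer(max_iter=20).run(func, trial, logger=None)
print(report.sort_metric, report.trial.params.simple_value)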
--------------------------------------------------------------------------------
/tune_sklearn/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from tune_sklearn.suggest import suggest_sk_models, suggest_sk_models_by_cv
3 | from tune_sklearn.utils import sk_space
4 |
--------------------------------------------------------------------------------
/tune_sklearn/objective.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Any, Optional, Tuple
3 | from uuid import uuid4
4 |
5 | import cloudpickle
6 | import numpy as np
7 | import pandas as pd
8 | from sklearn.metrics import get_scorer
9 | from sklearn.model_selection import cross_val_score
10 | from triad import FileSystem
11 |
12 | from tune import NonIterativeObjectiveFunc, Trial, TrialReport
13 | from tune.api.factory import TUNE_OBJECT_FACTORY
14 | from tune.constants import (
15 | SPACE_MODEL_NAME,
16 | TUNE_DATASET_DF_DEFAULT_NAME,
17 | TUNE_DATASET_VALIDATION_DF_DEFAULT_NAME,
18 | )
19 | from tune_sklearn.utils import to_sk_model, to_sk_model_expr
20 |
21 |
22 | class SKObjective(NonIterativeObjectiveFunc):
23 | def __init__(
24 | self,
25 | scoring: Any,
26 | feature_prefix: str = "",
27 | label_col: str = "label",
28 | checkpoint_path: Optional[str] = None,
29 | ) -> None:
30 | super().__init__()
31 | self._last_id = ""
32 | self._model_type: Any = None
33 | self._model_expr: str = ""
34 |
35 | self._scoring = scoring
36 | self._feature_prefix = feature_prefix
37 | self._label_col = label_col
38 | if checkpoint_path is None:
39 | self._checkpoint_path = checkpoint_path
40 | else:
41 | self._checkpoint_path = TUNE_OBJECT_FACTORY.get_path_or_temp(
42 | checkpoint_path
43 | )
44 |
45 | def generate_sort_metric(self, value: float) -> float:
46 | return -value
47 |
48 | def run(self, trial: Trial) -> TrialReport:
49 | params = dict(trial.params.simple_value)
50 | if trial.trial_id != self._last_id:
51 | self._model_type = to_sk_model(params.pop(SPACE_MODEL_NAME))
52 | self._model_expr = to_sk_model_expr(self._model_type)
53 | self._train_x, self._train_y = self._reset_xy(
54 | trial.dfs[TUNE_DATASET_DF_DEFAULT_NAME]
55 | )
56 | self._test_x, self._test_y = self._reset_xy(
57 | trial.dfs[TUNE_DATASET_VALIDATION_DF_DEFAULT_NAME]
58 | )
59 | self._last_id = trial.trial_id
60 | else:
61 | params.pop(SPACE_MODEL_NAME)
62 |
63 | model = self._model_type(**params).fit(self._train_x, self._train_y)
64 | metric = get_scorer(self._scoring)(model, self._test_x, self._test_y)
65 | metadata = dict(model=self._model_expr)
66 | if self._checkpoint_path is not None:
67 | fp = os.path.join(self._checkpoint_path, str(uuid4()) + ".pkl")
68 | with FileSystem().openbin(fp, mode="wb") as f:
69 | cloudpickle.dump(model, f)
70 | metadata["checkpoint_path"] = fp
71 | return TrialReport(
72 | trial,
73 | metric=metric,
74 | metadata=metadata,
75 | sort_metric=self.generate_sort_metric(metric),
76 | )
77 |
78 | def _reset_xy(self, df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame]:
79 | train_df = df.sample(frac=1, random_state=0).reset_index(drop=True)
80 |
81 | train_x = train_df.drop([self._label_col], axis=1)
82 | cols = [x for x in train_x.columns if x.startswith(self._feature_prefix)]
83 | return train_x[cols], train_df[self._label_col]
84 |
85 |
86 | class SKCVObjective(SKObjective):
87 | def __init__(
88 | self,
89 | scoring: Any,
90 | cv: int = 5,
91 | feature_prefix: str = "",
92 | label_col: str = "label",
93 | checkpoint_path: Optional[str] = None,
94 | ) -> None:
95 | super().__init__(
96 | scoring=scoring,
97 | feature_prefix=feature_prefix,
98 | label_col=label_col,
99 | checkpoint_path=checkpoint_path,
100 | )
101 | self._cv = cv
102 |
103 | def run(self, trial: Trial) -> TrialReport:
104 | params = dict(trial.params.simple_value)
105 | if trial.trial_id != self._last_id:
106 | self._model_type = to_sk_model(params.pop(SPACE_MODEL_NAME))
107 | self._model_expr = to_sk_model_expr(self._model_type)
108 | self._train_x, self._train_y = self._reset_xy(
109 | trial.dfs[TUNE_DATASET_DF_DEFAULT_NAME]
110 | )
111 | self._last_id = trial.trial_id
112 | else:
113 | params.pop(SPACE_MODEL_NAME)
114 |
115 | model = self._model_type(**params)
116 | s = cross_val_score(
117 | model, self._train_x, self._train_y, cv=self._cv, scoring=self._scoring
118 | )
119 | metadata = dict(model=self._model_expr, cv_scores=[float(x) for x in s])
120 | if self._checkpoint_path is not None:
121 | model.fit(self._train_x, self._train_y)
122 | fp = os.path.join(self._checkpoint_path, str(uuid4()) + ".pkl")
123 | with FileSystem().openbin(fp, mode="wb") as f:
124 | cloudpickle.dump(model, f)
125 | metadata["checkpoint_path"] = fp
126 | metric = float(np.mean(s))
127 | return TrialReport(
128 | trial,
129 | metric=metric,
130 | metadata=metadata,
131 | sort_metric=self.generate_sort_metric(metric),
132 | )
133 |
--------------------------------------------------------------------------------
/tune_sklearn/suggest.py:
--------------------------------------------------------------------------------
1 | from typing import Any, List, Optional
2 |
3 | from fugue import FugueWorkflow
4 | from tune import (
5 | TUNE_OBJECT_FACTORY,
6 | NonIterativeObjectiveLocalOptimizer,
7 | Space,
8 | TrialReport,
9 | optimize_noniterative,
10 | )
11 | from tune._utils import from_base64
12 | from tune.constants import TUNE_REPORT, TUNE_REPORT_METRIC
13 |
14 | from tune_sklearn.objective import SKCVObjective, SKObjective
15 |
16 |
17 | def suggest_sk_models(
18 | space: Space,
19 | train_df: Any,
20 | test_df: Any,
21 | scoring: str,
22 | temp_path: str = "",
23 | feature_prefix: str = "",
24 | label_col: str = "label",
25 | save_model: bool = False,
26 | partition_keys: Optional[List[str]] = None,
27 | top_n: int = 1,
28 | local_optimizer: Optional[NonIterativeObjectiveLocalOptimizer] = None,
29 | monitor: Any = None,
30 | stopper: Any = None,
31 | stop_check_interval: Any = None,
32 | distributed: Optional[bool] = None,
33 | execution_engine: Any = None,
34 | execution_engine_conf: Any = None,
35 | ) -> List[TrialReport]:
36 | dag = FugueWorkflow()
37 | dataset = TUNE_OBJECT_FACTORY.make_dataset(
38 | dag,
39 | space,
40 | df=train_df,
41 | test_df=test_df,
42 | partition_keys=partition_keys,
43 | temp_path=temp_path,
44 | )
45 | objective = SKObjective(
46 | scoring=scoring,
47 | feature_prefix=feature_prefix,
48 | label_col=label_col,
49 | checkpoint_path=temp_path if save_model else None,
50 | )
51 | study = optimize_noniterative(
52 | objective=objective,
53 | dataset=dataset,
54 | optimizer=local_optimizer,
55 | distributed=distributed,
56 | monitor=monitor,
57 | stopper=stopper,
58 | stop_check_interval=stop_check_interval,
59 | )
60 | study.result(top_n).yield_dataframe_as("result")
61 |
62 | rows = list(
63 | dag.run(
64 | execution_engine,
65 | conf=execution_engine_conf,
66 | )["result"].as_dict_iterable()
67 | )
68 | return [
69 | from_base64(r[TUNE_REPORT])
70 | for r in sorted(rows, key=lambda r: r[TUNE_REPORT_METRIC])
71 | ]
72 |
73 |
74 | def suggest_sk_models_by_cv(
75 | space: Space,
76 | train_df: Any,
77 | scoring: str,
78 | cv: int = 5,
79 | temp_path: str = "",
80 | feature_prefix: str = "",
81 | label_col: str = "label",
82 | save_model: bool = False,
83 | partition_keys: Optional[List[str]] = None,
84 | top_n: int = 1,
85 | local_optimizer: Optional[NonIterativeObjectiveLocalOptimizer] = None,
86 | monitor: Any = None,
87 | stopper: Any = None,
88 | stop_check_interval: Any = None,
89 | distributed: Optional[bool] = None,
90 | execution_engine: Any = None,
91 | execution_engine_conf: Any = None,
92 | ) -> List[TrialReport]:
93 | dag = FugueWorkflow()
94 | dataset = TUNE_OBJECT_FACTORY.make_dataset(
95 | dag,
96 | space,
97 | df=train_df,
98 | partition_keys=partition_keys,
99 | temp_path=temp_path,
100 | )
101 | objective = SKCVObjective(
102 | scoring=scoring,
103 | cv=cv,
104 | feature_prefix=feature_prefix,
105 | label_col=label_col,
106 | checkpoint_path=temp_path if save_model else None,
107 | )
108 | study = optimize_noniterative(
109 | objective=objective,
110 | dataset=dataset,
111 | optimizer=local_optimizer,
112 | distributed=distributed,
113 | monitor=monitor,
114 | stopper=stopper,
115 | stop_check_interval=stop_check_interval,
116 | )
117 | study.result(top_n).yield_dataframe_as("result")
118 |
119 | rows = list(
120 | dag.run(
121 | execution_engine,
122 | conf=execution_engine_conf,
123 | )["result"].as_dict_iterable()
124 | )
125 | return [
126 | from_base64(r[TUNE_REPORT])
127 | for r in sorted(rows, key=lambda r: r[TUNE_REPORT_METRIC])
128 | ]
129 |
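A compact sketch of the CV entry point (toy data; ``temp_path`` is assumed to be a writable directory, and with a pure ``Grid`` space no level-2 local optimizer is required):

import pandas as pd
from tune import Grid
from tune_sklearn import sk_space, suggest_sk_models_by_cv

df = pd.DataFrame({"f_a": [0, 1, 2, 3, 4, 5] * 2, "label": [0, 0, 0, 1, 1, 1] * 2})
space = sk_space("sklearn.linear_model.LogisticRegression", C=Grid(0.1, 1.0))
reports = suggest_sk_models_by_cv(
    space, df, scoring="accuracy", cv=2, feature_prefix="f_", temp_path="/tmp"
)
print(reports[0].metric, reports[0].params.simple_value)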
--------------------------------------------------------------------------------
/tune_sklearn/utils.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Type
2 |
3 | from sklearn.base import is_classifier, is_regressor
4 | from triad import assert_or_throw
5 | from triad.utils.convert import get_full_type_path, to_type
6 | from tune.constants import SPACE_MODEL_NAME
7 | from tune.concepts.space.spaces import Space
8 |
9 |
10 | def to_sk_model(obj: Any) -> Type:
11 | if isinstance(obj, str):
12 | obj = to_type(obj)
13 | assert_or_throw(
14 | is_classifier(obj) or is_regressor(obj),
15 | TypeError(f"{obj} is neither a sklearn classifier or regressor"),
16 | )
17 | return obj
18 |
19 |
20 | def to_sk_model_expr(model: Any) -> Any:
21 | if isinstance(model, str):
22 | model = to_sk_model(model)
23 | return get_full_type_path(model)
24 |
25 |
26 | def sk_space(model: str, **params: Any) -> Space:
27 | data = {SPACE_MODEL_NAME: to_sk_model_expr(model)}
28 | data.update(params)
29 | return Space(**data)
30 |
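For illustration, both forms below resolve to the same expression string (the estimator is an example):

from tune_sklearn.utils import to_sk_model, to_sk_model_expr

model_type = to_sk_model("sklearn.linear_model.LogisticRegression")
expr = to_sk_model_expr(model_type)  # "sklearn.linear_model.LogisticRegression"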
--------------------------------------------------------------------------------
/tune_tensorflow/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from tune_tensorflow.objective import KerasObjective
4 | from tune_tensorflow.spec import KerasTrainingSpec
5 | from tune_tensorflow.suggest import (
6 | suggest_keras_models_by_continuous_asha,
7 | suggest_keras_models_by_hyperband,
8 | suggest_keras_models_by_sha,
9 | )
10 | from tune_tensorflow.utils import keras_space
11 |
--------------------------------------------------------------------------------
/tune_tensorflow/objective.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Optional, Type
2 |
3 | from fs.base import FS as FSBase
4 | from tensorflow import keras
5 | from tune import IterativeObjectiveFunc, TrialReport
6 |
7 | from tune_tensorflow.spec import KerasTrainingSpec
8 | from tune_tensorflow.utils import extract_keras_spec
9 |
10 |
11 | class KerasObjective(IterativeObjectiveFunc):
12 | def __init__(self, type_dict: Dict[str, Type[KerasTrainingSpec]]) -> None:
13 | super().__init__()
14 | self._epochs = 0
15 | self._spec: Optional[KerasTrainingSpec] = None
16 | self._model: Optional[keras.models.Model] = None
17 | self._type_dict = type_dict
18 |
19 | @property
20 | def model(self) -> keras.models.Model:
21 | assert self._model is not None
22 | return self._model
23 |
24 | @property
25 | def spec(self) -> KerasTrainingSpec:
26 | assert self._spec is not None
27 | return self._spec
28 |
29 | def copy(self) -> "KerasObjective":
30 | return KerasObjective(self._type_dict)
31 |
32 | def generate_sort_metric(self, value: float) -> float:
33 | return self.spec.generate_sort_metric(value)
34 |
35 | def save_checkpoint(self, fs: FSBase) -> None:
36 | self.spec.save_checkpoint(fs, self.model)
37 | fs.writetext("epoch", str(self._epochs))
38 |
39 | def load_checkpoint(self, fs: FSBase) -> None:
40 | self.spec.load_checkpoint(fs, self.model)
41 | self._epochs = int(fs.readtext("epoch"))
42 |
43 | def run_single_rung(self, budget: float) -> TrialReport:
44 | trial = self.current_trial
45 | fit_args, fit_kwargs = self.spec.get_fit_params()
46 | fit_kwargs = dict(fit_kwargs)
47 | fit_kwargs.update(
48 | dict(epochs=self._epochs + int(budget), initial_epoch=self._epochs)
49 | )
50 | h = self.model.fit(*fit_args, **fit_kwargs)
51 | metric = self.spec.get_fit_metric(h)
52 | self._epochs += int(budget)
53 | return TrialReport(trial=trial, metric=metric, cost=budget, rung=self.rung)
54 |
55 | def initialize(self) -> None:
56 | spec = extract_keras_spec(self.current_trial.params, self._type_dict)
57 | self._spec = spec(self.current_trial.params, self.current_trial.dfs)
58 | self._model = self.spec.compile_model()
59 |
60 | def finalize(self) -> None:
61 | self.spec.finalize()
62 |
--------------------------------------------------------------------------------
/tune_tensorflow/spec.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 | from typing import Any, Dict, List, Tuple
3 |
4 | from fs.base import FS as FSBase
5 | from tensorflow import keras
6 | from triad import FileSystem
7 | from tune.concepts.space import to_template, TuningParametersTemplate
8 |
9 |
10 | class KerasTrainingSpec:
11 | def __init__(self, params: Any, dfs: Dict[str, Any]):
12 | self._params = to_template(params)
13 | self._dfs = dfs
14 |
15 | @property
16 | def params(self) -> TuningParametersTemplate:
17 | return self._params
18 |
19 | @property
20 | def dfs(self) -> Dict[str, Any]:
21 | return self._dfs
22 |
23 | def finalize(self) -> None:
24 | pass
25 |
26 | def generate_sort_metric(self, metric: float) -> float:
27 | raise NotImplementedError # pragma: no cover
28 |
29 | def get_fit_metric(self, history: keras.callbacks.History) -> float:
30 | raise NotImplementedError # pragma: no cover
31 |
32 | def get_fit_params(self) -> Tuple[List[Any], Dict[str, Any]]:
33 | raise NotImplementedError # pragma: no cover
34 |
35 | def get_compile_params(self) -> Dict[str, Any]:
36 | raise NotImplementedError # pragma: no cover
37 |
38 | def get_model(self) -> keras.models.Model:
39 | raise NotImplementedError # pragma: no cover
40 |
41 | def save_checkpoint(self, fs: FSBase, model: keras.models.Model) -> None:
42 | with tempfile.NamedTemporaryFile(suffix=".weights.h5") as tf:
43 | model.save_weights(tf.name)
44 | with open(tf.name, "rb") as fin:
45 | fs.writefile("model.h5", fin)
46 |
47 | def load_checkpoint(self, fs: FSBase, model: keras.models.Model) -> None:
48 | with tempfile.NamedTemporaryFile(suffix=".weights.h5") as tf:
49 | local_fs = FileSystem()
50 | with fs.open("model.h5", "rb") as fin:
51 | local_fs.writefile(tf.name, fin)
52 | model.load_weights(tf.name)
53 |
54 | def compile_model(self, **add_kwargs: Any) -> keras.models.Model:
55 | params = dict(self.get_compile_params())
56 | params.update(add_kwargs)
57 | model = self.get_model()
58 | model.compile(**params)
59 | return model
60 |
61 | def fit(self, **add_kwargs: Any) -> keras.callbacks.History:
62 | args, kwargs = self.get_fit_params()
63 | kwargs = dict(kwargs)
64 | kwargs.update(add_kwargs)
65 | model = self.compile_model()
66 | metric = model.fit(*args, **kwargs)
67 | self.finalize()
68 | return metric
69 |
70 | def compute_sort_metric(self, **add_kwargs: Any) -> float:
71 | metric = self.get_fit_metric(self.fit(**add_kwargs))
72 | return self.generate_sort_metric(metric)
73 |
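A minimal concrete spec (hypothetical XOR model and data) implementing the abstract methods above; accuracy is maximized, hence the negated sort metric:

import numpy as np
from tensorflow import keras

from tune_tensorflow.spec import KerasTrainingSpec


class XorSpec(KerasTrainingSpec):
    def get_model(self) -> keras.models.Model:
        return keras.Sequential(
            [
                keras.layers.Dense(4, activation="relu"),
                keras.layers.Dense(1, activation="sigmoid"),
            ]
        )

    def get_compile_params(self):
        return dict(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])

    def get_fit_params(self):
        x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
        y = np.array([0, 1, 1, 0])
        return [x, y], dict(verbose=0)

    def get_fit_metric(self, history: keras.callbacks.History) -> float:
        return float(history.history["accuracy"][-1])

    def generate_sort_metric(self, metric: float) -> float:
        return -metric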
--------------------------------------------------------------------------------
/tune_tensorflow/suggest.py:
--------------------------------------------------------------------------------
1 | from typing import Any, List, Optional, Tuple
2 |
3 | from tune import (
4 | Space,
5 | TrialReport,
6 | suggest_by_continuous_asha,
7 | suggest_by_hyperband,
8 | suggest_by_sha,
9 | )
10 |
11 | from tune_tensorflow.objective import KerasObjective
12 | from tune_tensorflow.utils import _TYPE_DICT
13 |
14 |
15 | def suggest_keras_models_by_sha(
16 | space: Space,
17 | plan: List[Tuple[float, int]],
18 | train_df: Any = None,
19 | temp_path: str = "",
20 | partition_keys: Optional[List[str]] = None,
21 | top_n: int = 1,
22 | monitor: Any = None,
23 | distributed: Optional[bool] = None,
24 | execution_engine: Any = None,
25 | execution_engine_conf: Any = None,
26 | ) -> List[TrialReport]:
27 | return suggest_by_sha(
28 | objective=KerasObjective(_TYPE_DICT),
29 | space=space,
30 | plan=plan,
31 | train_df=train_df,
32 | temp_path=temp_path,
33 | partition_keys=partition_keys,
34 | top_n=top_n,
35 | monitor=monitor,
36 | distributed=distributed,
37 | execution_engine=execution_engine,
38 | execution_engine_conf=execution_engine_conf,
39 | )
40 |
41 |
42 | def suggest_keras_models_by_hyperband(
43 | space: Space,
44 | plans: List[List[Tuple[float, int]]],
45 | train_df: Any = None,
46 | temp_path: str = "",
47 | partition_keys: Optional[List[str]] = None,
48 | top_n: int = 1,
49 | monitor: Any = None,
50 | distributed: Optional[bool] = None,
51 | execution_engine: Any = None,
52 | execution_engine_conf: Any = None,
53 | ) -> List[TrialReport]:
54 | return suggest_by_hyperband(
55 | objective=KerasObjective(_TYPE_DICT),
56 | space=space,
57 | plans=plans,
58 | train_df=train_df,
59 | temp_path=temp_path,
60 | partition_keys=partition_keys,
61 | top_n=top_n,
62 | monitor=monitor,
63 | distributed=distributed,
64 | execution_engine=execution_engine,
65 | execution_engine_conf=execution_engine_conf,
66 | )
67 |
68 |
69 | def suggest_keras_models_by_continuous_asha(
70 | space: Space,
71 | plan: List[Tuple[float, int]],
72 | train_df: Any = None,
73 | temp_path: str = "",
74 | partition_keys: Optional[List[str]] = None,
75 | top_n: int = 1,
76 | monitor: Any = None,
77 | execution_engine: Any = None,
78 | execution_engine_conf: Any = None,
79 | ) -> List[TrialReport]:
80 | return suggest_by_continuous_asha(
81 | objective=KerasObjective(_TYPE_DICT),
82 | space=space,
83 | plan=plan,
84 | train_df=train_df,
85 | temp_path=temp_path,
86 | partition_keys=partition_keys,
87 | top_n=top_n,
88 | monitor=monitor,
89 | execution_engine=execution_engine,
90 | execution_engine_conf=execution_engine_conf,
91 | )
92 |
--------------------------------------------------------------------------------
/tune_tensorflow/utils.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Type
2 |
3 | from triad.utils.convert import get_full_type_path, to_type
4 |
5 | from tune import Space
6 | from tune.concepts.space.parameters import TuningParametersTemplate
7 | from tune.constants import SPACE_MODEL_NAME
8 | from tune_tensorflow.spec import KerasTrainingSpec
9 |
10 | _TYPE_DICT: Dict[str, Type[KerasTrainingSpec]] = {}
11 |
12 |
13 | def to_keras_spec(obj: Any) -> Type[KerasTrainingSpec]:
14 | if isinstance(obj, str) and obj in _TYPE_DICT:
15 | return _TYPE_DICT[obj]
16 | return to_type(obj, KerasTrainingSpec)
17 |
18 |
19 | def to_keras_spec_expr(spec: Any) -> str:
20 | if isinstance(spec, str):
21 | spec = to_keras_spec(spec)
22 | return get_full_type_path(spec)
23 |
24 |
25 | def keras_space(model: Any, **params: Any) -> Space:
26 | expr = to_keras_spec_expr(model)
27 | _TYPE_DICT[expr] = to_keras_spec(model)
28 | data = {SPACE_MODEL_NAME: expr}
29 | data.update(params)
30 | return Space(**data)
31 |
32 |
33 | def extract_keras_spec(
34 | params: TuningParametersTemplate, type_dict: Dict[str, Any]
35 | ) -> Type[KerasTrainingSpec]:
36 | obj = params.simple_value[SPACE_MODEL_NAME]
37 | if isinstance(obj, str) and obj in type_dict:
38 | return type_dict[obj]
39 | return to_keras_spec(obj)
40 |
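Tying it together with the hypothetical ``XorSpec`` sketched under ``spec.py`` above: ``keras_space`` registers the spec type and embeds its expression under ``SPACE_MODEL_NAME``, so ``extract_keras_spec`` can recover it from trial parameters later.

from tune import Grid
from tune_tensorflow.utils import keras_space

space = keras_space(XorSpec, lr=Grid(0.01, 0.001))  # XorSpec from the earlier sketch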
--------------------------------------------------------------------------------
/tune_test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fugue-project/tune/8b8b34488ae7acea63e570dc7d3183ceae5a4b9d/tune_test/__init__.py
--------------------------------------------------------------------------------
/tune_version/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.1.6"
2 |
--------------------------------------------------------------------------------
/tune_wandb/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 |
3 | from .loggers import WandbGroupLogger
4 |
--------------------------------------------------------------------------------