├── .gitignore
├── .mypy.ini
├── .zenodo.json
├── CHANGELOG.md
├── LICENSE
├── Makefile
├── README.md
├── docs
├── Makefile
├── _static
│ └── custom.css
├── api
│ ├── collectors.rst
│ ├── components.rst
│ ├── helpers.rst
│ ├── problems.rst
│ └── solvers.rst
├── conf.py
├── guide
│ ├── collectors.ipynb
│ ├── features.ipynb
│ ├── primal.ipynb
│ ├── problems.ipynb
│ └── solvers.ipynb
├── index.rst
├── make.bat
└── tutorials
│ ├── Manifest.toml
│ ├── Project.toml
│ ├── cuts-gurobipy.ipynb
│ ├── getting-started-gurobipy.ipynb
│ ├── getting-started-jump.ipynb
│ ├── getting-started-pyomo.ipynb
│ └── gurobi.env
├── miplearn
├── .io.py.swp
├── __init__.py
├── classifiers
│ ├── __init__.py
│ ├── minprob.py
│ └── singleclass.py
├── collectors
│ ├── __init__.py
│ ├── basic.py
│ └── priority.py
├── components
│ ├── __init__.py
│ ├── cuts
│ │ ├── __init__.py
│ │ ├── expert.py
│ │ └── mem.py
│ ├── lazy
│ │ ├── __init__.py
│ │ ├── expert.py
│ │ └── mem.py
│ ├── primal
│ │ ├── __init__.py
│ │ ├── actions.py
│ │ ├── expert.py
│ │ ├── indep.py
│ │ ├── joint.py
│ │ └── mem.py
│ └── priority.py
├── extractors
│ ├── AlvLouWeh2017.py
│ ├── __init__.py
│ ├── abstract.py
│ ├── dummy.py
│ └── fields.py
├── h5.py
├── io.py
├── parallel.py
├── problems
│ ├── __init__.py
│ ├── binpack.py
│ ├── multiknapsack.py
│ ├── pmedian.py
│ ├── setcover.py
│ ├── setpack.py
│ ├── stab.py
│ ├── tsp.py
│ ├── uc.py
│ └── vertexcover.py
└── solvers
│ ├── __init__.py
│ ├── abstract.py
│ ├── gurobi.py
│ ├── learning.py
│ └── pyomo.py
├── requirements.txt
├── setup.py
└── tests
├── __init__.py
├── components
├── __init__.py
├── cuts
│ ├── __init__.py
│ └── test_mem.py
├── lazy
│ ├── __init__.py
│ └── test_mem.py
└── primal
│ ├── __init__.py
│ ├── test_expert.py
│ ├── test_indep.py
│ ├── test_joint.py
│ └── test_mem.py
├── conftest.py
├── extractors
├── __init__.py
├── test_dummy.py
└── test_fields.py
├── fixtures
├── gen_stab.py
├── gen_tsp.py
├── multiknapsack-n100-m4-00000.h5
├── multiknapsack-n100-m4-00000.mps.gz
├── multiknapsack-n100-m4-00000.pkl.gz
├── multiknapsack-n100-m4-00001.h5
├── multiknapsack-n100-m4-00001.mps.gz
├── multiknapsack-n100-m4-00001.pkl.gz
├── multiknapsack-n100-m4-00002.h5
├── multiknapsack-n100-m4-00002.mps.gz
├── multiknapsack-n100-m4-00002.pkl.gz
├── stab-gp-n50-00000.h5
├── stab-gp-n50-00000.mps.gz
├── stab-gp-n50-00000.pkl.gz
├── stab-gp-n50-00001.h5
├── stab-gp-n50-00001.mps.gz
├── stab-gp-n50-00001.pkl.gz
├── stab-gp-n50-00002.h5
├── stab-gp-n50-00002.mps.gz
├── stab-gp-n50-00002.pkl.gz
├── stab-pyo-n50-00000.h5
├── stab-pyo-n50-00000.mps.gz
├── stab-pyo-n50-00000.pkl.gz
├── stab-pyo-n50-00001.h5
├── stab-pyo-n50-00001.mps.gz
├── stab-pyo-n50-00001.pkl.gz
├── stab-pyo-n50-00002.h5
├── stab-pyo-n50-00002.mps.gz
├── stab-pyo-n50-00002.pkl.gz
├── tsp-gp-n20-00000.h5
├── tsp-gp-n20-00000.mps.gz
├── tsp-gp-n20-00000.pkl.gz
├── tsp-gp-n20-00001.h5
├── tsp-gp-n20-00001.mps.gz
├── tsp-gp-n20-00001.pkl.gz
├── tsp-gp-n20-00002.h5
├── tsp-gp-n20-00002.mps.gz
├── tsp-gp-n20-00002.pkl.gz
├── tsp-pyo-n20-00000.h5
├── tsp-pyo-n20-00000.mps.gz
├── tsp-pyo-n20-00000.pkl.gz
├── tsp-pyo-n20-00001.h5
├── tsp-pyo-n20-00001.mps.gz
├── tsp-pyo-n20-00001.pkl.gz
├── tsp-pyo-n20-00002.h5
├── tsp-pyo-n20-00002.mps.gz
└── tsp-pyo-n20-00002.pkl.gz
├── problems
├── __init__.py
├── test_binpack.py
├── test_multiknapsack.py
├── test_pmedian.py
├── test_setcover.py
├── test_setpack.py
├── test_stab.py
├── test_tsp.py
├── test_uc.py
└── test_vertexcover.py
├── test_h5.py
├── test_lazy_pyomo.py
└── test_solvers.py
/.gitignore:
--------------------------------------------------------------------------------
1 | TODO.md
2 | .idea
3 | *.gz
4 | done
5 | *.bin
6 | *$py.class
7 | *.cover
8 | *.egg
9 | *.egg-info/
10 | *.log
11 | *.manifest
12 | *.mo
13 | *.pot
14 | *.py,cover
15 | *.py[cod]
16 | *.sage.py
17 | *.so
18 | *.spec
19 | .Python
20 | .cache
21 | .coverage
22 | .coverage.*
23 | .dmypy.json
24 | .eggs/
25 | .env
26 | .hypothesis/
27 | .installed.cfg
28 | .ipynb_checkpoints
29 | .mypy_cache/
30 | .nox/
31 | .pyre/
32 | .pytest_cache/
33 | .python-version
34 | .ropeproject
35 | .scrapy
36 | .spyderproject
37 | .spyproject
38 | .tox/
39 | .venv
40 | .webassets-cache
41 | /site
42 | ENV/
43 | MANIFEST
44 | **/__pycache__/
45 | **/__pypackages__/
46 | build/
47 | celerybeat-schedule
48 | celerybeat.pid
49 | coverage.xml
50 | db.sqlite3
51 | db.sqlite3-journal
52 | develop-eggs/
53 | dist/
54 | dmypy.json
55 | docs/_build/
56 | downloads/
57 | eggs/
58 | env.bak/
59 | env/
60 | htmlcov/
61 | ipython_config.py
62 | lib/
63 | lib64/
64 | local_settings.py
65 | nosetests.xml
66 | parts/
67 | pip-delete-this-directory.txt
68 | pip-log.txt
69 | pip-wheel-metadata/
70 | profile_default/
71 | sdist/
72 | share/python-wheels/
73 | target/
74 | var/
75 | venv.bak/
76 | venv/
77 | wheels/
78 | notebooks/
79 | .vscode
80 | tmp
81 | benchmark/data
82 | benchmark/results
83 | **/*.xz
84 | **/*.h5
85 | **/*.jld2
86 |
--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_missing_imports = True
3 | disallow_untyped_defs = True
4 | disallow_untyped_calls = True
5 | disallow_incomplete_defs = True
6 | pretty = True
7 | no_implicit_optional = True
8 |
--------------------------------------------------------------------------------
/.zenodo.json:
--------------------------------------------------------------------------------
1 | {
2 | "creators": [
3 | {
4 | "orcid": "0000-0002-5022-9802",
5 | "affiliation": "Argonne National Laboratory",
6 | "name": "Santos Xavier, Alinson"
7 | },
8 | {
9 | "affiliation": "Argonne National Laboratory",
10 | "name": "Qiu, Feng"
11 | },
12 | {
13 | "affiliation": "Georgia Institute of Technology",
14 | "name": "Gu, Xiaoyi"
15 | },
16 | {
17 | "affiliation": "Georgia Institute of Technology",
18 | "name": "Becu, Berkay"
19 | },
20 | {
21 | "affiliation": "Georgia Institute of Technology",
22 | "name": "Dey, Santanu S."
23 | }
24 | ],
25 | "title": "MIPLearn: An Extensible Framework for Learning-Enhanced Optimization",
26 | "description": "MIPLearn is an extensible framework for solving discrete optimization problems using a combination of Mixed-Integer Linear Programming (MIP) and Machine Learning (ML). MIPLearn uses ML methods to automatically identify patterns in previously solved instances of the problem, then uses these patterns to accelerate the performance of conventional state-of-the-art MIP solvers such as CPLEX, Gurobi or XPRESS."
27 | }
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to
7 | [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
8 |
9 | ## [0.4.2] - 2024-12-10
10 |
11 | ### Changed
12 |
13 | - H5File: Use float64 precision instead of float32
14 | - LearningSolver: optimize now returns (model, stats) instead of just stats
15 | - Update dependency: Gurobi 11
16 |
17 | ## [0.4.0] - 2024-02-06
18 |
19 | ### Added
20 |
21 | - Add ML strategies for user cuts
22 | - Add ML strategies for lazy constraints
23 |
24 | ### Changed
25 |
26 | - LearningSolver.solve no longer generates HDF5 files; use a collector instead.
27 | - Add `_gurobipy` suffix to all `build_model` functions; implement some `_pyomo`
28 | and `_jump` functions.
29 |
30 | ## [0.3.0] - 2023-06-08
31 |
32 | This is a complete rewrite of the original prototype package, with an entirely
33 | new API, focused on performance, scalability and flexibility.
34 |
35 | ### Added
36 |
37 | - Add support for Python/Gurobipy and Julia/JuMP, in addition to the existing
38 | Python/Pyomo interface.
39 | - Add six new random instance generators (bin packing, capacitated p-median, set
40 | cover, set packing, unit commitment, vertex cover), in addition to the three
41 | existing generators (multiknapsack, stable set, tsp).
42 | - Collect some additional raw training data (e.g. basis status, reduced costs,
43 | etc)
44 | - Add new primal solution ML strategies (memorizing, independent vars and joint
45 | vars)
46 | - Add new primal solution actions (set warm start, fix variables, enforce
47 | proximity)
48 | - Add runnable tutorials and user guides to the documentation.
49 |
50 | ### Changed
51 |
52 | - To support large-scale problems and datasets, switch from an in-memory
53 | architecture to a file-based architecture, using HDF5 files.
54 | - To accelerate development cycle, split training data collection from feature
55 | extraction.
56 |
57 | ### Removed
58 |
59 | - Temporarily remove ML strategies for lazy constraints
60 | - Remove benchmarks from documentation. These will be published in a separate
61 | paper.
62 |
63 | ## [0.1.0] - 2020-11-23
64 |
65 | - Initial public release
66 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright © 2020, UChicago Argonne, LLC
2 |
3 | All Rights Reserved
4 |
5 | Software Name: MIPLearn
6 |
7 | By: Argonne National Laboratory
8 |
9 | OPEN SOURCE LICENSE
10 | -------------------
11 |
12 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
13 |
14 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
15 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
16 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
17 |
18 | ********************************************************************************
19 |
20 | DISCLAIMER
21 | ----------
22 |
23 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 |
25 | ********************************************************************************
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PYTHON := python3
2 | PYTEST := pytest
3 | PIP := $(PYTHON) -m pip
4 | MYPY := $(PYTHON) -m mypy
5 | PYTEST_ARGS := -W ignore::DeprecationWarning -vv --log-level=DEBUG
6 | VERSION := 0.4
7 |
8 | all: docs test
9 |
10 | clean:
11 | rm -rf build/* dist/*
12 |
13 | develop:
14 | $(PYTHON) setup.py develop
15 |
16 | dist:
17 | $(PYTHON) setup.py sdist bdist_wheel
18 |
19 | dist-upload:
20 | $(PYTHON) -m twine upload dist/*
21 |
22 | docs:
23 | rm -rf ../docs/$(VERSION)
24 | cd docs; make dirhtml
25 | rsync -avP --delete-after docs/_build/dirhtml/ ../docs/$(VERSION)/
26 |
27 | install-deps:
28 | $(PIP) install --upgrade pip
29 | $(PIP) install --upgrade -r requirements.txt
30 |
31 | install:
32 | $(PYTHON) setup.py install
33 |
34 | uninstall:
35 | $(PIP) uninstall miplearn
36 |
37 | reformat:
38 | $(PYTHON) -m black .
39 |
40 | test:
41 | # pyflakes miplearn tests
42 | black --check .
43 | # rm -rf .mypy_cache
44 | $(MYPY) -p miplearn
45 | $(MYPY) -p tests
46 | $(PYTEST) $(PYTEST_ARGS)
47 |
48 | .PHONY: test test-watch docs install dist
49 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
MIPLearn
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | **MIPLearn** is an extensible framework for solving discrete optimization problems using a combination of Mixed-Integer Linear Programming (MIP) and Machine Learning (ML). MIPLearn uses ML methods to automatically identify patterns in previously solved instances of the problem, then uses these patterns to accelerate the performance of conventional state-of-the-art MIP solvers such as CPLEX, Gurobi or XPRESS.
18 |
19 | Unlike pure ML methods, MIPLearn is not only able to find high-quality solutions to discrete optimization problems, but it can also prove the optimality and feasibility of these solutions. Unlike conventional MIP solvers, MIPLearn can take full advantage of very specific observations that happen to be true in a particular family of instances (such as the observation that a particular constraint is typically redundant, or that a particular variable typically assumes a certain value). For certain classes of problems, this approach may provide significant performance benefits.
20 |
21 | Documentation
22 | -------------
23 |
24 | - Tutorials:
25 | 1. [Getting started (Pyomo)](https://anl-ceeesa.github.io/MIPLearn/0.4/tutorials/getting-started-pyomo/)
26 | 2. [Getting started (Gurobipy)](https://anl-ceeesa.github.io/MIPLearn/0.4/tutorials/getting-started-gurobipy/)
27 | 3. [Getting started (JuMP)](https://anl-ceeesa.github.io/MIPLearn/0.4/tutorials/getting-started-jump/)
28 | 4. [User cuts and lazy constraints](https://anl-ceeesa.github.io/MIPLearn/0.4/tutorials/cuts-gurobipy/)
29 | - User Guide
30 | 1. [Benchmark problems](https://anl-ceeesa.github.io/MIPLearn/0.4/guide/problems/)
31 | 2. [Training data collectors](https://anl-ceeesa.github.io/MIPLearn/0.4/guide/collectors/)
32 | 3. [Feature extractors](https://anl-ceeesa.github.io/MIPLearn/0.4/guide/features/)
33 | 4. [Primal components](https://anl-ceeesa.github.io/MIPLearn/0.4/guide/primal/)
34 | 5. [Learning solver](https://anl-ceeesa.github.io/MIPLearn/0.4/guide/solvers/)
35 | - Python API Reference
36 | 1. [Benchmark problems](https://anl-ceeesa.github.io/MIPLearn/0.4/api/problems/)
37 | 2. [Collectors & extractors](https://anl-ceeesa.github.io/MIPLearn/0.4/api/collectors/)
38 | 3. [Components](https://anl-ceeesa.github.io/MIPLearn/0.4/api/components/)
39 | 4. [Solvers](https://anl-ceeesa.github.io/MIPLearn/0.4/api/solvers/)
40 | 5. [Helpers](https://anl-ceeesa.github.io/MIPLearn/0.4/api/helpers/)
41 |
42 | Authors
43 | -------
44 |
45 | - **Alinson S. Xavier** (Argonne National Laboratory)
46 | - **Feng Qiu** (Argonne National Laboratory)
47 | - **Xiaoyi Gu** (Georgia Institute of Technology)
48 | - **Berkay Becu** (Georgia Institute of Technology)
49 | - **Santanu S. Dey** (Georgia Institute of Technology)
50 |
51 |
52 | Acknowledgments
53 | ---------------
54 | * Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy.
55 | * Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program**.
56 |
57 | Citing MIPLearn
58 | ---------------
59 |
60 | If you use MIPLearn in your research (either the solver or the included problem generators), we kindly request that you cite the package as follows:
61 |
62 | * **Alinson S. Xavier, Feng Qiu, Xiaoyi Gu, Berkay Becu, Santanu S. Dey.** *MIPLearn: An Extensible Framework for Learning-Enhanced Optimization (Version 0.4)*. Zenodo (2024). DOI: [10.5281/zenodo.4287567](https://doi.org/10.5281/zenodo.4287567)
63 |
64 | If you use MIPLearn in the field of power systems optimization, we kindly request that you cite the reference below, in which the main techniques implemented in MIPLearn were first developed:
65 |
66 | * **Alinson S. Xavier, Feng Qiu, Shabbir Ahmed.** *Learning to Solve Large-Scale Unit Commitment Problems.* INFORMS Journal on Computing (2020). DOI: [10.1287/ijoc.2020.0976](https://doi.org/10.1287/ijoc.2020.0976)
67 |
68 | License
69 | -------
70 |
71 | Released under the modified BSD license. See `LICENSE` for more details.
72 |
73 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/_static/custom.css:
--------------------------------------------------------------------------------
1 | h1.site-logo {
2 | font-size: 30px !important;
3 | }
4 |
5 | h1.site-logo small {
6 | font-size: 20px !important;
7 | }
8 |
9 | code {
10 | display: inline-block;
11 | border-radius: 4px;
12 | padding: 0 4px;
13 | background-color: #eee;
14 | color: rgb(232, 62, 140);
15 | }
16 |
17 | .right-next, .left-prev {
18 | border-radius: 8px;
19 | border-width: 0px !important;
20 | box-shadow: 2px 2px 6px rgba(0, 0, 0, 0.2);
21 | }
22 |
23 | .right-next:hover, .left-prev:hover {
24 | text-decoration: none;
25 | }
26 |
27 | .admonition {
28 | border-radius: 8px;
29 | border-width: 0;
30 | box-shadow: 0 0 0 !important;
31 | }
32 |
33 | .note { background-color: rgba(0, 123, 255, 0.1); }
34 | .note * { color: rgb(69 94 121); }
35 |
36 | .warning { background-color: rgb(220 150 40 / 10%); }
37 | .warning * { color: rgb(105 72 28); }
38 |
39 | .input_area, .output_area, .output_area img {
40 | border-radius: 8px !important;
41 | border-width: 0 !important;
42 | margin: 8px 0 8px 0;
43 | }
44 |
45 | .output_area {
46 | padding: 4px;
47 | background-color: hsl(227 60% 11% / 0.7) !important;
48 | }
49 |
50 | .output_area pre {
51 | color: #fff;
52 | line-height: 20px !important;
53 | }
54 |
55 | .input_area pre {
56 | background-color: rgba(0 0 0 / 3%) !important;
57 | padding: 12px !important;
58 | line-height: 20px;
59 | }
60 |
61 | .ansi-green-intense-fg {
62 | color: #64d88b !important;
63 | }
64 |
65 | #site-navigation {
66 | background-color: #fafafa;
67 | }
68 |
69 | .container, .container-lg, .container-md, .container-sm, .container-xl {
70 | max-width: inherit !important;
71 | }
72 |
73 | h1, h2 {
74 | font-weight: bold !important;
75 | }
76 |
77 | #main-content .section {
78 | max-width: 900px !important;
79 | margin: 0 auto !important;
80 | font-size: 16px;
81 | }
82 |
83 | p.caption {
84 | font-weight: bold;
85 | }
86 |
87 | h2 {
88 | padding-bottom: 5px;
89 | border-bottom: 1px solid #ccc;
90 | }
91 |
92 | h3 {
93 | margin-top: 1.5rem;
94 | }
95 |
96 | tbody, thead, pre {
97 | border: 1px solid rgba(0, 0, 0, 0.25);
98 | }
99 |
100 | table td, th {
101 | padding: 8px;
102 | }
103 |
104 | table p {
105 | margin-bottom: 0;
106 | }
107 |
108 | table td code {
109 | white-space: nowrap;
110 | }
111 |
112 | table tr,
113 | table th {
114 | border-bottom: 1px solid rgba(0, 0, 0, 0.1);
115 | }
116 |
117 | table tr:last-child {
118 | border-bottom: 0;
119 | }
120 |
121 |
--------------------------------------------------------------------------------
/docs/api/collectors.rst:
--------------------------------------------------------------------------------
1 | Collectors & Extractors
2 | =======================
3 |
4 | miplearn.classifiers.minprob
5 | ----------------------------
6 |
7 | .. automodule:: miplearn.classifiers.minprob
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | miplearn.classifiers.singleclass
13 | --------------------------------
14 |
15 | .. automodule:: miplearn.classifiers.singleclass
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | miplearn.collectors.basic
21 | -------------------------
22 |
23 | .. automodule:: miplearn.collectors.basic
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | miplearn.extractors.fields
29 | --------------------------
30 |
31 | .. automodule:: miplearn.extractors.fields
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 |
36 | miplearn.extractors.AlvLouWeh2017
37 | ---------------------------------
38 |
39 | .. automodule:: miplearn.extractors.AlvLouWeh2017
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
--------------------------------------------------------------------------------
/docs/api/components.rst:
--------------------------------------------------------------------------------
1 | Components
2 | ==========
3 |
4 | miplearn.components.primal.actions
5 | ----------------------------------
6 |
7 | .. automodule:: miplearn.components.primal.actions
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | miplearn.components.primal.expert
13 | ----------------------------------
14 |
15 | .. automodule:: miplearn.components.primal.expert
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | miplearn.components.primal.indep
21 | ----------------------------------
22 |
23 | .. automodule:: miplearn.components.primal.indep
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | miplearn.components.primal.joint
29 | ----------------------------------
30 |
31 | .. automodule:: miplearn.components.primal.joint
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 |
36 | miplearn.components.primal.mem
37 | ----------------------------------
38 |
39 | .. automodule:: miplearn.components.primal.mem
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
44 |
--------------------------------------------------------------------------------
/docs/api/helpers.rst:
--------------------------------------------------------------------------------
1 | Helpers
2 | =======
3 |
4 | miplearn.io
5 | -----------
6 |
7 | .. automodule:: miplearn.io
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | miplearn.h5
13 | -----------
14 |
15 | .. automodule:: miplearn.h5
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
--------------------------------------------------------------------------------
/docs/api/problems.rst:
--------------------------------------------------------------------------------
1 | Benchmark Problems
2 | ==================
3 |
4 | miplearn.problems.binpack
5 | -------------------------
6 |
7 | .. automodule:: miplearn.problems.binpack
8 | :members:
9 |
10 | miplearn.problems.multiknapsack
11 | -------------------------------
12 |
13 | .. automodule:: miplearn.problems.multiknapsack
14 | :members:
15 |
16 | miplearn.problems.pmedian
17 | -------------------------
18 |
19 | .. automodule:: miplearn.problems.pmedian
20 | :members:
21 |
22 | miplearn.problems.setcover
23 | --------------------------
24 |
25 | .. automodule:: miplearn.problems.setcover
26 | :members:
27 |
28 | miplearn.problems.setpack
29 | -------------------------
30 |
31 | .. automodule:: miplearn.problems.setpack
32 | :members:
33 |
34 | miplearn.problems.stab
35 | ----------------------
36 |
37 | .. automodule:: miplearn.problems.stab
38 | :members:
39 |
40 | miplearn.problems.tsp
41 | ---------------------
42 |
43 | .. automodule:: miplearn.problems.tsp
44 | :members:
45 |
46 | miplearn.problems.uc
47 | --------------------
48 |
49 | .. automodule:: miplearn.problems.uc
50 | :members:
51 |
52 | miplearn.problems.vertexcover
53 | -----------------------------
54 |
55 | .. automodule:: miplearn.problems.vertexcover
56 | :members:
57 |
58 |
--------------------------------------------------------------------------------
/docs/api/solvers.rst:
--------------------------------------------------------------------------------
1 | Solvers
2 | =======
3 |
4 | miplearn.solvers.abstract
5 | -------------------------
6 |
7 | .. automodule:: miplearn.solvers.abstract
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | miplearn.solvers.gurobi
13 | -------------------------
14 |
15 | .. automodule:: miplearn.solvers.gurobi
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | miplearn.solvers.learning
21 | -------------------------
22 |
23 | .. automodule:: miplearn.solvers.learning
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | project = "MIPLearn"
2 | copyright = "2020-2023, UChicago Argonne, LLC"
3 | author = ""
4 | release = "0.4"
5 | extensions = [
6 | "myst_parser",
7 | "nbsphinx",
8 | "sphinx_multitoc_numbering",
9 | "sphinx.ext.autodoc",
10 | "sphinx.ext.napoleon",
11 | ]
12 | templates_path = ["_templates"]
13 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
14 | html_theme = "sphinx_book_theme"
15 | html_static_path = ["_static"]
16 | html_css_files = [
17 | "custom.css",
18 | ]
19 | html_theme_options = {
20 | "repository_url": "https://github.com/ANL-CEEESA/MIPLearn/",
21 | "use_repository_button": False,
22 | "extra_navbar": "",
23 | }
24 | html_title = f"MIPLearn {release}"
25 | nbsphinx_execute = "never"
26 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | MIPLearn
2 | ========
3 | **MIPLearn** is an extensible framework for solving discrete optimization problems using a combination of Mixed-Integer Linear Programming (MIP) and Machine Learning (ML). MIPLearn uses ML methods to automatically identify patterns in previously solved instances of the problem, then uses these patterns to accelerate the performance of conventional state-of-the-art MIP solvers such as CPLEX, Gurobi or XPRESS.
4 |
5 | Unlike pure ML methods, MIPLearn is not only able to find high-quality solutions to discrete optimization problems, but it can also prove the optimality and feasibility of these solutions. Unlike conventional MIP solvers, MIPLearn can take full advantage of very specific observations that happen to be true in a particular family of instances (such as the observation that a particular constraint is typically redundant, or that a particular variable typically assumes a certain value). For certain classes of problems, this approach may provide significant performance benefits.
6 |
7 |
8 | Contents
9 | --------
10 |
11 | .. toctree::
12 | :maxdepth: 1
13 | :caption: Tutorials
14 | :numbered: 2
15 |
16 | tutorials/getting-started-pyomo
17 | tutorials/getting-started-gurobipy
18 | tutorials/getting-started-jump
19 | tutorials/cuts-gurobipy
20 |
21 | .. toctree::
22 | :maxdepth: 2
23 | :caption: User Guide
24 | :numbered: 2
25 |
26 | guide/problems
27 | guide/collectors
28 | guide/features
29 | guide/primal
30 | guide/solvers
31 |
32 | .. toctree::
33 | :maxdepth: 1
34 | :caption: Python API Reference
35 | :numbered: 2
36 |
37 | api/problems
38 | api/collectors
39 | api/components
40 | api/solvers
41 | api/helpers
42 |
43 |
44 | Authors
45 | -------
46 |
47 | - **Alinson S. Xavier** (Argonne National Laboratory)
48 | - **Feng Qiu** (Argonne National Laboratory)
49 | - **Xiaoyi Gu** (Georgia Institute of Technology)
50 | - **Berkay Becu** (Georgia Institute of Technology)
51 | - **Santanu S. Dey** (Georgia Institute of Technology)
52 |
53 |
54 | Acknowledgments
55 | ---------------
56 | * Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy.
57 | * Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program**.
58 |
59 | Citing MIPLearn
60 | ---------------
61 |
62 | If you use MIPLearn in your research (either the solver or the included problem generators), we kindly request that you cite the package as follows:
63 |
64 | * **Alinson S. Xavier, Feng Qiu, Xiaoyi Gu, Berkay Becu, Santanu S. Dey.** *MIPLearn: An Extensible Framework for Learning-Enhanced Optimization (Version 0.4)*. Zenodo (2024). DOI: https://doi.org/10.5281/zenodo.4287567
65 |
66 | If you use MIPLearn in the field of power systems optimization, we kindly request that you cite the reference below, in which the main techniques implemented in MIPLearn were first developed:
67 |
68 | * **Alinson S. Xavier, Feng Qiu, Shabbir Ahmed.** *Learning to Solve Large-Scale Unit Commitment Problems.* INFORMS Journal on Computing (2020). DOI: https://doi.org/10.1287/ijoc.2020.0976
69 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/tutorials/Project.toml:
--------------------------------------------------------------------------------
1 | [deps]
2 | Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
3 | Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b"
4 | JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
5 | MIPLearn = "2b1277c3-b477-4c49-a15e-7ba350325c68"
6 | PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
7 | Suppressor = "fd094767-a336-5f1f-9728-57cf17d0bbfb"
8 |
--------------------------------------------------------------------------------
/docs/tutorials/gurobi.env:
--------------------------------------------------------------------------------
1 | Threads 1
--------------------------------------------------------------------------------
/miplearn/.io.py.swp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/miplearn/.io.py.swp
--------------------------------------------------------------------------------
/miplearn/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/miplearn/classifiers/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/miplearn/classifiers/minprob.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import List, Any, Callable, Optional
5 |
6 | import numpy as np
7 | import sklearn
8 | from sklearn.base import BaseEstimator
9 | from sklearn.utils.multiclass import unique_labels
10 |
11 |
class MinProbabilityClassifier(BaseEstimator):
    """Meta-classifier that suppresses low-confidence predictions.

    Wraps a binary base classifier and compares its ``predict_proba`` output
    against per-class thresholds. A sample is assigned a class only if that
    class's probability meets its threshold; otherwise NaN is returned in
    place of a prediction. When both classes qualify, the later class in
    ``classes_`` wins, matching the original forward-scan behavior.
    """

    def __init__(
        self,
        base_clf: Any,
        thresholds: List[float],
        clone_fn: Callable[[Any], Any] = sklearn.base.clone,
    ) -> None:
        # Only binary problems are supported: exactly one threshold per class.
        assert len(thresholds) == 2
        self.base_clf = base_clf
        self.thresholds = thresholds
        self.clone_fn = clone_fn
        self.clf_: Optional[Any] = None
        self.classes_: Optional[List[Any]] = None

    def fit(self, x: np.ndarray, y: np.ndarray) -> None:
        """Clone the base classifier and train it on (x, y)."""
        assert len(x.shape) == 2
        assert len(y.shape) == 1
        labels = unique_labels(y)
        assert len(labels) == len(self.thresholds)

        clf = self.clone_fn(self.base_clf)
        clf.fit(x, y)
        self.clf_ = clf
        self.classes_ = clf.classes_

    def predict(self, x: np.ndarray) -> np.ndarray:
        """Predict classes, emitting NaN where no class meets its threshold."""
        assert self.clf_ is not None
        assert self.classes_ is not None

        proba = self.clf_.predict_proba(x)
        assert len(proba.shape) == 2
        assert proba.shape[0] == x.shape[0]
        assert proba.shape[1] == 2

        # Scan classes in reverse so that, when both probabilities clear
        # their thresholds, the result matches the original loop in which
        # the last qualifying class overwrote earlier ones.
        pairs = list(enumerate(self.classes_))[::-1]
        predictions = [
            next(
                (cls for idx, cls in pairs if proba[row, idx] >= self.thresholds[idx]),
                float("nan"),
            )
            for row in range(x.shape[0])
        ]
        return np.array(predictions)
62 |
--------------------------------------------------------------------------------
/miplearn/classifiers/singleclass.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import Callable, Optional
5 |
6 | import numpy as np
7 | import sklearn.base
8 | from sklearn.base import BaseEstimator
9 | from sklearn.utils.multiclass import unique_labels
10 |
11 |
class SingleClassFix(BaseEstimator):
    """Workaround for sklearn classifiers that reject single-class data.

    Some sklearn classifiers (e.g. logistic regression) fail when the
    training labels contain a single class. If that happens, this wrapper
    simply memorizes the lone class and always predicts it; otherwise it
    trains a clone of the base classifier and delegates predictions to it.
    """

    def __init__(
        self,
        base_clf: BaseEstimator,
        clone_fn: Callable = sklearn.base.clone,
    ):
        self.base_clf = base_clf
        self.clf_: Optional[BaseEstimator] = None
        self.constant_ = None  # the memorized class, when training data had one class
        self.classes_ = None
        self.clone_fn = clone_fn

    def fit(self, x: np.ndarray, y: np.ndarray) -> None:
        """Fit the base classifier, or memorize the single observed class."""
        labels = unique_labels(y)
        if len(labels) == 1:
            # Degenerate case: remember the class instead of fitting.
            assert labels[0] is not None
            self.clf_ = None
            self.constant_ = labels[0]
            self.classes_ = labels
        else:
            clf = self.clone_fn(self.base_clf)
            assert clf is not None
            clf.fit(x, y)
            self.clf_ = clf
            self.constant_ = None
            self.classes_ = clf.classes_

    def predict(self, x: np.ndarray) -> np.ndarray:
        """Predict with the base classifier, or return the memorized class."""
        if self.constant_ is None:
            assert self.clf_ is not None
            return self.clf_.predict(x)
        return np.full(x.shape[0], self.constant_)
52 |
--------------------------------------------------------------------------------
/miplearn/collectors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/miplearn/collectors/__init__.py
--------------------------------------------------------------------------------
/miplearn/collectors/basic.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import json
6 | import os
7 | import sys
8 |
9 | from io import StringIO
10 | from os.path import exists
11 | from typing import Callable, List, Any
12 | import traceback
13 |
14 | from ..h5 import H5File
15 | from ..io import _RedirectOutput, gzip, _to_h5_filename
16 | from ..parallel import p_umap
17 |
18 |
class BasicCollector:
    """Solves instances and stores training data in companion HDF5 files.

    For each input data file: builds the model, extracts static features,
    optionally solves the LP relaxation, solves the MIP, and stores the
    extracted data (plus the solver log) in a ``.h5`` file. Optionally also
    writes the final model as a gzipped MPS file.
    """

    def __init__(self, skip_lp: bool = False, write_mps: bool = True) -> None:
        # skip_lp: if True, do not solve the LP relaxation.
        # write_mps: if True, export the model as a gzipped MPS file.
        self.skip_lp = skip_lp
        self.write_mps = write_mps

    def collect(
        self,
        filenames: List[str],
        build_model: Callable,
        n_jobs: int = 1,
        progress: bool = False,
        verbose: bool = False,
    ) -> None:
        """Process every file in `filenames`, writing an .h5 next to each.

        Parameters
        ----------
        filenames: input data files to process.
        build_model: callable mapping a data filename to a model object.
        n_jobs: number of parallel workers (1 = sequential).
        progress: show a progress bar when running in parallel.
        verbose: echo solver output to stdout in addition to capturing it.
        """

        def _collect(data_filename: str) -> None:
            try:
                h5_filename = _to_h5_filename(data_filename)
                mps_filename = h5_filename.replace(".h5", ".mps")

                if exists(h5_filename):
                    # Skip files already processed successfully; rebuild the
                    # h5 file if it is empty or unreadable.
                    mip_var_values = None
                    try:
                        with H5File(h5_filename, "r") as h5:
                            mip_var_values = h5.get_array("mip_var_values")
                    except Exception:
                        # Deliberate best-effort read: a corrupted file is
                        # handled below, not propagated. (Was a bare except.)
                        pass

                    if mip_var_values is None:
                        print(f"Removing empty/corrupted h5 file: {h5_filename}")
                        os.remove(h5_filename)
                    else:
                        return

                with H5File(h5_filename, "w") as h5:
                    streams: List[Any] = [StringIO()]
                    if verbose:
                        streams += [sys.stdout]
                    with _RedirectOutput(streams):
                        # Load and extract static features
                        model = build_model(data_filename)
                        model.extract_after_load(h5)

                        if not self.skip_lp:
                            # Solve LP relaxation
                            relaxed = model.relax()
                            relaxed.optimize()
                            relaxed.extract_after_lp(h5)

                        # Solve MIP
                        model.optimize()
                        model.extract_after_mip(h5)

                        if self.write_mps:
                            # Add lazy constraints to model
                            model._lazy_enforce_collected()

                            # Save MPS file
                            model.write(mps_filename)
                            gzip(mps_filename)

                    h5.put_scalar("mip_log", streams[0].getvalue())
            except Exception:
                # Fixed: the original f-string lacked braces and printed the
                # literal text "data_filename" instead of the filename.
                print(f"Error processing: {data_filename}")
                traceback.print_exc()

        if n_jobs > 1:
            p_umap(
                _collect,
                filenames,
                num_cpus=n_jobs,
                desc="collect",
                smoothing=0,
                disable=not progress,
            )
        else:
            for filename in filenames:
                _collect(filename)
96 |
--------------------------------------------------------------------------------
/miplearn/collectors/priority.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import os
6 | import subprocess
7 | from typing import Callable
8 |
9 | from ..h5 import H5File
10 |
11 |
class BranchPriorityCollector:
    """Collects branch-and-bound data by running an external Julia helper.

    Launches a single-threaded Julia process that calls ``MIPLearn.BB.solve!``
    with CPLEX on the given instance, then stores the captured stdout log in
    the instance's HDF5 file under ``bb_log``.
    """

    def __init__(
        self,
        time_limit: float = 900.0,
        print_interval: int = 1,
        node_limit: int = 500,
    ) -> None:
        self.time_limit = time_limit
        self.print_interval = print_interval
        self.node_limit = node_limit

    def collect(self, data_filename: str, _: Callable) -> None:
        """Run the Julia solver for one instance and record its log."""
        basename = data_filename.replace(".pkl.gz", "")
        julia_code = (
            f"using CPLEX, JuMP, MIPLearn.BB; "
            f"BB.solve!("
            f' optimizer_with_attributes(CPLEX.Optimizer, "CPXPARAM_Threads" => 1),'
            f' "{basename}",'
            f" print_interval={self.print_interval},"
            f" time_limit={self.time_limit:.2f},"
            f" node_limit={self.node_limit},"
            f")"
        )
        # Keep Julia itself single-threaded as well.
        env = dict(os.environ, JULIA_NUM_THREADS="1")
        result = subprocess.run(
            ["julia", "--project=.", "-e", julia_code],
            check=True,
            capture_output=True,
            env=env,
        )
        with H5File(f"{basename}.h5", "r+") as h5:
            h5.put_scalar("bb_log", result.stdout)
50 |
--------------------------------------------------------------------------------
/miplearn/components/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/miplearn/components/cuts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/miplearn/components/cuts/__init__.py
--------------------------------------------------------------------------------
/miplearn/components/cuts/expert.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import json
6 | import logging
7 | from typing import Dict, Any, List
8 |
9 | from miplearn.components.cuts.mem import convert_lists_to_tuples
10 | from miplearn.h5 import H5File
11 | from miplearn.solvers.abstract import AbstractModel
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
class ExpertCutsComponent:
    """Cut component that reads the answer directly from training data.

    Loads the cutting planes previously recorded in the instance's own HDF5
    file and installs them in the model ahead-of-time.
    """

    def fit(
        self,
        _: List[str],
    ) -> None:
        """No training required; the cuts are read from the h5 file."""
        pass

    def before_mip(
        self,
        test_h5: str,
        model: AbstractModel,
        stats: Dict[str, Any],
    ) -> None:
        """Load recorded cuts from ``test_h5`` and attach them to the model."""
        with H5File(test_h5, "r") as h5:
            payload = h5.get_scalar("mip_cuts")
        assert payload is not None
        assert isinstance(payload, str)
        # Deduplicate; tuples are required so the cuts are hashable.
        unique_cuts = list(set(convert_lists_to_tuples(json.loads(payload))))
        model.set_cuts(unique_cuts)
        stats["Cuts: AOT"] = len(unique_cuts)
36 |
--------------------------------------------------------------------------------
/miplearn/components/cuts/mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import json
6 | import logging
7 | from typing import List, Dict, Any, Hashable
8 |
9 | import numpy as np
10 | from sklearn.preprocessing import MultiLabelBinarizer
11 |
12 | from miplearn.extractors.abstract import FeaturesExtractor
13 | from miplearn.h5 import H5File
14 | from miplearn.solvers.abstract import AbstractModel
15 |
16 | logger = logging.getLogger(__name__)
17 |
18 |
def convert_lists_to_tuples(obj: Any) -> Any:
    """Recursively replace every list inside *obj* with an equivalent tuple.

    Dict values are converted recursively (keys are left untouched); any
    non-list, non-dict object is returned unchanged.
    """
    if isinstance(obj, dict):
        return {k: convert_lists_to_tuples(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return tuple(map(convert_lists_to_tuples, obj))
    return obj
26 |
27 |
class _BaseMemorizingConstrComponent:
    """Shared logic for components that memorize constraints seen in training.

    Builds a pool of the constraints appearing across the training instances,
    trains a multi-label classifier mapping instance-level features to the
    subset of pool constraints associated with each instance, and predicts
    such a subset for unseen instances.
    """

    def __init__(self, clf: Any, extractor: FeaturesExtractor, field: str) -> None:
        self.clf = clf
        self.extractor = extractor
        self.constrs_: List[Hashable] = []  # constraint pool, indexed by position
        self.n_features_: int = 0
        self.n_targets_: int = 0
        self.field = field  # name of the h5 scalar holding the constraints

    def fit(
        self,
        train_h5: List[str],
    ) -> None:
        """Build the constraint pool and train the multi-label classifier."""
        logger.info("Reading training data...")
        n_samples = len(train_h5)
        pool: List[Hashable] = []
        pool_index: Dict[Hashable, int] = {}
        feature_rows, label_rows = [], []
        n_features = None
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                # Register this sample's constraints in the pool
                raw = h5.get_scalar(self.field)
                assert raw is not None
                assert isinstance(raw, str)
                labels = []
                for constr in convert_lists_to_tuples(json.loads(raw)):
                    if constr not in pool_index:
                        pool_index[constr] = len(pool_index)
                        pool.append(constr)
                    labels.append(pool_index[constr])
                label_rows.append(labels)

                # Extract instance-level features; all samples must agree
                # on the feature count.
                row = self.extractor.get_instance_features(h5)
                assert len(row.shape) == 1
                if n_features is None:
                    n_features = len(row)
                else:
                    assert len(row) == n_features
                feature_rows.append(row)
        logger.info("Constructing matrices...")
        assert n_features is not None
        self.n_features_ = n_features
        self.constrs_ = pool
        self.n_targets_ = len(pool_index)
        x_np = np.vstack(feature_rows)
        assert x_np.shape == (n_samples, n_features)
        y_np = MultiLabelBinarizer().fit_transform(label_rows)
        assert y_np.shape == (n_samples, self.n_targets_)
        logger.info(
            f"Dataset has {n_samples:,d} samples, "
            f"{n_features:,d} features and {self.n_targets_:,d} targets"
        )
        logger.info("Training classifier...")
        self.clf.fit(x_np, y_np)

    def predict(
        self,
        msg: str,
        test_h5: str,
    ) -> List[Hashable]:
        """Return the pool constraints predicted relevant for ``test_h5``."""
        with H5File(test_h5, "r") as h5:
            features = self.extractor.get_instance_features(h5)
        assert features.shape == (self.n_features_,)
        logger.info(msg)
        scores = self.clf.predict(features.reshape(1, -1))
        assert scores.shape == (1, self.n_targets_)
        flat = scores.reshape(-1)
        # A label counts as "on" when its score exceeds 0.5.
        return [c for c, flag in zip(self.constrs_, flat) if flag > 0.5]
98 |
99 |
class MemorizingCutsComponent(_BaseMemorizingConstrComponent):
    """Memorizing component specialized for cutting planes (``mip_cuts``)."""

    def __init__(self, clf: Any, extractor: FeaturesExtractor) -> None:
        super().__init__(clf, extractor, "mip_cuts")

    def before_mip(
        self,
        test_h5: str,
        model: AbstractModel,
        stats: Dict[str, Any],
    ) -> None:
        """Predict cuts for this instance and install them ahead-of-time."""
        assert self.constrs_ is not None
        predicted = self.predict("Predicting cutting planes...", test_h5)
        model.set_cuts(predicted)
        stats["Cuts: AOT"] = len(predicted)
114 |
--------------------------------------------------------------------------------
/miplearn/components/lazy/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/miplearn/components/lazy/__init__.py
--------------------------------------------------------------------------------
/miplearn/components/lazy/expert.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import json
6 | import logging
7 | from typing import Dict, Any, List
8 |
9 | from miplearn.components.cuts.mem import convert_lists_to_tuples
10 | from miplearn.h5 import H5File
11 | from miplearn.solvers.abstract import AbstractModel
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
class ExpertLazyComponent:
    """Lazy-constraint component that reads the answer from training data.

    Loads the lazy constraints previously recorded for this very instance
    and enforces them ahead-of-time.
    """

    def fit(
        self,
        _: List[str],
    ) -> None:
        """No training needed; violations are read straight from the h5 file."""
        pass

    def before_mip(
        self,
        test_h5: str,
        model: AbstractModel,
        stats: Dict[str, Any],
    ) -> None:
        """Enforce the instance's recorded lazy constraints before solving."""
        with H5File(test_h5, "r") as h5:
            payload = h5.get_scalar("mip_lazy")
        assert payload is not None
        assert isinstance(payload, str)
        # Deduplicate; tuples are required so the constraints are hashable.
        violations = list(set(convert_lists_to_tuples(json.loads(payload))))
        logger.info(f"Enforcing {len(violations)} constraints ahead-of-time...")
        model.lazy_enforce(violations)
        stats["Lazy Constraints: AOT"] = len(violations)
37 |
--------------------------------------------------------------------------------
/miplearn/components/lazy/mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import logging
6 | from typing import List, Dict, Any, Hashable
7 |
8 | from miplearn.components.cuts.mem import (
9 | _BaseMemorizingConstrComponent,
10 | )
11 | from miplearn.extractors.abstract import FeaturesExtractor
12 | from miplearn.solvers.abstract import AbstractModel
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
class MemorizingLazyComponent(_BaseMemorizingConstrComponent):
    """Memorizing component specialized for lazy constraints (``mip_lazy``)."""

    def __init__(self, clf: Any, extractor: FeaturesExtractor) -> None:
        super().__init__(clf, extractor, "mip_lazy")

    def before_mip(
        self,
        test_h5: str,
        model: AbstractModel,
        stats: Dict[str, Any],
    ) -> None:
        """Predict likely-violated lazy constraints and enforce them upfront."""
        assert self.constrs_ is not None
        predicted = self.predict("Predicting violated lazy constraints...", test_h5)
        logger.info(f"Enforcing {len(predicted)} constraints ahead-of-time...")
        model.lazy_enforce(predicted)
        stats["Lazy Constraints: AOT"] = len(predicted)
32 |
--------------------------------------------------------------------------------
/miplearn/components/primal/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import Tuple, List
5 |
6 | import numpy as np
7 |
8 | from miplearn.h5 import H5File
9 |
10 |
def _extract_var_names_values(
    h5: H5File,
    selected_var_types: List[bytes],
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Return (names, MIP values cast to int, indices) for variables whose
    type byte is in ``selected_var_types``."""
    names, indices = _extract_var_names(h5, selected_var_types)
    all_values = h5.get_array("mip_var_values")
    assert all_values is not None
    selected_values = all_values[indices].astype(int)
    return names, selected_values, indices
20 |
21 |
def _extract_var_names(
    h5: H5File,
    selected_var_types: List[bytes],
) -> Tuple[np.ndarray, np.ndarray]:
    """Return (names, indices) of the variables whose type byte is in
    ``selected_var_types``."""
    types = h5.get_array("static_var_types")
    names = h5.get_array("static_var_names")
    assert types is not None
    assert names is not None
    # Positions of variables matching any of the requested type bytes.
    indices = np.flatnonzero(np.isin(types, selected_var_types))
    selected_names = names[indices]
    assert len(selected_names.shape) == 1
    return selected_names, indices
34 |
35 |
def _extract_bin_var_names_values(
    h5: H5File,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Shortcut: names, values and indices of the binary (type b"B") variables."""
    return _extract_var_names_values(h5, [b"B"])
40 |
41 |
def _extract_bin_var_names(h5: H5File) -> Tuple[np.ndarray, np.ndarray]:
    """Shortcut: names and indices of the binary (type b"B") variables."""
    return _extract_var_names(h5, [b"B"])
44 |
45 |
def _extract_int_var_names_values(
    h5: H5File,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Shortcut: names, values and indices of binary and integer variables."""
    return _extract_var_names_values(h5, [b"B", b"I"])
50 |
51 |
def _extract_int_var_names(h5: H5File) -> Tuple[np.ndarray, np.ndarray]:
    """Shortcut: names and indices of binary and integer variables."""
    return _extract_var_names(h5, [b"B", b"I"])
54 |
--------------------------------------------------------------------------------
/miplearn/components/primal/actions.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import logging
5 | from abc import ABC, abstractmethod
6 | from typing import Optional, Dict
7 |
8 | import numpy as np
9 |
10 | from miplearn.solvers.abstract import AbstractModel
11 |
12 | logger = logging.getLogger()
13 |
14 |
class PrimalComponentAction(ABC):
    """Interface for actions that apply a predicted solution to a model."""

    @abstractmethod
    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        """Apply this action to *model* for the given variables and values."""
        pass
25 |
26 |
class SetWarmStart(PrimalComponentAction):
    """Action that passes the predicted solution to the solver as a warm start."""

    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        logger.info("Setting warm starts...")
        model.set_warm_starts(var_names, var_values, stats)
37 |
38 |
class FixVariables(PrimalComponentAction):
    """Action that fixes variables to their predicted values (heuristic mode)."""

    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        logger.info("Fixing variables...")
        # A single predicted solution is expected, shaped (1, n_vars).
        assert len(var_values.shape) == 2
        assert var_values.shape[0] == 1
        flat_values = var_values.reshape(-1)
        model.fix_variables(var_names, flat_values, stats)
        if stats is not None:
            # Fixing variables makes the solve a heuristic, not exact.
            stats["Heuristic"] = True
54 |
55 |
class EnforceProximity(PrimalComponentAction):
    """Action that constrains the solution to stay near the predicted one.

    Adds a single linear constraint limiting the number of predicted
    (non-NaN) binary variables allowed to differ from their predicted
    values to at most ``tol`` times the number of predicted variables.
    """

    def __init__(self, tol: float) -> None:
        self.tol = tol  # allowed deviation, as a fraction of predicted vars

    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        # A single predicted solution is expected, shaped (1, n_vars).
        assert len(var_values.shape) == 2
        assert var_values.shape[0] == 1
        predicted = var_values.reshape(-1)

        coeffs = []
        names = []
        rhs = 0.0
        for idx, name in enumerate(var_names):
            value = predicted[idx]
            if np.isnan(value):
                # No prediction for this variable; leave it unconstrained.
                continue
            # +x for predicted-zero variables, -x for predicted-one ones,
            # so the LHS counts deviations from the prediction.
            coeffs.append(1.0 if value < 0.5 else -1.0)
            rhs -= value
            names.append(name)

        rhs += len(names) * self.tol
        logger.info(
            f"Adding proximity constraint (tol={self.tol}, nz={len(names)})..."
        )

        model.add_constrs(
            np.array(names),
            np.array([coeffs]),
            np.array(["<"], dtype="S"),
            np.array([rhs]),
        )
        if stats is not None:
            stats["Heuristic"] = True
94 |
--------------------------------------------------------------------------------
/miplearn/components/primal/expert.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import logging
6 | from typing import Any, Dict, List
7 |
8 | from . import _extract_int_var_names_values
9 | from .actions import PrimalComponentAction
10 | from ...solvers.abstract import AbstractModel
11 | from ...h5 import H5File
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
class ExpertPrimalComponent:
    """
    Component that predicts warm starts by peeking at the optimal solution.
    """

    # Fixed: this docstring previously appeared after __init__, where it was
    # a no-op string expression instead of the class's __doc__.

    def __init__(self, action: PrimalComponentAction):
        # action: what to do with the solution (warm start, fix, proximity).
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        """No training needed; the optimal solution is read directly."""
        pass

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        """Read the instance's optimal integer solution and apply the action."""
        with H5File(test_h5, "r") as h5:
            names, values, _ = _extract_int_var_names_values(h5)
            self.action.perform(model, names, values.reshape(1, -1), stats)
33 |
--------------------------------------------------------------------------------
/miplearn/components/primal/indep.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import logging
5 | from typing import Any, Dict, List, Callable, Optional
6 |
7 | import numpy as np
8 | import sklearn
9 |
10 | from miplearn.components.primal import (
11 | _extract_bin_var_names_values,
12 | _extract_bin_var_names,
13 | )
14 | from miplearn.components.primal.actions import PrimalComponentAction
15 | from miplearn.extractors.abstract import FeaturesExtractor
16 | from miplearn.solvers.abstract import AbstractModel
17 | from miplearn.h5 import H5File
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 |
class IndependentVarsPrimalComponent:
    """Predicts each binary variable's value with its own classifier.

    One classifier is trained per binary variable, using variable-level
    features; at solve time the per-variable predictions are assembled into
    a single candidate solution and handed to the configured action.
    """

    def __init__(
        self,
        base_clf: Any,
        extractor: FeaturesExtractor,
        action: PrimalComponentAction,
        clone_fn: Callable[[Any], Any] = sklearn.clone,
    ):
        # base_clf: prototype classifier, cloned once per binary variable.
        # extractor: produces variable-level feature matrices from h5 files.
        # action: what to do with the predicted solution (warm start, fix, ...).
        # clone_fn: how to copy base_clf (sklearn.clone by default).
        self.base_clf = base_clf
        self.extractor = extractor
        self.clf_: Dict[bytes, Any] = {}
        self.bin_var_names_: Optional[np.ndarray] = None
        self.n_features_: Optional[int] = None
        self.clone_fn = clone_fn
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        """Train one classifier per binary variable from the given h5 files."""
        logger.info("Reading training data...")
        self.bin_var_names_ = None
        n_bin_vars: Optional[int] = None
        n_vars: Optional[int] = None
        x, y = [], []
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                # Get number of variables
                var_types = h5.get_array("static_var_types")
                assert var_types is not None
                n_vars = len(var_types)

                # Extract features
                (
                    bin_var_names,
                    bin_var_values,
                    bin_var_indices,
                ) = _extract_bin_var_names_values(h5)

                # Store/check variable names: every training instance must
                # have the same binary variables, in the same order.
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = bin_var_names
                    n_bin_vars = len(self.bin_var_names_)
                else:
                    assert np.all(bin_var_names == self.bin_var_names_)

                # Build x and y vectors
                x_sample = self.extractor.get_var_features(h5)
                assert len(x_sample.shape) == 2
                assert x_sample.shape[0] == n_vars
                x_sample = x_sample[bin_var_indices]
                if self.n_features_ is None:
                    self.n_features_ = x_sample.shape[1]
                else:
                    assert x_sample.shape[1] == self.n_features_
                x.append(x_sample)
                y.append(bin_var_values)

        assert n_bin_vars is not None
        assert self.bin_var_names_ is not None

        logger.info("Constructing matrices...")
        # Rows are stacked instance-by-instance, so the rows belonging to a
        # given variable recur every n_bin_vars rows — the strided slices
        # below depend on this layout.
        x_np = np.vstack(x)
        y_np = np.hstack(y)
        n_samples = len(train_h5) * n_bin_vars
        assert x_np.shape == (n_samples, self.n_features_)
        assert y_np.shape == (n_samples,)
        logger.info(
            f"Dataset has {n_bin_vars} binary variables, "
            f"{len(train_h5):,d} samples per variable, "
            f"{self.n_features_:,d} features, 1 target and 2 classes"
        )

        logger.info(f"Training {n_bin_vars} classifiers...")
        self.clf_ = {}
        for var_idx, var_name in enumerate(self.bin_var_names_):
            self.clf_[var_name] = self.clone_fn(self.base_clf)
            # Strided slice selects this variable's row from every instance.
            self.clf_[var_name].fit(
                x_np[var_idx::n_bin_vars, :], y_np[var_idx::n_bin_vars]
            )

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        """Predict each binary variable's value and apply the configured action."""
        assert self.bin_var_names_ is not None
        assert self.n_features_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            x_sample = self.extractor.get_var_features(h5)
            bin_var_names, bin_var_indices = _extract_bin_var_names(h5)
            assert np.all(bin_var_names == self.bin_var_names_)
            x_sample = x_sample[bin_var_indices]

        assert x_sample.shape == (len(self.bin_var_names_), self.n_features_)

        # Predict optimal solution
        logger.info("Predicting warm starts...")
        y_pred = []
        for var_idx, var_name in enumerate(self.bin_var_names_):
            x_var = x_sample[var_idx, :].reshape(1, -1)
            y_var = self.clf_[var_name].predict(x_var)
            assert y_var.shape == (1,)
            y_pred.append(y_var[0])

        # Construct warm starts, based on prediction
        y_pred_np = np.array(y_pred).reshape(1, -1)
        assert y_pred_np.shape == (1, len(self.bin_var_names_))
        self.action.perform(model, self.bin_var_names_, y_pred_np, stats)
130 |
--------------------------------------------------------------------------------
/miplearn/components/primal/joint.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import logging
5 | from typing import List, Dict, Any, Optional
6 |
7 | import numpy as np
8 |
9 | from miplearn.components.primal import _extract_bin_var_names_values
10 | from miplearn.components.primal.actions import PrimalComponentAction
11 | from miplearn.extractors.abstract import FeaturesExtractor
12 | from miplearn.solvers.abstract import AbstractModel
13 | from miplearn.h5 import H5File
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
class JointVarsPrimalComponent:
    """Predicts values for all binary variables with a single classifier.

    Maps instance-level features to the full vector of binary-variable
    values at once, so the underlying multi-output classifier can exploit
    correlations between variables.
    """

    def __init__(
        self, clf: Any, extractor: FeaturesExtractor, action: PrimalComponentAction
    ):
        self.clf = clf
        self.extractor = extractor
        self.bin_var_names_: Optional[np.ndarray] = None
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        """Train the joint classifier from the given collection of h5 files."""
        logger.info("Reading training data...")
        self.bin_var_names_ = None
        n_samples = len(train_h5)
        n_features = None
        features, targets = [], []
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                names, values, _ = _extract_bin_var_names_values(h5)

                # Every training instance must have the same binary variables.
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = names
                else:
                    assert np.all(names == self.bin_var_names_)

                # Instance-level feature vector; all samples must agree on
                # the feature count.
                row = self.extractor.get_instance_features(h5)
                assert len(row.shape) == 1
                if n_features is None:
                    n_features = len(row)
                else:
                    assert len(row) == n_features
                features.append(row)
                targets.append(values)
        assert self.bin_var_names_ is not None

        logger.info("Constructing matrices...")
        x_np = np.vstack(features)
        y_np = np.array(targets)
        assert len(x_np.shape) == 2
        assert x_np.shape[0] == n_samples
        assert x_np.shape[1] == n_features
        assert y_np.shape == (n_samples, len(self.bin_var_names_))
        logger.info(
            f"Dataset has {n_samples:,d} samples, "
            f"{n_features:,d} features and {y_np.shape[1]:,d} targets"
        )

        logger.info("Training classifier...")
        self.clf.fit(x_np, y_np)

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        """Predict a joint solution for the test instance and apply the action."""
        assert self.bin_var_names_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            row = self.extractor.get_instance_features(h5)
        assert len(row.shape) == 1
        x_sample = row.reshape(1, -1)

        # Predict optimal solution
        logger.info("Predicting warm starts...")
        y_pred = self.clf.predict(x_sample)
        assert len(y_pred.shape) == 2
        assert y_pred.shape[0] == 1
        assert y_pred.shape[1] == len(self.bin_var_names_)

        # Construct warm starts, based on prediction
        self.action.perform(model, self.bin_var_names_, y_pred, stats)
89 |
--------------------------------------------------------------------------------
/miplearn/components/primal/mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import logging
6 | from abc import ABC, abstractmethod
7 | from typing import List, Dict, Any, Optional, Tuple
8 |
9 | import numpy as np
10 |
11 | from . import _extract_bin_var_names_values
12 | from .actions import PrimalComponentAction
13 | from ...extractors.abstract import FeaturesExtractor
14 | from ...solvers.abstract import AbstractModel
15 | from ...h5 import H5File
16 |
17 | logger = logging.getLogger()
18 |
19 |
class SolutionConstructor(ABC):
    """Strategy that turns memorized solutions into (partial) warm starts."""

    @abstractmethod
    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        """Build warm starts from predicted solution probabilities.

        Parameters
        ----------
        y_proba
            1-D array with one predicted probability per memorized solution.
        solutions
            2-D array (n_solutions, n_vars) of memorized 0/1 solutions.

        Returns
        -------
        2-D array of warm starts; NaN entries leave a variable free
        (see MergeTopSolutions).
        """
        pass
24 |
25 |
class MemorizingPrimalComponent:
    """
    Component that memorizes all solutions seen during training, then fits a
    single classifier to predict which of the memorized solutions should be
    provided to the solver. Optionally combines multiple memorized solutions
    into a single, partial one.
    """

    def __init__(
        self,
        clf: Any,
        extractor: FeaturesExtractor,
        constructor: SolutionConstructor,
        action: PrimalComponentAction,
    ) -> None:
        # clf: classifier implementing fit/predict_proba over solution classes.
        assert clf is not None
        self.clf = clf
        self.extractor = extractor
        # Strategy used to turn predicted probabilities into warm starts.
        self.constructor = constructor
        # Distinct solutions seen during training; set by fit().
        self.solutions_: Optional[np.ndarray] = None
        # Binary variable names, identical across all training instances.
        self.bin_var_names_: Optional[np.ndarray] = None
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        """Memorize distinct training solutions and train the classifier.

        Each training instance contributes one feature vector and one class
        label: the index of its optimal solution among the distinct
        solutions collected so far.
        """
        logger.info("Reading training data...")
        n_samples = len(train_h5)
        solutions_ = []
        self.bin_var_names_ = None
        x, y, n_features = [], [], None
        solution_to_idx: Dict[Tuple, int] = {}
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                bin_var_names, bin_var_values, _ = _extract_bin_var_names_values(h5)

                # Store/check variable names
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = bin_var_names
                else:
                    assert np.all(bin_var_names == self.bin_var_names_)

                # Store solution
                # Solutions are keyed by the tuple of indices of their
                # nonzero entries, so duplicates map to a single class.
                sol = tuple(np.where(bin_var_values)[0])
                if sol not in solution_to_idx:
                    solutions_.append(bin_var_values)
                    solution_to_idx[sol] = len(solution_to_idx)
                y.append(solution_to_idx[sol])

                # Extract features
                x_sample = self.extractor.get_instance_features(h5)
                assert len(x_sample.shape) == 1
                if n_features is None:
                    n_features = len(x_sample)
                else:
                    assert len(x_sample) == n_features
                x.append(x_sample)

        logger.info("Constructing matrices...")
        x_np = np.vstack(x)
        y_np = np.array(y)
        assert len(x_np.shape) == 2
        assert x_np.shape[0] == n_samples
        assert x_np.shape[1] == n_features
        assert y_np.shape == (n_samples,)
        self.solutions_ = np.array(solutions_)
        n_classes = len(solution_to_idx)
        logger.info(
            f"Dataset has {n_samples:,d} samples, "
            f"{n_features:,d} features and {n_classes:,d} classes"
        )

        logger.info("Training classifier...")
        self.clf.fit(x_np, y_np)

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        """Predict a warm start for the test instance and apply ``action``."""
        assert self.solutions_ is not None
        assert self.bin_var_names_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            x_sample = self.extractor.get_instance_features(h5)
            assert len(x_sample.shape) == 1
            # Classifier expects a 2-D (1, n_features) input.
            x_sample = x_sample.reshape(1, -1)

        # Predict optimal solution
        logger.info("Predicting primal solution...")
        y_proba = self.clf.predict_proba(x_sample)
        assert len(y_proba.shape) == 2
        assert y_proba.shape[0] == 1
        assert y_proba.shape[1] == len(self.solutions_)

        # Construct warm starts, based on prediction
        starts = self.constructor.construct(y_proba[0, :], self.solutions_)
        self.action.perform(model, self.bin_var_names_, starts, stats)
123 |
124 |
class SelectTopSolutions(SolutionConstructor):
    """Warm start strategy returning the k most probable memorized solutions."""

    def __init__(self, k: int) -> None:
        # Number of solutions to keep.
        self.k = k

    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        """Return the rows of `solutions` with the k highest probabilities."""
        # Sanity checks: one probability per memorized solution.
        assert len(y_proba.shape) == 1
        assert len(solutions.shape) == 2
        assert len(y_proba) == solutions.shape[0]

        # Rank by decreasing probability; the stable sort keeps the original
        # order among ties. Slicing clamps automatically when k exceeds the
        # number of available solutions.
        ranking = np.argsort(-y_proba, kind="stable")
        chosen = ranking[: self.k]
        return solutions[chosen, :]
143 |
144 |
class MergeTopSolutions(SolutionConstructor):
    """Warm start strategy that merges the top k solutions into one.

    The k most probable memorized solutions are selected, then combined
    variable by variable: if the mean value of a variable across the
    selected solutions is at most thresholds[0], it is fixed to zero; if it
    is at least thresholds[1], it is fixed to one; otherwise it is left
    free (NaN).
    """

    def __init__(self, k: int, thresholds: List[float]):
        # Exactly two thresholds: [zero-cutoff, one-cutoff].
        assert len(thresholds) == 2
        self.k = k
        self.thresholds = thresholds

    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        """Merge the k most probable solutions into a single partial start."""
        top = SelectTopSolutions(self.k).construct(y_proba, solutions)
        # Fraction of selected solutions in which each variable equals one.
        frequency = top.mean(axis=0)
        merged = np.full((1, solutions.shape[1]), float("nan"))
        merged[0, frequency <= self.thresholds[0]] = 0
        merged[0, frequency >= self.thresholds[1]] = 1
        return merged
168 |
--------------------------------------------------------------------------------
/miplearn/components/priority.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from math import log
6 | from typing import List, Dict, Any
7 | import numpy as np
8 |
9 | import gurobipy as gp
10 |
11 | from ..h5 import H5File
12 |
13 |
class ExpertBranchPriorityComponent:
    """Component that copies precomputed branching priorities into the model.

    Reads the ``bb_var_priority`` dataset from the instance's HDF5 file and
    assigns ``branchPriority`` on the matching Gurobi variables. There is
    nothing to learn, so ``fit`` is a no-op.
    """

    def __init__(self) -> None:
        pass

    def fit(self, train_h5: List[str]) -> None:
        # No training needed: priorities come precomputed in the H5 files.
        pass

    def before_mip(self, test_h5: str, model: gp.Model, _: Dict[str, Any]) -> None:
        """Set branching priorities on ``model`` from the instance's H5 file."""
        with H5File(test_h5, "r") as h5:
            var_names = h5.get_array("static_var_names")
            var_priority = h5.get_array("bb_var_priority")
            assert var_priority is not None
            assert var_names is not None

            for var_idx, var_name in enumerate(var_names):
                # Variables with non-finite priority keep Gurobi's default.
                if np.isfinite(var_priority[var_idx]):
                    # Names are stored as bytes in HDF5; decode for lookup.
                    # NOTE(review): getVarByName returns None for unknown
                    # names, which would raise AttributeError here -- relies
                    # on the H5 file matching the model. Priorities are
                    # log-compressed, presumably to keep values in a small
                    # integer range; confirm intent before changing.
                    var = model.getVarByName(var_name.decode())
                    var.branchPriority = int(log(1 + var_priority[var_idx]))
32 |
--------------------------------------------------------------------------------
/miplearn/extractors/AlvLouWeh2017.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from typing import Tuple, Optional
6 |
7 | import numpy as np
8 |
9 | from miplearn.extractors.abstract import FeaturesExtractor
10 | from miplearn.h5 import H5File
11 |
12 |
class AlvLouWeh2017Extractor(FeaturesExtractor):
    """Static per-variable features from Alvarez, Louveaux & Wehenkel (2017).

    Computes features used to approximate strong-branching scores. The
    three matrix-based feature groups (M1: constraint/RHS ratios, M2:
    objective/constraint ratios, M3: normalized row interactions) can be
    toggled independently via the constructor flags.
    """

    def __init__(
        self,
        with_m1: bool = True,
        with_m2: bool = True,
        with_m3: bool = True,
    ):
        # Enable/disable each matrix-based feature group (see get_var_features).
        self.with_m1 = with_m1
        self.with_m2 = with_m2
        self.with_m3 = with_m3

    def get_instance_features(self, h5: H5File) -> np.ndarray:
        # Instance-level features are not part of this paper's feature set.
        raise NotImplementedError()

    def get_var_features(self, h5: H5File) -> np.ndarray:
        """
        Computes static variable features described in:
        Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
        approximation of strong branching. INFORMS Journal on Computing, 29(1),
        185-195.
        """
        A = h5.get_sparse("static_constr_lhs")
        b = h5.get_array("static_constr_rhs")
        c = h5.get_array("static_var_obj_coeffs")
        # LP sensitivity/solution data; may be absent if no LP was solved.
        c_sa_up = h5.get_array("lp_var_sa_obj_up")
        c_sa_down = h5.get_array("lp_var_sa_obj_down")
        values = h5.get_array("lp_var_values")

        assert A is not None
        assert b is not None
        assert c is not None

        nvars = len(c)
        curr = 0  # index of the next feature column to fill
        max_n_features = 40
        features = np.zeros((nvars, max_n_features))

        def push(v: np.ndarray) -> None:
            # Append one feature column (one value per variable).
            nonlocal curr
            assert v.shape == (nvars,), f"{v.shape} != ({nvars},)"
            features[:, curr] = v
            curr += 1

        def push_sign_abs(v: np.ndarray) -> None:
            # Append two columns: the sign and the magnitude of v.
            assert v.shape == (nvars,), f"{v.shape} != ({nvars},)"
            push(np.sign(v))
            push(np.abs(v))

        def maxmin(M: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
            # Column-wise max/min of a sparse matrix as dense 1-D arrays.
            M_max = np.ravel(M.max(axis=0).todense())
            M_min = np.ravel(M.min(axis=0).todense())
            return M_max, M_min

        # Divisions by zero below intentionally produce inf/NaN; they are
        # cleaned up by _fix_infinity at the end.
        with np.errstate(divide="ignore", invalid="ignore"):
            # Feature 1
            push(np.sign(c))

            # Feature 2
            c_pos_sum = c[c > 0].sum()
            push(np.abs(c) / c_pos_sum)

            # Feature 3
            c_neg_sum = -c[c < 0].sum()
            push(np.abs(c) / c_neg_sum)

            # NOTE(review): `A is not None` is always true here (asserted
            # above), so only the with_m* flags actually gate these groups.
            if A is not None and self.with_m1:
                # Compute A_ji / |b_j|
                M1 = A.T.multiply(1.0 / np.abs(b)).T.tocsr()

                # Select rows with positive b_j and compute max/min
                M1_pos = M1[b > 0, :]
                if M1_pos.shape[0] > 0:
                    M1_pos_max = np.asarray(M1_pos.max(axis=0).todense()).flatten()
                    M1_pos_min = np.asarray(M1_pos.min(axis=0).todense()).flatten()
                else:
                    # No constraints with positive RHS: fall back to zeros.
                    M1_pos_max = np.zeros(nvars)
                    M1_pos_min = np.zeros(nvars)

                # Select rows with negative b_j and compute max/min
                M1_neg = M1[b < 0, :]
                if M1_neg.shape[0] > 0:
                    M1_neg_max = np.asarray(M1_neg.max(axis=0).todense()).flatten()
                    M1_neg_min = np.asarray(M1_neg.min(axis=0).todense()).flatten()
                else:
                    M1_neg_max = np.zeros(nvars)
                    M1_neg_min = np.zeros(nvars)

                # Features 4-11
                push_sign_abs(M1_pos_min)
                push_sign_abs(M1_pos_max)
                push_sign_abs(M1_neg_min)
                push_sign_abs(M1_neg_max)

            if A is not None and self.with_m2:
                # Compute |c_i| / A_ij
                M2 = A.power(-1).multiply(np.abs(c)).tocsc()

                # Compute max/min
                M2_max, M2_min = maxmin(M2)

                # Make copies of M2 and erase elements based on sign(c)
                M2_pos_max = M2_max.copy()
                M2_neg_max = M2_max.copy()
                M2_pos_min = M2_min.copy()
                M2_neg_min = M2_min.copy()
                M2_pos_max[c <= 0] = 0
                M2_pos_min[c <= 0] = 0
                M2_neg_max[c >= 0] = 0
                M2_neg_min[c >= 0] = 0

                # Features 12-19
                push_sign_abs(M2_pos_min)
                push_sign_abs(M2_pos_max)
                push_sign_abs(M2_neg_min)
                push_sign_abs(M2_neg_max)

            if A is not None and self.with_m3:
                # Compute row sums
                S_pos = A.maximum(0).sum(axis=1)
                S_neg = np.abs(A.minimum(0).sum(axis=1))

                # Divide A by positive and negative row sums
                M3_pos = A.multiply(1 / S_pos).tocsr()
                M3_neg = A.multiply(1 / S_neg).tocsr()

                # Remove +inf and -inf generated by division by zero
                M3_pos.data[~np.isfinite(M3_pos.data)] = 0.0
                M3_neg.data[~np.isfinite(M3_neg.data)] = 0.0
                M3_pos.eliminate_zeros()
                M3_neg.eliminate_zeros()

                # Split each matrix into positive and negative parts
                M3_pos_pos = M3_pos.maximum(0)
                M3_pos_neg = -(M3_pos.minimum(0))
                M3_neg_pos = M3_neg.maximum(0)
                M3_neg_neg = -(M3_neg.minimum(0))

                # Calculate max/min
                M3_pos_pos_max, M3_pos_pos_min = maxmin(M3_pos_pos)
                M3_pos_neg_max, M3_pos_neg_min = maxmin(M3_pos_neg)
                M3_neg_pos_max, M3_neg_pos_min = maxmin(M3_neg_pos)
                M3_neg_neg_max, M3_neg_neg_min = maxmin(M3_neg_neg)

                # Features 20-35
                push_sign_abs(M3_pos_pos_max)
                push_sign_abs(M3_pos_pos_min)
                push_sign_abs(M3_pos_neg_max)
                push_sign_abs(M3_pos_neg_min)
                push_sign_abs(M3_neg_pos_max)
                push_sign_abs(M3_neg_pos_min)
                push_sign_abs(M3_neg_neg_max)
                push_sign_abs(M3_neg_neg_min)

            # Feature 36: only available during B&B

            # Feature 37
            if values is not None:
                # Fractionality: distance to the nearest integer.
                push(
                    np.minimum(
                        values - np.floor(values),
                        np.ceil(values) - values,
                    )
                )

            # Features 38-43: only available during B&B

            # Feature 44
            if c_sa_up is not None:
                assert c_sa_down is not None

                # Features 44 and 46
                push(np.sign(c_sa_up))
                push(np.sign(c_sa_down))

                # Feature 45 is duplicated

                # Feature 47-48
                # NOTE(review): operator precedence gives c - (c_sa / sign(c));
                # if the intent was (c - c_sa) / sign(c), parentheses are
                # missing -- verify against the paper before changing.
                push(np.log(c - c_sa_down / np.sign(c)))
                push(np.log(c - c_sa_up / np.sign(c)))

            # Features 49-64: only available during B&B

        # Keep only the columns actually filled, then clamp infinities to
        # column extremes and zero out the remaining non-finite entries.
        features = features[:, 0:curr]
        _fix_infinity(features)
        return features

    def get_constr_features(self, h5: H5File) -> np.ndarray:
        # Constraint-level features are not part of this paper's feature set.
        raise NotImplementedError()
201 |
202 |
def _fix_infinity(m: Optional[np.ndarray]) -> None:
    """Replace non-finite entries of *m* in place, column by column.

    Each value is first clamped to the finite [min, max] range of its
    column (computed ignoring NaN/inf), which maps +/-inf onto the column
    extremes; any entry still non-finite afterwards (NaN, or a column with
    no finite values at all) is zeroed out. No-op when *m* is None.
    """
    if m is None:
        return
    finite_only = np.ma.masked_invalid(m)  # type: ignore
    col_hi = np.max(finite_only, axis=0)
    col_lo = np.min(finite_only, axis=0)
    clamped = np.minimum(m, col_hi)
    clamped = np.maximum(clamped, col_lo)
    m[:] = clamped
    m[~np.isfinite(m)] = 0.0
211 |
--------------------------------------------------------------------------------
/miplearn/extractors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/miplearn/extractors/__init__.py
--------------------------------------------------------------------------------
/miplearn/extractors/abstract.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 | import numpy as np
4 |
5 | from miplearn.h5 import H5File
6 |
7 |
class FeaturesExtractor(ABC):
    """Abstract interface for computing ML features from an H5 data file.

    Implementations turn the datasets stored in an H5File into numpy
    feature arrays consumed by the learning components.
    """

    @abstractmethod
    def get_instance_features(self, h5: H5File) -> np.ndarray:
        """Return a 1-D array of features describing the whole instance."""
        pass

    @abstractmethod
    def get_var_features(self, h5: H5File) -> np.ndarray:
        """Return a 2-D array of per-variable features (one row per variable)."""
        pass

    @abstractmethod
    def get_constr_features(self, h5: H5File) -> np.ndarray:
        """Return a 2-D array of per-constraint features (one row per constraint)."""
        pass
20 |
--------------------------------------------------------------------------------
/miplearn/extractors/dummy.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import numpy as np
5 |
6 | from miplearn.extractors.abstract import FeaturesExtractor
7 | from miplearn.h5 import H5File
8 |
9 |
class DummyExtractor(FeaturesExtractor):
    """Trivial extractor producing all-zero features of the right shape."""

    def get_instance_features(self, h5: H5File) -> np.ndarray:
        # A single zero-valued feature for the whole instance.
        return np.zeros(1)

    def get_var_features(self, h5: H5File) -> np.ndarray:
        # One zero feature per variable.
        types = h5.get_array("static_var_types")
        assert types is not None
        return np.zeros((len(types), 1))

    def get_constr_features(self, h5: H5File) -> np.ndarray:
        # One zero feature per constraint.
        senses = h5.get_array("static_constr_sense")
        assert senses is not None
        return np.zeros((len(senses), 1))
25 |
--------------------------------------------------------------------------------
/miplearn/extractors/fields.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import Optional, List
5 |
6 | import numpy as np
7 |
8 | from miplearn.extractors.abstract import FeaturesExtractor
9 | from miplearn.h5 import H5File
10 |
11 |
class H5FieldsExtractor(FeaturesExtractor):
    """Extractor that concatenates user-selected H5 datasets into features.

    The lists `instance_fields`, `var_fields` and `constr_fields` name the
    datasets to combine into, respectively, the instance, variable and
    constraint feature arrays. Scalar fields are broadcast to the expected
    length where a column is required.
    """

    def __init__(
        self,
        instance_fields: Optional[List[str]] = None,
        var_fields: Optional[List[str]] = None,
        constr_fields: Optional[List[str]] = None,
    ):
        self.instance_fields = instance_fields
        self.var_fields = var_fields
        self.constr_fields = constr_fields

    def get_instance_features(self, h5: H5File) -> np.ndarray:
        """Concatenate the configured instance fields into one 1-D vector."""
        if self.instance_fields is None:
            raise Exception("No instance fields provided")
        parts = []
        for name in self.instance_fields:
            try:
                value = h5.get_array(name)
            except ValueError:
                # Field is stored as a scalar rather than an array.
                value = h5.get_scalar(name)
            assert value is not None
            parts.append(value)
        combined = np.hstack(parts)
        assert len(combined.shape) == 1
        return combined

    def get_var_features(self, h5: H5File) -> np.ndarray:
        """Build an (n_vars, n_fields) matrix from the configured var fields."""
        types = h5.get_array("static_var_types")
        assert types is not None
        if self.var_fields is None:
            raise Exception("No var fields provided")
        return self._extract(h5, self.var_fields, len(types))

    def get_constr_features(self, h5: H5File) -> np.ndarray:
        """Build an (n_constrs, n_fields) matrix from the configured fields."""
        senses = h5.get_array("static_constr_sense")
        assert senses is not None
        if self.constr_fields is None:
            raise Exception("No constr fields provided")
        return self._extract(h5, self.constr_fields, len(senses))

    def _extract(self, h5: H5File, fields: List[str], n_expected: int) -> np.ndarray:
        # Fetch each field as a column; scalars are repeated n_expected times.
        columns = []
        for name in fields:
            try:
                col = h5.get_array(name)
            except ValueError:
                scalar = h5.get_scalar(name)
                assert scalar is not None
                col = np.repeat(scalar, n_expected)
            assert col is not None
            assert len(col.shape) == 1
            assert col.shape[0] == n_expected
            columns.append(col)
        matrix = np.vstack(columns).T
        assert len(matrix.shape) == 2
        assert matrix.shape[0] == n_expected
        return matrix
71 |
--------------------------------------------------------------------------------
/miplearn/h5.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from types import TracebackType
6 | from typing import Optional, Any, Union, List, Type, Literal
7 |
8 | import h5py
9 | import numpy as np
10 | from scipy.sparse import coo_matrix
11 |
# Raw binary payloads accepted by H5File.put_bytes/get_bytes.
Bytes = Union[bytes, bytearray]

# Scalar values accepted by H5File.put_scalar.
Scalar = Union[None, bool, str, int, float]

# One-dimensional value collections handled by the H5 helpers.
Vector = Union[
    None,
    List[bool],
    List[str],
    List[int],
    List[float],
    List[Optional[str]],
    np.ndarray,
]

# Lists of vectors, each inner list possibly missing (None).
VectorList = Union[
    List[List[bool]],
    List[List[str]],
    List[List[int]],
    List[List[float]],
    List[Optional[List[bool]]],
    List[Optional[List[str]]],
    List[Optional[List[int]]],
    List[Optional[List[float]]],
]
36 |
37 |
class H5File:
    """Thin convenience wrapper around an HDF5 file (h5py).

    Provides typed get/put helpers for scalars, dense arrays, sparse COO
    matrices (stored as three companion datasets) and raw bytes; supports
    use as a context manager. All ``get_*`` methods return None when the
    key is absent; all ``put_*`` methods overwrite existing keys.
    """

    def __init__(
        self,
        filename: str,
        mode: str = "r+",
    ) -> None:
        self.file = h5py.File(filename, mode, libver="latest")

    def get_scalar(self, key: str) -> Optional[Any]:
        """Return the scalar stored at `key`, or None if absent.

        Raises AssertionError if the dataset is not 0-dimensional.
        """
        if key not in self.file:
            return None
        ds = self.file[key]
        assert (
            len(ds.shape) == 0
        ), f"0-dimensional array expected; found shape {ds.shape}"
        if h5py.check_string_dtype(ds.dtype):
            # Stored as bytes; decode back into a Python str.
            return ds.asstr()[()]
        else:
            return ds[()].tolist()

    def put_scalar(self, key: str, value: Any) -> None:
        """Store a scalar at `key`, overwriting any previous value.

        Silently does nothing when `value` is None.
        """
        if value is None:
            return
        self._assert_is_scalar(value)
        if key in self.file:
            del self.file[key]
        self.file.create_dataset(key, data=value)

    def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
        """Store a numpy array at `key` (gzip-compressed), overwriting.

        Floating-point data is normalized to float64 before storage.
        Silently does nothing when `value` is None.

        NOTE(review): despite the None annotation, this returns the h5py
        dataset created below -- harmless, but annotation and body disagree.
        """
        if value is None:
            return
        self._assert_is_array(value)
        if value.dtype.kind == "f":
            value = value.astype("float64")
        if key in self.file:
            del self.file[key]
        return self.file.create_dataset(key, data=value, compression="gzip")

    def get_array(self, key: str) -> Optional[np.ndarray]:
        """Return the full array stored at `key`, or None if absent."""
        if key not in self.file:
            return None
        return self.file[key][:]

    def put_sparse(self, key: str, value: coo_matrix) -> None:
        """Store a COO matrix as three datasets: `key`_row, _col and _data."""
        if value is None:
            return
        self._assert_is_sparse(value)
        self.put_array(f"{key}_row", value.row)
        self.put_array(f"{key}_col", value.col)
        self.put_array(f"{key}_data", value.data)

    def get_sparse(self, key: str) -> Optional[coo_matrix]:
        """Rebuild a COO matrix stored by put_sparse, or None if absent."""
        row = self.get_array(f"{key}_row")
        if row is None:
            return None
        col = self.get_array(f"{key}_col")
        data = self.get_array(f"{key}_data")
        assert col is not None
        assert data is not None
        return coo_matrix((data, (row, col)))

    def get_bytes(self, key: str) -> Optional[Bytes]:
        """Return the raw byte string stored at `key`, or None if absent."""
        if key not in self.file:
            return None
        ds = self.file[key]
        assert (
            len(ds.shape) == 1
        ), f"1-dimensional array expected; found shape {ds.shape}"
        return ds[()].tobytes()

    def put_bytes(self, key: str, value: Bytes) -> None:
        """Store raw bytes at `key` as a uint8 array (see put_array)."""
        assert isinstance(
            value, (bytes, bytearray)
        ), f"bytes expected; found: {value.__class__}"  # type: ignore
        self.put_array(key, np.frombuffer(value, dtype="uint8"))

    def close(self) -> None:
        """Close the underlying HDF5 file."""
        self.file.close()

    def __enter__(self) -> "H5File":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Literal[False]:
        # Always close the file; returning False propagates any exception.
        self.file.close()
        return False

    def _assert_is_scalar(self, value: Any) -> None:
        # Accepts Python scalars, bytes, and numpy byte strings; None is ok.
        if value is None:
            return
        if isinstance(value, (str, bool, int, float, bytes, np.bytes_)):
            return
        assert False, f"scalar expected; found instead: {value} ({value.__class__})"

    def _assert_is_array(self, value: np.ndarray) -> None:
        assert isinstance(
            value, np.ndarray
        ), f"np.ndarray expected; found instead: {value.__class__}"
        assert value.dtype.kind in "biufS", f"Unsupported dtype: {value.dtype}"

    def _assert_is_sparse(self, value: Any) -> None:
        assert isinstance(
            value, coo_matrix
        ), f"coo_matrix expected; found: {value.__class__}"
        self._assert_is_array(value.data)
147 |
--------------------------------------------------------------------------------
/miplearn/io.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from gzip import GzipFile
6 | import os
7 | import pickle
8 | import sys
9 | from typing import IO, Any, Callable, List, cast, TextIO
10 |
11 | from .parallel import p_umap
12 | import shutil
13 |
14 |
class _RedirectOutput:
    """Context manager that fans stdout/stderr out to the given streams.

    While active, anything written to sys.stdout or sys.stderr is
    forwarded to every stream in `streams`; the original streams are
    restored on exit.
    """

    def __init__(self, streams: List[Any]) -> None:
        self.streams = streams

    def write(self, data: Any) -> None:
        # File-like protocol: forward writes to every registered stream.
        for target in self.streams:
            target.write(data)

    def flush(self) -> None:
        for target in self.streams:
            target.flush()

    def __enter__(self) -> Any:
        # Remember the real streams, then install ourselves in their place.
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        sys.stdout = cast(TextIO, self)
        sys.stderr = cast(TextIO, self)
        return self

    def __exit__(
        self,
        _type: Any,
        _value: Any,
        _traceback: Any,
    ) -> None:
        # Restore the original streams even if an exception occurred.
        sys.stdout = self._original_stdout
        sys.stderr = self._original_stderr
42 |
43 |
def write_pkl_gz(
    objs: List[Any],
    dirname: str,
    prefix: str = "",
    n_jobs: int = 1,
    progress: bool = False,
) -> List[str]:
    """Pickle each object into its own gzip-compressed file.

    Files are named `{dirname}/{prefix}{i:05d}.pkl.gz`, one per object, and
    missing directories are created. When `n_jobs` is greater than one, the
    files are written by a process pool (optionally with a progress bar).

    Returns the list of file names written, in object order.
    """
    filenames = [f"{dirname}/{prefix}{i:05d}.pkl.gz" for i in range(len(objs))]

    def _write_one(idx: int) -> None:
        # Serialize a single object to its pre-computed destination.
        target = filenames[idx]
        os.makedirs(os.path.dirname(target), exist_ok=True)
        with GzipFile(target, "wb") as gz:
            pickle.dump(objs[idx], cast(IO[bytes], gz))

    if n_jobs <= 1:
        # Sequential path: no pool overhead.
        for idx in range(len(objs)):
            _write_one(idx)
    else:
        p_umap(
            _write_one,
            range(len(objs)),
            smoothing=0,
            num_cpus=n_jobs,
            maxtasksperchild=None,
            disable=not progress,
        )
    return filenames
73 |
74 |
def gzip(filename: str) -> None:
    """Compress `filename` in place, producing `filename + ".gz"`.

    The original (uncompressed) file is removed only after the compressed
    copy has been fully written.

    Parameters
    ----------
    filename
        Path of the file to compress.
    """
    # BUG FIX: the output path was previously a hard-coded literal instead
    # of being derived from `filename`, so every call wrote to the same
    # file and then deleted its input.
    with open(filename, "rb") as input_file:
        with GzipFile(f"{filename}.gz", "wb") as output_file:
            shutil.copyfileobj(input_file, output_file)
    os.remove(filename)
80 |
81 |
def read_pkl_gz(filename: str) -> Any:
    """Load and return a pickled object from a gzip-compressed file."""
    with GzipFile(filename, "rb") as gz:
        return pickle.load(cast(IO[bytes], gz))
85 |
86 |
def _to_h5_filename(data_filename: str) -> str:
    """Map a data file name to its companion HDF5 file name.

    Appends ".h5" and strips any known data-file extension so that, e.g.,
    "instance.mps.gz" and "instance.pkl.gz" both map to "instance.h5".
    """
    result = f"{data_filename}.h5"
    # Strip extensions in this order: ".gz" first, so stacked extensions
    # such as ".mps.gz" collapse fully.
    for ext in (".gz", ".csv", ".jld2", ".json", ".lp", ".mps", ".pkl"):
        result = result.replace(f"{ext}.h5", ".h5")
    return result
97 |
--------------------------------------------------------------------------------
/miplearn/parallel.py:
--------------------------------------------------------------------------------
1 | # Modified version of: https://github.com/swansonk14/p_tqdm
2 | # Copyright (c) 2022 Kyle Swanson
3 | # MIT License
4 |
5 | from collections.abc import Sized
6 | from typing import Any, Callable, Generator, Iterable, List
7 |
8 | from pathos.multiprocessing import _ProcessPool as Pool
9 | from tqdm.auto import tqdm
10 |
11 |
def _parallel(function: Callable, *iterables: Iterable, **kwargs: Any) -> Generator:
    """Lazily map `function` over `iterables` in a process pool, unordered.

    Yields results as they complete, wrapped in a tqdm progress bar.
    Recognized kwargs: ``total``, ``num_cpus``, ``maxtasksperchild`` and
    ``chunksize``; any remaining kwargs are forwarded to tqdm.
    """
    # Determine length of tqdm (equal to length of the shortest iterable or total kwarg)
    total = kwargs.pop("total", None)
    lengths = [len(iterable) for iterable in iterables if isinstance(iterable, Sized)]
    length = total or (min(lengths) if lengths else None)

    # Create parallel generator
    num_cpus = kwargs.pop("num_cpus", 1)
    maxtasksperchild = kwargs.pop("maxtasksperchild", 1)
    chunksize = kwargs.pop("chunksize", 1)
    with Pool(num_cpus, maxtasksperchild=maxtasksperchild) as pool:
        for item in tqdm(
            pool.imap_unordered(function, *iterables, chunksize=chunksize),
            total=length,
            **kwargs
        ):
            yield item
29 |
30 |
def p_umap(function: Callable, *iterables: Iterable, **kwargs: Any) -> List[Any]:
    """Parallel unordered map: multi-process `map` with results in completion order.

    Materializes the lazy generator returned by `_parallel` into a list.
    """
    return list(_parallel(function, *iterables, **kwargs))
33 |
--------------------------------------------------------------------------------
/miplearn/problems/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from typing import Any, Optional
6 |
7 | import gurobipy as gp
8 | from pyomo import environ as pe
9 |
10 |
def _gurobipy_set_params(model: gp.Model, params: Optional[dict[str, Any]]) -> None:
    """Apply the given solver parameters to a gurobipy model.

    Does nothing when `params` is None.
    """
    assert isinstance(model, gp.Model)
    if params is None:
        return
    for name, value in params.items():
        setattr(model.params, name, value)
16 |
17 |
def _pyomo_set_params(
    model: pe.ConcreteModel,
    params: Optional[dict[str, Any]],
    solver: str,
) -> None:
    """Apply solver parameters to a Pyomo model (gurobi_persistent only)."""
    assert (
        solver == "gurobi_persistent"
    ), "setting parameters is only supported with gurobi_persistent"
    # The solver check is repeated here (not just asserted) so behavior is
    # unchanged when assertions are disabled with python -O.
    if solver == "gurobi_persistent" and params is not None:
        for name, value in params.items():
            model.solver.set_gurobi_param(name, value)
29 |
--------------------------------------------------------------------------------
/miplearn/problems/binpack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Optional, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | from gurobipy import GRB, quicksum
11 | from scipy.stats import uniform, randint
12 | from scipy.stats.distributions import rv_frozen
13 |
14 | from miplearn.io import read_pkl_gz
15 | from miplearn.solvers.gurobi import GurobiModel
16 |
17 |
@dataclass
class BinPackData:
    """Data for the bin packing problem.

    Parameters
    ----------
    sizes
        Sizes of the items
    capacity
        Capacity of the bin
    """

    # One size per item.
    sizes: np.ndarray
    # NOTE(review): annotated int, but BinPackGenerator stores rounded
    # floats here -- confirm the intended type.
    capacity: int
32 |
33 |
class BinPackGenerator:
    """Random instance generator for the bin packing problem.

    When `fix_items` is False, every sample independently draws the
    user-provided distributions `n`, `sizes` and `capacity` to obtain,
    respectively, the number of items, the item sizes and the bin capacity.

    When `fix_items` is True, a reference instance is drawn once (using the
    same procedure) and every generated instance is a perturbation of it:
    item i receives size `s_i * gamma_i`, where `s_i` is the reference size
    and `gamma_i` is drawn from `sizes_jitter`, while the capacity becomes
    `B * beta`, with `B` the reference capacity and `beta` drawn from
    `capacity_jitter`. The number of items is then identical across all
    generated instances.

    Args
    ----
    n
        Probability distribution for the number of items.
    sizes
        Probability distribution for the item sizes.
    capacity
        Probability distribution for the bin capacity.
    sizes_jitter
        Probability distribution for the item size perturbation.
    capacity_jitter
        Probability distribution for the bin capacity perturbation.
    fix_items
        If True, perturb a fixed reference instance; if False, draw each
        instance from scratch.
    """

    def __init__(
        self,
        n: rv_frozen,
        sizes: rv_frozen,
        capacity: rv_frozen,
        sizes_jitter: rv_frozen,
        capacity_jitter: rv_frozen,
        fix_items: bool,
    ) -> None:
        self.n = n
        self.sizes = sizes
        self.capacity = capacity
        self.sizes_jitter = sizes_jitter
        self.capacity_jitter = capacity_jitter
        self.fix_items = fix_items
        # Reference instance; created lazily on the first sample when
        # fix_items is True, otherwise always None.
        self.ref_data: Optional[BinPackData] = None

    def generate(self, n_samples: int) -> List[BinPackData]:
        """Generate `n_samples` random instances.

        Parameters
        ----------
        n_samples
            Number of samples to generate.
        """

        def _make_one() -> BinPackData:
            if self.ref_data is not None:
                # Perturb the previously drawn reference instance.
                base_sizes = self.ref_data.sizes
                base_capacity = self.ref_data.capacity
                n_items = base_sizes.shape[0]
            else:
                # Draw a brand new instance from the base distributions.
                n_items = self.n.rvs()
                base_sizes = self.sizes.rvs(n_items)
                base_capacity = self.capacity.rvs()
                if self.fix_items:
                    self.ref_data = BinPackData(base_sizes, base_capacity)
            # Jitter is applied to every sample, including the first one.
            jittered_sizes = base_sizes * self.sizes_jitter.rvs(n_items)
            jittered_capacity = base_capacity * self.capacity_jitter.rvs()
            return BinPackData(jittered_sizes.round(2), jittered_capacity.round(2))

        return [_make_one() for _ in range(n_samples)]
110 |
111 |
def build_binpack_model_gurobipy(data: Union[str, BinPackData]) -> GurobiModel:
    """Converts bin packing problem data into a concrete Gurobipy model."""
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, BinPackData)

    model = gp.Model()
    n = data.sizes.shape[0]
    items = range(n)
    bins = range(n)

    # Var: Use bin
    y = model.addVars(n, name="y", vtype=GRB.BINARY)

    # Var: Assign item to bin
    x = model.addVars(n, n, name="x", vtype=GRB.BINARY)

    # Obj: Minimize number of bins
    model.setObjective(quicksum(y[j] for j in bins))

    # Eq: Enforce bin capacity
    model.addConstrs(
        (
            quicksum(data.sizes[i] * x[i, j] for i in items) <= data.capacity * y[j]
            for j in bins
        ),
        name="eq_capacity",
    )

    # Eq: Must assign all items to bins
    model.addConstrs(
        (quicksum(x[i, j] for j in bins) == 1 for i in items),
        name="eq_assign",
    )

    model.update()
    return GurobiModel(model)
147 |
--------------------------------------------------------------------------------
/miplearn/problems/multiknapsack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Optional, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | from gurobipy import GRB
11 | from scipy.stats import uniform, randint
12 | from scipy.stats.distributions import rv_frozen
13 |
14 | from miplearn.io import read_pkl_gz
15 | from miplearn.solvers.gurobi import GurobiModel
16 |
17 |
@dataclass
class MultiKnapsackData:
    """Data for the multi-dimensional knapsack problem

    Args
    ----
    prices
        Item prices.
    capacities
        Knapsack capacities.
    weights
        Matrix of item weights.
    """

    # 1-dimensional array with one price per item.
    prices: np.ndarray
    # 1-dimensional array with one capacity per knapsack.
    capacities: np.ndarray
    # Matrix of weights; MultiKnapsackGenerator builds it with shape
    # (n_knapsacks, n_items).
    weights: np.ndarray
35 |
36 |
# noinspection PyPep8Naming
class MultiKnapsackGenerator:
    """Random instance generator for the multi-dimensional knapsack problem.

    Instances have a random number of items (or variables) and a random number of
    knapsacks (or constraints), as specified by the provided probability
    distributions `n` and `m`, respectively. The weight of each item `i` on knapsack
    `j` is sampled independently from the provided distribution `w`. The capacity of
    knapsack `j` is set to ``alpha_j * sum(w[i,j] for i in range(n))``,
    where `alpha_j`, the tightness ratio, is sampled from the provided probability
    distribution `alpha`.

    To make the instances more challenging, the costs of the items are linearly
    correlated to their average weights. More specifically, the weight of each item
    `i` is set to ``sum(w[i,j]/m for j in range(m)) + K * u_i``, where `K`,
    the correlation coefficient, and `u_i`, the correlation multiplier, are sampled
    from the provided probability distributions. Note that `K` is only sampled once
    for the entire instance.

    If `fix_w=True`, then `weights[i,j]` are kept the same in all generated
    instances. This also implies that n and m are kept fixed. Although the prices and
    capacities are derived from `weights[i,j]`, as long as `u` and `K` are not
    constants, the generated instances will still not be completely identical.

    If a probability distribution `w_jitter` is provided, then item weights will be
    set to ``w[i,j] * gamma[i,j]`` where `gamma[i,j]` is sampled from `w_jitter`.
    When combined with `fix_w=True`, this argument may be used to generate instances
    where the weight of each item is roughly the same, but not exactly identical,
    across all instances. The prices of the items and the capacities of the knapsacks
    will be calculated as above, but using these perturbed weights instead.

    By default, all generated prices, weights and capacities are rounded to the
    nearest integer number. If `round=False` is provided, this rounding will be
    disabled.

    Parameters
    ----------
    n: rv_discrete
        Probability distribution for the number of items (or variables).
    m: rv_discrete
        Probability distribution for the number of knapsacks (or constraints).
    w: rv_continuous
        Probability distribution for the item weights.
    K: rv_continuous
        Probability distribution for the profit correlation coefficient.
    u: rv_continuous
        Probability distribution for the profit multiplier.
    alpha: rv_continuous
        Probability distribution for the tightness ratio.
    fix_w: boolean
        If true, weights are kept the same (minus the noise from w_jitter) in all
        instances.
    w_jitter: rv_continuous
        Probability distribution for random noise added to the weights.
    p_jitter: rv_continuous
        Probability distribution for random noise applied to the prices.
    round: boolean
        If true, all prices, weights and capacities are rounded to the nearest
        integer.
    """

    def __init__(
        self,
        n: rv_frozen = randint(low=100, high=101),
        m: rv_frozen = randint(low=30, high=31),
        w: rv_frozen = randint(low=0, high=1000),
        K: rv_frozen = randint(low=500, high=501),
        u: rv_frozen = uniform(loc=0.0, scale=1.0),
        alpha: rv_frozen = uniform(loc=0.25, scale=0.0),
        fix_w: bool = False,
        w_jitter: rv_frozen = uniform(loc=1.0, scale=0.0),
        p_jitter: rv_frozen = uniform(loc=1.0, scale=0.0),
        round: bool = True,
    ):
        assert isinstance(n, rv_frozen), "n should be a SciPy probability distribution"
        assert isinstance(m, rv_frozen), "m should be a SciPy probability distribution"
        assert isinstance(w, rv_frozen), "w should be a SciPy probability distribution"
        assert isinstance(K, rv_frozen), "K should be a SciPy probability distribution"
        assert isinstance(u, rv_frozen), "u should be a SciPy probability distribution"
        assert isinstance(
            alpha, rv_frozen
        ), "alpha should be a SciPy probability distribution"
        assert isinstance(fix_w, bool), "fix_w should be boolean"
        assert isinstance(
            w_jitter, rv_frozen
        ), "w_jitter should be a SciPy probability distribution"
        # Validate the remaining parameters as well, for consistency with the
        # checks above (previously `p_jitter` and `round` were unchecked).
        assert isinstance(
            p_jitter, rv_frozen
        ), "p_jitter should be a SciPy probability distribution"
        assert isinstance(round, bool), "round should be boolean"

        self.n = n
        self.m = m
        self.w = w
        self.u = u
        self.K = K
        self.alpha = alpha
        self.w_jitter = w_jitter
        self.p_jitter = p_jitter
        self.round = round
        # Reference values, sampled once in the constructor when `fix_w=True`
        # and reused (modulo jitter) by every call to `generate`.
        self.fix_n: Optional[int] = None
        self.fix_m: Optional[int] = None
        self.fix_w: Optional[np.ndarray] = None
        self.fix_u: Optional[np.ndarray] = None
        self.fix_K: Optional[float] = None

        if fix_w:
            self.fix_n = self.n.rvs()
            self.fix_m = self.m.rvs()
            self.fix_w = np.array([self.w.rvs(self.fix_n) for _ in range(self.fix_m)])
            self.fix_u = self.u.rvs(self.fix_n)
            self.fix_K = self.K.rvs()

    def generate(self, n_samples: int) -> List[MultiKnapsackData]:
        """Generates `n_samples` random instances."""

        def _sample() -> MultiKnapsackData:
            if self.fix_w is not None:
                assert self.fix_m is not None
                assert self.fix_n is not None
                assert self.fix_u is not None
                assert self.fix_K is not None
                n = self.fix_n
                m = self.fix_m
                w = self.fix_w
                u = self.fix_u
                K = self.fix_K
            else:
                n = self.n.rvs()
                m = self.m.rvs()
                w = np.array([self.w.rvs(n) for _ in range(m)])
                u = self.u.rvs(n)
                K = self.K.rvs()
            # Apply multiplicative jitter; `w` has shape (m, n).
            w = w * np.array([self.w_jitter.rvs(n) for _ in range(m)])
            alpha = self.alpha.rvs(m)
            # Price of item j: its average weight across knapsacks plus the
            # correlated profit term, then per-item jitter.
            p = np.array(
                [w[:, j].sum() / m + K * u[j] for j in range(n)]
            ) * self.p_jitter.rvs(n)
            # Capacity of knapsack i: tightness ratio times its total weight.
            b = np.array([w[i, :].sum() * alpha[i] for i in range(m)])
            if self.round:
                p = p.round()
                b = b.round()
                w = w.round()
            return MultiKnapsackData(p, b, w)

        return [_sample() for _ in range(n_samples)]
175 |
176 |
def build_multiknapsack_model_gurobipy(
    data: Union[str, MultiKnapsackData]
) -> GurobiModel:
    """Converts multi-knapsack problem data into a concrete Gurobipy model."""
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, MultiKnapsackData)

    model = gp.Model()
    n_knapsacks, n_items = data.weights.shape
    # One binary decision variable per item.
    x = model.addMVar(n_items, vtype=GRB.BINARY, name="x")
    # Capacity constraints, one row per knapsack (matrix form).
    model.addConstr(data.weights @ x <= data.capacities)
    # Prices are negated because the solver minimizes by default.
    model.setObjective(-data.prices @ x)
    model.update()
    return GurobiModel(model)
192 |
--------------------------------------------------------------------------------
/miplearn/problems/pmedian.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Optional, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | from gurobipy import quicksum, GRB
11 | from scipy.spatial.distance import pdist, squareform
12 | from scipy.stats import uniform, randint
13 | from scipy.stats.distributions import rv_frozen
14 |
15 | from miplearn.io import read_pkl_gz
16 | from miplearn.solvers.gurobi import GurobiModel
17 |
18 |
@dataclass
class PMedianData:
    """Data for the capacitated p-median problem

    Args
    ----
    distances
        Matrix of distances between customer i and facility j.
    demands
        Customer demands.
    p
        Number of medians that need to be chosen.
    capacities
        Facility capacities.
    """

    # Square (n, n) matrix of customer/facility distances.
    distances: np.ndarray
    # 1-dimensional array with one demand per customer.
    demands: np.ndarray
    # Number of medians to open.
    p: int
    # 1-dimensional array with one capacity per facility.
    capacities: np.ndarray
39 |
40 |
class PMedianGenerator:
    """Random generator for the capacitated p-median problem.

    This class first decides the number of customers and the parameter `p` by
    sampling the provided `n` and `p` distributions, respectively. Then, for each
    customer `i`, the class builds its geographical location `(xi, yi)` by sampling
    the provided `x` and `y` distributions. For each `i`, the demand for customer `i`
    and the capacity of facility `i` are decided by sampling the distributions
    `demands` and `capacities`, respectively. Finally, the costs `w[i,j]` are set to
    the Euclidean distance between the locations of customers `i` and `j`.

    If `fixed=True`, then the number of customers, their locations, the parameter
    `p`, the demands and the capacities are only sampled from their respective
    distributions exactly once, to build a reference instance which is then
    perturbed. Specifically, for each perturbation, the distances, demands and
    capacities are multiplied by factors sampled from the distributions
    `distances_jitter`, `demands_jitter` and `capacities_jitter`, respectively. The
    result is a list of instances that have the same set of customers, but slightly
    different demands, capacities and distances.

    Parameters
    ----------
    x
        Probability distribution for the x-coordinate of the points.
    y
        Probability distribution for the y-coordinate of the points.
    n
        Probability distribution for the number of customer.
    p
        Probability distribution for the number of medians.
    demands
        Probability distribution for the customer demands.
    capacities
        Probability distribution for the facility capacities.
    distances_jitter
        Probability distribution for the random scaling factor applied to distances.
    demands_jitter
        Probability distribution for the random scaling factor applied to demands.
    capacities_jitter
        Probability distribution for the random scaling factor applied to capacities.
    fixed
        If `True`, then customer are kept the same across instances.
    """

    def __init__(
        self,
        x: rv_frozen = uniform(loc=0.0, scale=100.0),
        y: rv_frozen = uniform(loc=0.0, scale=100.0),
        n: rv_frozen = randint(low=100, high=101),
        p: rv_frozen = randint(low=10, high=11),
        demands: rv_frozen = uniform(loc=0, scale=20),
        capacities: rv_frozen = uniform(loc=0, scale=100),
        distances_jitter: rv_frozen = uniform(loc=1.0, scale=0.0),
        demands_jitter: rv_frozen = uniform(loc=1.0, scale=0.0),
        capacities_jitter: rv_frozen = uniform(loc=1.0, scale=0.0),
        fixed: bool = True,
    ):
        self.x = x
        self.y = y
        self.n = n
        self.p = p
        self.demands = demands
        self.capacities = capacities
        self.distances_jitter = distances_jitter
        self.demands_jitter = demands_jitter
        self.capacities_jitter = capacities_jitter
        self.fixed = fixed
        # Reference instance; set by the first call to `generate` when
        # `fixed=True`, and kept `None` otherwise.
        self.ref_data: Optional[PMedianData] = None

    def generate(self, n_samples: int) -> List[PMedianData]:
        """Generates `n_samples` random instances."""

        def _sample() -> PMedianData:
            if self.ref_data is None:
                # Build a fresh instance from the base distributions.
                n = self.n.rvs()
                p = self.p.rvs()
                loc = np.array([(self.x.rvs(), self.y.rvs()) for _ in range(n)])
                # Pairwise Euclidean distances between the sampled locations.
                distances = squareform(pdist(loc))
                demands = self.demands.rvs(n)
                capacities = self.capacities.rvs(n)
            else:
                # Perturb the reference instance multiplicatively.
                n = self.ref_data.demands.shape[0]
                distances = self.ref_data.distances * self.distances_jitter.rvs(
                    size=(n, n)
                )
                # Re-symmetrize after the element-wise jitter: keep the lower
                # triangle (including the diagonal) and mirror it above.
                distances = np.tril(distances) + np.triu(distances.T, 1)
                demands = self.ref_data.demands * self.demands_jitter.rvs(n)
                capacities = self.ref_data.capacities * self.capacities_jitter.rvs(n)
                p = self.ref_data.p

            data = PMedianData(
                distances=distances.round(2),
                demands=demands.round(2),
                p=p,
                capacities=capacities.round(2),
            )

            # The first generated instance becomes the (rounded) reference.
            if self.fixed and self.ref_data is None:
                self.ref_data = data

            return data

        return [_sample() for _ in range(n_samples)]
142 |
143 |
def build_pmedian_model_gurobipy(data: Union[str, PMedianData]) -> GurobiModel:
    """Converts capacitated p-median data into a concrete Gurobipy model."""
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, PMedianData)

    model = gp.Model()
    n = len(data.demands)
    customers = range(n)
    facilities = range(n)

    # Decision variables: x[i, j] assigns customer i to facility j;
    # y[j] opens facility j as a median.
    x = model.addVars(n, n, vtype=GRB.BINARY, name="x")
    y = model.addVars(n, vtype=GRB.BINARY, name="y")

    # Objective: total assignment distance.
    model.setObjective(
        quicksum(
            data.distances[i, j] * x[i, j] for i in customers for j in facilities
        )
    )

    # Eq: Must serve each customer
    model.addConstrs(
        (quicksum(x[i, j] for j in facilities) == 1 for i in customers),
        name="eq_demand",
    )

    # Eq: Must choose p medians
    model.addConstr(
        quicksum(y[j] for j in facilities) == data.p,
        name="eq_choose",
    )

    # Eq: Must not exceed capacity
    model.addConstrs(
        (
            quicksum(data.demands[i] * x[i, j] for i in customers)
            <= data.capacities[j] * y[j]
            for j in facilities
        ),
        name="eq_capacity",
    )

    model.update()
    return GurobiModel(model)
186 |
--------------------------------------------------------------------------------
/miplearn/problems/setcover.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | import pyomo.environ as pe
11 | from gurobipy.gurobipy import GRB
12 | from scipy.stats import uniform, randint
13 | from scipy.stats.distributions import rv_frozen
14 |
15 | from miplearn.io import read_pkl_gz
16 | from miplearn.solvers.gurobi import GurobiModel
17 | from miplearn.solvers.pyomo import PyomoModel
18 |
19 |
@dataclass
class SetCoverData:
    """Data for the set cover problem."""

    # 1-dimensional array with the cost of including each set.
    costs: np.ndarray
    # Binary matrix with shape (n_elements, n_sets); entry (i, j) is 1 when
    # element i belongs to set j.
    incidence_matrix: np.ndarray
24 |
25 |
class SetCoverGenerator:
    """Random instance generator for the set cover problem.

    When `fix_sets=True`, the incidence matrix and base costs are sampled once
    (lazily, on the first call to `generate`) and reused for every subsequent
    instance, with only the costs perturbed by `costs_jitter`.
    """

    def __init__(
        self,
        n_elements: rv_frozen = randint(low=50, high=51),
        n_sets: rv_frozen = randint(low=100, high=101),
        costs: rv_frozen = uniform(loc=0.0, scale=100.0),
        costs_jitter: rv_frozen = uniform(loc=-5.0, scale=10.0),
        K: rv_frozen = uniform(loc=25.0, scale=0.0),
        density: rv_frozen = uniform(loc=0.02, scale=0.00),
        fix_sets: bool = True,
    ):
        # Probability distributions for instance dimensions and values.
        self.n_elements = n_elements
        self.n_sets = n_sets
        self.costs = costs
        self.costs_jitter = costs_jitter
        self.density = density
        self.K = K
        self.fix_sets = fix_sets
        # Reference instance; populated on the first `generate` call when
        # `fix_sets=True`, and left as `None` otherwise.
        self.fixed_costs = None
        self.fixed_matrix = None

    def generate(self, n_samples: int) -> List[SetCoverData]:
        """Generates `n_samples` random instances."""

        def _sample() -> SetCoverData:
            if self.fixed_matrix is None:
                n_sets = self.n_sets.rvs()
                n_elements = self.n_elements.rvs()
                density = self.density.rvs()

                # Random Bernoulli incidence matrix with the sampled density.
                incidence_matrix = np.random.rand(n_elements, n_sets) < density
                incidence_matrix = incidence_matrix.astype(int)

                # Ensure each element belongs to at least one set
                for j in range(n_elements):
                    if incidence_matrix[j, :].sum() == 0:
                        incidence_matrix[j, randint(low=0, high=n_sets).rvs()] = 1

                # Ensure each set contains at least one element
                for i in range(n_sets):
                    if incidence_matrix[:, i].sum() == 0:
                        incidence_matrix[randint(low=0, high=n_elements).rvs(), i] = 1

                # Base cost plus a term proportional to the set's cardinality.
                costs = self.costs.rvs(n_sets) + self.K.rvs() * incidence_matrix.sum(
                    axis=0
                )
                if self.fix_sets:
                    self.fixed_matrix = incidence_matrix
                    self.fixed_costs = costs
            else:
                incidence_matrix = self.fixed_matrix
                (_, n_sets) = incidence_matrix.shape
                # NOTE(review): the jitter is applied multiplicatively, but the
                # default `costs_jitter` spans [-5, 5], which can flip cost
                # signs and rescale costs drastically — confirm this is intended.
                costs = self.fixed_costs * self.costs_jitter.rvs(n_sets)
            return SetCoverData(
                costs=costs.round(2),
                incidence_matrix=incidence_matrix,
            )

        return [_sample() for _ in range(n_samples)]
83 |
84 |
def build_setcover_model_gurobipy(data: Union[str, SetCoverData]) -> GurobiModel:
    """Converts set cover data into a concrete Gurobipy model."""
    data = _read_setcover_data(data)
    n_rows, n_cols = data.incidence_matrix.shape
    model = gp.Model()
    # One binary variable per candidate set.
    x = model.addMVar(n_cols, vtype=GRB.BINARY, name="x")
    # Every element must be covered by at least one chosen set.
    model.addConstr(data.incidence_matrix @ x >= np.ones(n_rows), name="eqs")
    # Minimize total cost of the chosen sets.
    model.setObjective(data.costs @ x)
    model.update()
    return GurobiModel(model)
94 |
95 |
def build_setcover_model_pyomo(
    data: Union[str, SetCoverData],
    solver: str = "gurobi_persistent",
) -> PyomoModel:
    """Converts set cover data into a concrete Pyomo model."""
    data = _read_setcover_data(data)
    n_rows, n_cols = data.incidence_matrix.shape
    model = pe.ConcreteModel()
    model.sets = pe.Set(initialize=range(n_cols))
    model.x = pe.Var(model.sets, domain=pe.Boolean, name="x")
    # One covering constraint per element: it must appear in a chosen set.
    model.eqs = pe.Constraint(pe.Any)
    for row in range(n_rows):
        lhs = sum(
            data.incidence_matrix[row, col] * model.x[col] for col in range(n_cols)
        )
        model.eqs[row] = lhs >= 1
    # Minimize total cost of the chosen sets.
    model.obj = pe.Objective(
        expr=sum(data.costs[col] * model.x[col] for col in range(n_cols))
    )
    return PyomoModel(model, solver)
114 |
115 |
def _read_setcover_data(data: Union[str, SetCoverData]) -> SetCoverData:
    """Loads set cover data from a pickled file when given a path."""
    if not isinstance(data, str):
        assert isinstance(data, SetCoverData)
        return data
    loaded = read_pkl_gz(data)
    assert isinstance(loaded, SetCoverData)
    return loaded
121 |
--------------------------------------------------------------------------------
/miplearn/problems/setpack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | from gurobipy.gurobipy import GRB
11 | from scipy.stats import uniform, randint
12 | from scipy.stats.distributions import rv_frozen
13 |
14 | from .setcover import SetCoverGenerator
15 | from miplearn.solvers.gurobi import GurobiModel
16 | from ..io import read_pkl_gz
17 |
18 |
@dataclass
class SetPackData:
    """Data for the set packing problem."""

    # 1-dimensional array with the value of including each set.
    costs: np.ndarray
    # Binary matrix with shape (n_elements, n_sets); entry (i, j) is 1 when
    # element i belongs to set j.
    incidence_matrix: np.ndarray
23 |
24 |
class SetPackGenerator:
    """Random instance generator for the set packing problem.

    All sampling is delegated to `SetCoverGenerator` with identical
    parameters; the resulting costs and incidence matrices are simply
    repackaged as set packing data.
    """

    def __init__(
        self,
        n_elements: rv_frozen = randint(low=50, high=51),
        n_sets: rv_frozen = randint(low=100, high=101),
        costs: rv_frozen = uniform(loc=0.0, scale=100.0),
        costs_jitter: rv_frozen = uniform(loc=-5.0, scale=10.0),
        K: rv_frozen = uniform(loc=25.0, scale=0.0),
        density: rv_frozen = uniform(loc=0.02, scale=0.00),
        fix_sets: bool = True,
    ) -> None:
        # The set cover generator does the actual sampling work.
        self.gen = SetCoverGenerator(
            n_elements=n_elements,
            n_sets=n_sets,
            costs=costs,
            costs_jitter=costs_jitter,
            K=K,
            density=density,
            fix_sets=fix_sets,
        )

    def generate(self, n_samples: int) -> List[SetPackData]:
        """Generates `n_samples` random instances."""
        samples = self.gen.generate(n_samples)
        return [SetPackData(s.costs, s.incidence_matrix) for s in samples]
54 |
55 |
def build_setpack_model_gurobipy(data: Union[str, SetPackData]) -> GurobiModel:
    """Converts set packing data into a concrete Gurobipy model."""
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, SetPackData)
    n_rows, n_cols = data.incidence_matrix.shape
    model = gp.Model()
    # One binary variable per candidate set.
    x = model.addMVar(n_cols, vtype=GRB.BINARY, name="x")
    # Each element may appear in at most one chosen set.
    model.addConstr(data.incidence_matrix @ x <= np.ones(n_rows))
    # Costs are negated because the solver minimizes by default.
    model.setObjective(-data.costs @ x)
    model.update()
    return GurobiModel(model)
67 |
--------------------------------------------------------------------------------
/miplearn/problems/stab.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import logging
6 | from dataclasses import dataclass
7 | from typing import List, Union, Any, Hashable, Optional
8 |
9 | import gurobipy as gp
10 | import networkx as nx
11 | import numpy as np
12 | import pyomo.environ as pe
13 | from gurobipy import GRB, quicksum
14 | from miplearn.io import read_pkl_gz
15 | from miplearn.solvers.gurobi import GurobiModel
16 | from miplearn.solvers.pyomo import PyomoModel
17 | from networkx import Graph
18 | from scipy.stats import uniform, randint
19 | from scipy.stats.distributions import rv_frozen
20 |
21 | from . import _gurobipy_set_params, _pyomo_set_params
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
@dataclass
class MaxWeightStableSetData:
    """Data for the maximum-weight stable set problem."""

    # Undirected graph; stable sets are sought among its vertices.
    graph: Graph
    # 1-dimensional array with one weight per vertex.
    weights: np.ndarray
30 |
31 |
class MaxWeightStableSetGenerator:
    """Random instance generator for the Maximum-Weight Stable Set Problem.

    The generator has two modes of operation. When `fix_graph=True` is provided,
    one random Erdős-Rényi graph $G_{n,p}$ is generated in the constructor, where $n$
    and $p$ are sampled from user-provided probability distributions `n` and `p`. To
    generate each instance, the generator independently samples each $w_v$ from the
    user-provided probability distribution `w`.

    When `fix_graph=False`, a new random graph is generated for each instance; the
    remaining parameters are sampled in the same way.
    """

    def __init__(
        self,
        w: rv_frozen = uniform(loc=10.0, scale=1.0),
        n: rv_frozen = randint(low=250, high=251),
        p: rv_frozen = uniform(loc=0.05, scale=0.0),
        fix_graph: bool = True,
    ):
        """Initialize the problem generator.

        Parameters
        ----------
        w: rv_continuous
            Probability distribution for vertex weights.
        n: rv_discrete
            Probability distribution for parameter $n$ in Erdős-Rényi model.
        p: rv_continuous
            Probability distribution for parameter $p$ in Erdős-Rényi model.
        fix_graph: bool
            If True, the same graph is reused for every generated instance.
        """
        assert isinstance(w, rv_frozen), "w should be a SciPy probability distribution"
        assert isinstance(n, rv_frozen), "n should be a SciPy probability distribution"
        assert isinstance(p, rv_frozen), "p should be a SciPy probability distribution"
        self.w = w
        self.n = n
        self.p = p
        self.fix_graph = fix_graph
        # Shared graph when `fix_graph=True`; `None` means a fresh graph is
        # generated for each instance.
        self.graph: Optional[Graph] = None
        if fix_graph:
            self.graph = self._generate_graph()

    def generate(self, n_samples: int) -> List[MaxWeightStableSetData]:
        """Generates `n_samples` random instances."""

        def _sample() -> MaxWeightStableSetData:
            if self.graph is not None:
                graph = self.graph
            else:
                graph = self._generate_graph()
            # Weights are resampled for every instance, even on a fixed graph.
            weights = np.round(self.w.rvs(graph.number_of_nodes()), 2)
            return MaxWeightStableSetData(graph, weights)

        return [_sample() for _ in range(n_samples)]

    def _generate_graph(self) -> Graph:
        # Erdős-Rényi (binomial) random graph G(n, p).
        return nx.generators.random_graphs.binomial_graph(self.n.rvs(), self.p.rvs())
87 |
88 |
def build_stab_model_gurobipy(
    data: Union[str, MaxWeightStableSetData],
    params: Optional[dict[str, Any]] = None,
) -> GurobiModel:
    """Converts stable set data into a Gurobipy model with clique cut callbacks.

    Parameters
    ----------
    data
        Problem data, or the path of a pickled data file.
    params
        Optional Gurobi parameters, applied to the model before it is built.
    """
    model = gp.Model()
    _gurobipy_set_params(model, params)

    data = _stab_read(data)
    nodes = list(data.graph.nodes)

    # Variables and objective function
    # Weights are negated: the solver minimizes, so this maximizes total weight.
    x = model.addVars(nodes, vtype=GRB.BINARY, name="x")
    model.setObjective(quicksum(-data.weights[i] * x[i] for i in nodes))

    # Edge inequalities
    for i1, i2 in data.graph.edges:
        model.addConstr(x[i1] + x[i2] <= 1)

    # Callback: find cliques violated by the current node relaxation.
    def cuts_separate(m: GurobiModel) -> List[Hashable]:
        x_val = m.inner.cbGetNodeRel(x)
        return _stab_separate(data, x_val)

    # Callback: add one clique cut per reported violation.
    def cuts_enforce(m: GurobiModel, violations: List[Any]) -> None:
        logger.info(f"Adding {len(violations)} clique cuts...")
        for clique in violations:
            m.add_constr(quicksum(x[i] for i in clique) <= 1)

    model.update()

    return GurobiModel(
        model,
        cuts_separate=cuts_separate,
        cuts_enforce=cuts_enforce,
    )
123 |
124 |
def build_stab_model_pyomo(
    data: Union[str, MaxWeightStableSetData],
    solver: str = "gurobi_persistent",
    params: Optional[dict[str, Any]] = None,
) -> PyomoModel:
    """Converts stable set data into a Pyomo model with clique cut callbacks.

    Parameters
    ----------
    data
        Problem data, or the path of a pickled data file (handled by
        `_stab_read`, matching the gurobipy builder).
    solver
        Name of the Pyomo solver interface to use.
    params
        Optional solver parameters, applied after the model is built.
    """
    data = _stab_read(data)
    model = pe.ConcreteModel()
    nodes = pe.Set(initialize=list(data.graph.nodes))

    # Variables and objective function
    # Weights are negated: the solver minimizes, so this maximizes total weight.
    model.x = pe.Var(nodes, domain=pe.Boolean, name="x")
    model.obj = pe.Objective(expr=sum([-data.weights[i] * model.x[i] for i in nodes]))

    # Edge inequalities
    model.edge_eqs = pe.ConstraintList()
    for i1, i2 in data.graph.edges:
        model.edge_eqs.add(model.x[i1] + model.x[i2] <= 1)

    # Clique inequalities
    model.clique_eqs = pe.ConstraintList()

    # Callback: pull node-relaxation values into the vars, then separate.
    def cuts_separate(m: PyomoModel) -> List[Hashable]:
        m.solver.cbGetNodeRel([model.x[i] for i in nodes])
        x_val = [model.x[i].value for i in nodes]
        return _stab_separate(data, x_val)

    # Callback: add one clique cut per reported violation.
    def cuts_enforce(m: PyomoModel, violations: List[Any]) -> None:
        logger.info(f"Adding {len(violations)} clique cuts...")
        for clique in violations:
            m.add_constr(model.clique_eqs.add(sum(model.x[i] for i in clique) <= 1))

    pm = PyomoModel(
        model,
        solver,
        cuts_separate=cuts_separate,
        cuts_enforce=cuts_enforce,
    )
    _pyomo_set_params(pm, params, solver)
    return pm
164 |
165 |
def _stab_read(data: Union[str, MaxWeightStableSetData]) -> MaxWeightStableSetData:
    """Loads stable set data from a pickled file when given a path."""
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(
        data, MaxWeightStableSetData
    ), "data should be MaxWeightStableSetData"
    return data
171 |
172 |
def _stab_separate(data: MaxWeightStableSetData, x_val: List[float]) -> List:
    """Returns maximal cliques violated by the fractional solution `x_val`."""
    # A stable set contains at most one vertex of any clique; report every
    # maximal clique whose relaxed values sum above 1 (with a small tolerance
    # for floating-point noise). Cliques are returned sorted for determinism.
    return [
        sorted(clique)
        for clique in nx.find_cliques(data.graph)
        if sum(x_val[i] for i in clique) > 1.0001
    ]
181 |
--------------------------------------------------------------------------------
/miplearn/problems/tsp.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import logging
6 | from dataclasses import dataclass
7 | from typing import List, Tuple, Optional, Any, Union
8 |
9 | import gurobipy as gp
10 | import networkx as nx
11 | import numpy as np
12 | import pyomo.environ as pe
13 | from gurobipy import quicksum, GRB, tuplelist
14 | from miplearn.io import read_pkl_gz
15 | from miplearn.problems import _gurobipy_set_params, _pyomo_set_params
16 | from miplearn.solvers.gurobi import GurobiModel
17 | from scipy.spatial.distance import pdist, squareform
18 | from scipy.stats import uniform, randint
19 | from scipy.stats.distributions import rv_frozen
20 |
21 | from miplearn.solvers.pyomo import PyomoModel
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
@dataclass
class TravelingSalesmanData:
    """Data for the traveling salesman problem."""

    # Number of cities.
    n_cities: int
    # Square (n_cities, n_cities) matrix of pairwise distances.
    distances: np.ndarray
30 |
31 |
32 | class TravelingSalesmanGenerator:
33 | """Random generator for the Traveling Salesman Problem."""
34 |
    def __init__(
        self,
        x: rv_frozen = uniform(loc=0.0, scale=1000.0),
        y: rv_frozen = uniform(loc=0.0, scale=1000.0),
        n: rv_frozen = randint(low=100, high=101),
        gamma: rv_frozen = uniform(loc=1.0, scale=0.0),
        fix_cities: bool = True,
        round: bool = True,
    ) -> None:
        """Initializes the problem generator.

        Initially, the generator creates n cities (x_1,y_1),...,(x_n,y_n) where n,
        x_i and y_i are sampled independently from the provided probability
        distributions `n`, `x` and `y`. For each (unordered) pair of cities (i,j),
        the distance d[i,j] between them is set to:

            d[i,j] = gamma[i,j] \\sqrt{(x_i - x_j)^2 + (y_i - y_j)^2}

        where gamma is sampled from the provided probability distribution `gamma`.

        If fix_cities=True, the list of cities is kept the same for all generated
        instances. The gamma values, and therefore also the distances, are still
        different.

        By default, all distances d[i,j] are rounded to the nearest integer. If
        `round=False` is provided, this rounding will be disabled.

        Arguments
        ---------
        x: rv_continuous
            Probability distribution for the x-coordinate of each city.
        y: rv_continuous
            Probability distribution for the y-coordinate of each city.
        n: rv_discrete
            Probability distribution for the number of cities.
        gamma: rv_continuous
            Probability distribution for the per-pair distance scaling factor.
        fix_cities: bool
            If False, cities will be resampled for every generated instance. Otherwise, list
            of cities will be computed once, during the constructor.
        round: bool
            If True, distances are rounded to the nearest integer.
        """
        assert isinstance(x, rv_frozen), "x should be a SciPy probability distribution"
        assert isinstance(y, rv_frozen), "y should be a SciPy probability distribution"
        assert isinstance(n, rv_frozen), "n should be a SciPy probability distribution"
        assert isinstance(
            gamma,
            rv_frozen,
        ), "gamma should be a SciPy probability distribution"
        self.x = x
        self.y = y
        self.n = n
        self.gamma = gamma
        self.round = round

        # When cities are fixed, sample them once here; otherwise leave both
        # attributes as None so each `generate` call samples fresh cities.
        if fix_cities:
            self.fixed_n: Optional[int]
            self.fixed_cities: Optional[np.ndarray]
            self.fixed_n, self.fixed_cities = self._generate_cities()
        else:
            self.fixed_n = None
            self.fixed_cities = None
96 |
97 | def generate(self, n_samples: int) -> List[TravelingSalesmanData]:
98 | def _sample() -> TravelingSalesmanData:
99 | if self.fixed_cities is not None:
100 | assert self.fixed_n is not None
101 | n, cities = self.fixed_n, self.fixed_cities
102 | else:
103 | n, cities = self._generate_cities()
104 | distances = squareform(pdist(cities)) * self.gamma.rvs(size=(n, n))
105 | distances = np.tril(distances) + np.triu(distances.T, 1)
106 | if self.round:
107 | distances = distances.round()
108 | return TravelingSalesmanData(n, distances)
109 |
110 | return [_sample() for _ in range(n_samples)]
111 |
112 | def _generate_cities(self) -> Tuple[int, np.ndarray]:
113 | n = self.n.rvs()
114 | cities = np.array([(self.x.rvs(), self.y.rvs()) for _ in range(n)])
115 | return n, cities
116 |
117 |
def build_tsp_model_gurobipy(
    data: Union[str, TravelingSalesmanData],
    params: Optional[dict[str, Any]] = None,
) -> GurobiModel:
    """Build a gurobipy TSP model with lazy subtour elimination.

    Arguments
    ---------
    data:
        A TravelingSalesmanData instance, or the path to a pickled
        (.pkl.gz) file containing one.
    params:
        Gurobi parameters, applied through `_gurobipy_set_params`.
    """
    model = gp.Model()
    _gurobipy_set_params(model, params)

    data = _tsp_read(data)
    n = data.n_cities
    # One (undirected) edge for each unordered pair of distinct cities.
    edges = tuplelist((i, j) for i in range(n) for j in range(i + 1, n))

    # Binary variable x[i, j] = 1 iff edge (i, j) belongs to the tour.
    x = model.addVars(edges, vtype=GRB.BINARY, name="x")

    # Stash references needed later by the lazy-constraint machinery.
    model._x = x
    model._edges = edges
    model._n_cities = n

    # Minimize total travel distance.
    model.setObjective(quicksum(data.distances[i, j] * x[(i, j)] for (i, j) in edges))

    # Degree constraints: exactly two tour edges touch each city.
    model.addConstrs(
        (
            quicksum(x[min(i, j), max(i, j)] for j in range(n) if i != j) == 2
            for i in range(n)
        ),
        name="eq_degree",
    )

    def lazy_separate(model: GurobiModel) -> List[Any]:
        # Fetch the candidate solution and search it for disconnected subtours.
        x_val = model.inner.cbGetSolution(model.inner._x)
        return _tsp_separate(x_val, edges, n)

    def lazy_enforce(model: GurobiModel, violations: List[Any]) -> None:
        # Add one subtour elimination cut per violated component.
        for violation in violations:
            model.add_constr(
                quicksum(model.inner._x[e[0], e[1]] for e in violation) >= 2
            )
        logger.info(f"tsp: added {len(violations)} subtour elimination constraints")

    model.update()

    return GurobiModel(
        model,
        lazy_separate=lazy_separate,
        lazy_enforce=lazy_enforce,
    )
168 |
169 |
def build_tsp_model_pyomo(
    data: Union[str, TravelingSalesmanData],
    solver: str = "gurobi_persistent",
    params: Optional[dict[str, Any]] = None,
) -> PyomoModel:
    """Build a Pyomo TSP model with lazy subtour elimination.

    Arguments
    ---------
    data:
        A TravelingSalesmanData instance, or the path to a pickled
        (.pkl.gz) file containing one.
    solver:
        Name of the Pyomo solver interface (default: "gurobi_persistent";
        the lazy callbacks below rely on a persistent interface).
    params:
        Solver parameters, applied through `_pyomo_set_params`.
    """
    model = pe.ConcreteModel()
    data = _tsp_read(data)

    # One (undirected) edge for each unordered pair of distinct cities.
    edges = tuplelist(
        (i, j) for i in range(data.n_cities) for j in range(i + 1, data.n_cities)
    )

    # Decision variables: x[i, j] = 1 iff edge (i, j) belongs to the tour.
    model.x = pe.Var(edges, domain=pe.Boolean, name="x")
    # Objective: minimize total travel distance.
    model.obj = pe.Objective(
        expr=sum(model.x[i, j] * data.distances[i, j] for (i, j) in edges)
    )

    # Eq: Must choose two edges adjacent to each node
    model.degree_eqs = pe.ConstraintList()
    for i in range(data.n_cities):
        model.degree_eqs.add(
            sum(model.x[min(i, j), max(i, j)] for j in range(data.n_cities) if i != j)
            == 2
        )

    # Eq: Subtour elimination (populated lazily via the callbacks below)
    model.subtour_eqs = pe.ConstraintList()

    def lazy_separate(m: PyomoModel) -> List[Any]:
        # cbGetSolution loads the candidate solution into model.x (values
        # are read back through .value below), then we search the selected
        # edges for disconnected subtours.
        m.solver.cbGetSolution([model.x[e] for e in edges])
        x_val = {e: model.x[e].value for e in edges}
        return _tsp_separate(x_val, edges, data.n_cities)

    def lazy_enforce(m: PyomoModel, violations: List[Any]) -> None:
        # Add one subtour elimination cut per violated component.
        logger.warning(f"Adding {len(violations)} subtour elimination constraints...")
        for violation in violations:
            m.add_constr(
                model.subtour_eqs.add(sum(model.x[e[0], e[1]] for e in violation) >= 2)
            )

    pm = PyomoModel(
        model,
        solver,
        lazy_separate=lazy_separate,
        lazy_enforce=lazy_enforce,
    )
    _pyomo_set_params(pm, params, solver)
    return pm
219 |
220 |
def _tsp_read(data: Union[str, TravelingSalesmanData]) -> TravelingSalesmanData:
    """Return the instance data, loading it from disk if a path was given."""
    if not isinstance(data, str):
        assert isinstance(data, TravelingSalesmanData)
        return data
    loaded = read_pkl_gz(data)
    assert isinstance(loaded, TravelingSalesmanData)
    return loaded
226 |
227 |
def _tsp_separate(
    x_val: dict[Tuple[int, int], float],
    edges: List[Tuple[int, int]],
    n_cities: int,
) -> List:
    """Find subtours in a candidate solution and return the violated cuts.

    Builds the graph of selected edges (x > 0.5); every connected component
    that does not span all cities yields one violation, expressed as the
    list of edges crossing that component's boundary.
    """
    chosen = [e for e in edges if x_val[e] > 0.5]
    graph = nx.Graph()
    graph.add_edges_from(chosen)
    cuts = []
    for component in list(nx.connected_components(graph)):
        if len(component) >= n_cities:
            continue
        # Edges with exactly one endpoint inside the component.
        cuts.append(
            [[u, v] for (u, v) in edges if (u in component) != (v in component)]
        )
    return cuts
247 |
--------------------------------------------------------------------------------
/miplearn/problems/uc.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from math import pi
7 | from typing import List, Optional, Union
8 |
9 | import gurobipy as gp
10 | import numpy as np
11 | from gurobipy import GRB, quicksum
12 | from scipy.stats import uniform, randint
13 | from scipy.stats.distributions import rv_frozen
14 |
15 | from miplearn.io import read_pkl_gz
16 | from miplearn.solvers.gurobi import GurobiModel
17 |
18 |
@dataclass
class UnitCommitmentData:
    """Instance data for the unit commitment problem.

    `demand` is indexed by time period; all other arrays are indexed by
    generating unit.
    """

    # Power demand at each time period.
    demand: np.ndarray
    # Minimum power output of each unit, when committed.
    min_power: np.ndarray
    # Maximum power output of each unit.
    max_power: np.ndarray
    # Minimum number of consecutive periods each unit must stay on.
    min_uptime: np.ndarray
    # Minimum number of consecutive periods each unit must stay off.
    min_downtime: np.ndarray
    # Cost incurred each time a unit starts up.
    cost_startup: np.ndarray
    # Cost per unit of power produced.
    cost_prod: np.ndarray
    # Fixed cost for each period a unit is committed.
    cost_fixed: np.ndarray
29 |
30 |
class UnitCommitmentGenerator:
    """Random instance generator for the unit commitment problem.

    Each sample contains per-unit operating limits and costs, plus a
    periodic demand curve with a peak every 12 periods. When `fix_units`
    is True, the first generated instance becomes a reference, and later
    samples only perturb its demand and costs (scaled element-wise by
    factors drawn from `demand_jitter` and `cost_jitter`).
    """

    def __init__(
        self,
        n_units: rv_frozen = randint(low=1_000, high=1_001),
        n_periods: rv_frozen = randint(low=72, high=73),
        max_power: rv_frozen = uniform(loc=50, scale=450),
        min_power: rv_frozen = uniform(loc=0.5, scale=0.25),
        cost_startup: rv_frozen = uniform(loc=0, scale=10_000),
        cost_prod: rv_frozen = uniform(loc=0, scale=50),
        cost_fixed: rv_frozen = uniform(loc=0, scale=1_000),
        min_uptime: rv_frozen = randint(low=2, high=8),
        min_downtime: rv_frozen = randint(low=2, high=8),
        cost_jitter: rv_frozen = uniform(loc=0.75, scale=0.5),
        demand_jitter: rv_frozen = uniform(loc=0.9, scale=0.2),
        fix_units: bool = False,
    ) -> None:
        self.n_units = n_units
        self.n_periods = n_periods
        self.max_power = max_power
        self.min_power = min_power
        self.cost_startup = cost_startup
        self.cost_prod = cost_prod
        self.cost_fixed = cost_fixed
        self.min_uptime = min_uptime
        self.min_downtime = min_downtime
        self.cost_jitter = cost_jitter
        self.demand_jitter = demand_jitter
        self.fix_units = fix_units
        # Reference instance reused by later samples when fix_units=True.
        self.ref_data: Optional[UnitCommitmentData] = None

    def generate(self, n_samples: int) -> List[UnitCommitmentData]:
        """Generate and return `n_samples` random instances."""

        def _sample() -> UnitCommitmentData:
            if self.ref_data is None:
                T = self.n_periods.rvs()
                G = self.n_units.rvs()

                # Generate unit parameters. Note: min_power is sampled as a
                # fraction of each unit's max_power.
                max_power = self.max_power.rvs(G)
                min_power = max_power * self.min_power.rvs(G)
                min_uptime = self.min_uptime.rvs(G)
                min_downtime = self.min_downtime.rvs(G)
                cost_startup = self.cost_startup.rvs(G)
                cost_prod = self.cost_prod.rvs(G)
                cost_fixed = self.cost_fixed.rvs(G)
                capacity = max_power.sum()

                # Generate periodic demand in the range [0.4, 0.8] * capacity,
                # with a peak every 12 hours.
                demand = np.sin([i / 6 * pi for i in range(T)])
                demand *= uniform(loc=0, scale=1).rvs(T)
                demand -= demand.min()
                demand /= demand.max() / 0.4
                demand += 0.4
                demand *= capacity
            else:
                # Perturb the reference instance's demand and costs; keep
                # physical unit parameters unchanged.
                T, G = len(self.ref_data.demand), len(self.ref_data.max_power)
                demand = self.ref_data.demand * self.demand_jitter.rvs(T)
                min_power = self.ref_data.min_power
                max_power = self.ref_data.max_power
                min_uptime = self.ref_data.min_uptime
                min_downtime = self.ref_data.min_downtime
                cost_startup = self.ref_data.cost_startup * self.cost_jitter.rvs(G)
                cost_prod = self.ref_data.cost_prod * self.cost_jitter.rvs(G)
                cost_fixed = self.ref_data.cost_fixed * self.cost_jitter.rvs(G)

            data = UnitCommitmentData(
                demand.round(2),
                min_power.round(2),
                max_power.round(2),
                min_uptime,
                min_downtime,
                cost_startup.round(2),
                cost_prod.round(2),
                cost_fixed.round(2),
            )

            if self.ref_data is None and self.fix_units:
                self.ref_data = data

            return data

        return [_sample() for _ in range(n_samples)]
113 |
114 |
def build_uc_model_gurobipy(data: Union[str, UnitCommitmentData]) -> GurobiModel:
    """
    Models the unit commitment problem according to equations (1)-(5) of:

    Bendotti, P., Fouilhoux, P. & Rottner, C. The min-up/min-down unit
    commitment polytope. J Comb Optim 36, 1024-1058 (2018).
    https://doi.org/10.1007/s10878-018-0273-y

    Arguments
    ---------
    data:
        A UnitCommitmentData instance, or the path to a pickled (.pkl.gz)
        file containing one.
    """
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, UnitCommitmentData)

    # Notation, following the paper: T time periods, G generating units.
    T = len(data.demand)
    G = len(data.min_power)
    D = data.demand
    Pmin, Pmax = data.min_power, data.max_power
    L = data.min_uptime  # L[g]: minimum consecutive on-periods of unit g
    l = data.min_downtime  # l[g]: minimum consecutive off-periods of unit g

    model = gp.Model()
    # is_on[g, t] = 1 iff unit g is committed at time t
    is_on = model.addVars(G, T, vtype=GRB.BINARY, name="is_on")
    # switch_on[g, t] = 1 iff unit g starts up at time t
    switch_on = model.addVars(G, T, vtype=GRB.BINARY, name="switch_on")
    # prod[g, t]: power produced by unit g at time t (continuous, >= 0)
    prod = model.addVars(G, T, name="prod")

    # Objective function: fixed, start-up and production costs
    model.setObjective(
        quicksum(
            is_on[g, t] * data.cost_fixed[g]
            + switch_on[g, t] * data.cost_startup[g]
            + prod[g, t] * data.cost_prod[g]
            for g in range(G)
            for t in range(T)
        )
    )

    # Eq 1: Minimum up-time constraint: If unit g is down at time t, then it
    # cannot have started up during the previous L[g] periods.
    model.addConstrs(
        (
            quicksum(switch_on[g, k] for k in range(t - L[g] + 1, t + 1)) <= is_on[g, t]
            for g in range(G)
            for t in range(L[g] - 1, T)
        ),
        name="eq_min_uptime",
    )

    # Eq 2: Minimum down-time constraint: Symmetric to the minimum-up constraint.
    model.addConstrs(
        (
            quicksum(switch_on[g, k] for k in range(t - l[g] + 1, t + 1))
            <= 1 - is_on[g, t - l[g] + 1]
            for g in range(G)
            for t in range(l[g] - 1, T)
        ),
        name="eq_min_downtime",
    )

    # Eq 3: Ensures that if unit g starts up at time t, then the start-up
    # variable must be one.
    model.addConstrs(
        (
            switch_on[g, t] >= is_on[g, t] - is_on[g, t - 1]
            for g in range(G)
            for t in range(1, T)
        ),
        name="eq_startup",
    )

    # Eq 4: Ensures that demand is satisfied at each time period.
    model.addConstrs(
        (quicksum(prod[g, t] for g in range(G)) >= D[t] for t in range(T)),
        name="eq_demand",
    )

    # Eq 5: Sets the bounds to the quantity of power produced by each unit.
    model.addConstrs(
        (Pmin[g] * is_on[g, t] <= prod[g, t] for g in range(G) for t in range(T)),
        name="eq_prod_lb",
    )
    model.addConstrs(
        (prod[g, t] <= Pmax[g] * is_on[g, t] for g in range(G) for t in range(T)),
        name="eq_prod_ub",
    )
    model.update()

    return GurobiModel(model)
202 |
--------------------------------------------------------------------------------
/miplearn/problems/vertexcover.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from dataclasses import dataclass
6 | from typing import List, Union
7 |
8 | import gurobipy as gp
9 | import numpy as np
10 | from gurobipy import GRB, quicksum
11 | from networkx import Graph
12 | from scipy.stats import uniform, randint
13 | from scipy.stats.distributions import rv_frozen
14 |
15 | from .stab import MaxWeightStableSetGenerator
16 | from miplearn.solvers.gurobi import GurobiModel
17 | from ..io import read_pkl_gz
18 |
19 |
@dataclass
class MinWeightVertexCoverData:
    """Instance data for the minimum-weight vertex cover problem."""

    # Undirected graph whose edges must be covered.
    graph: Graph
    # weights[v] is the cost of including vertex v in the cover.
    weights: np.ndarray
24 |
25 |
class MinWeightVertexCoverGenerator:
    """Random generator for minimum-weight vertex cover instances.

    Delegates to MaxWeightStableSetGenerator, since both problems share the
    same random graph and weight structure.
    """

    def __init__(
        self,
        w: rv_frozen = uniform(loc=10.0, scale=1.0),
        n: rv_frozen = randint(low=250, high=251),
        p: rv_frozen = uniform(loc=0.05, scale=0.0),
        fix_graph: bool = True,
    ):
        self._generator = MaxWeightStableSetGenerator(w, n, p, fix_graph)

    def generate(self, n_samples: int) -> List[MinWeightVertexCoverData]:
        """Generate and return `n_samples` random instances."""
        stable_set_samples = self._generator.generate(n_samples)
        return [
            MinWeightVertexCoverData(sample.graph, sample.weights)
            for sample in stable_set_samples
        ]
41 |
42 |
def build_vertexcover_model_gurobipy(
    data: Union[str, MinWeightVertexCoverData]
) -> GurobiModel:
    """Build a gurobipy model for a minimum-weight vertex cover instance.

    Arguments
    ---------
    data:
        A MinWeightVertexCoverData instance, or the path to a pickled
        (.pkl.gz) file containing one.
    """
    if isinstance(data, str):
        data = read_pkl_gz(data)
    assert isinstance(data, MinWeightVertexCoverData)

    model = gp.Model()
    vertices = list(data.graph.nodes)

    # x[v] = 1 iff vertex v is included in the cover.
    x = model.addVars(vertices, vtype=GRB.BINARY, name="x")

    # Minimize the total weight of the selected vertices.
    model.setObjective(quicksum(data.weights[v] * x[v] for v in vertices))

    # Each edge must have at least one endpoint in the cover.
    for u, v in data.graph.edges:
        model.addConstr(x[u] + x[v] >= 1)

    model.update()
    return GurobiModel(model)
57 |
--------------------------------------------------------------------------------
/miplearn/solvers/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/miplearn/solvers/abstract.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from abc import ABC, abstractmethod
6 | from typing import Optional, Dict, Callable, Hashable, List, Any
7 |
8 | import numpy as np
9 |
10 | from miplearn.h5 import H5File
11 |
12 |
class AbstractModel(ABC):
    """Solver-agnostic interface to an optimization model.

    Concrete subclasses adapt a specific modeling framework (e.g. Gurobi,
    Pyomo) to the operations MIPLearn needs: solving, fixing variables,
    warm-starting, and extracting training data into HDF5 files.
    """

    # Capability flags; subclasses that support a feature override it to True.
    _supports_basis_status = False
    _supports_sensitivity_analysis = False
    _supports_node_count = False
    _supports_solution_pool = False

    # Possible values of `self._where`, indicating the current solver context
    # (regular solve, cutting-plane callback, or lazy-constraint callback).
    WHERE_DEFAULT = "default"
    WHERE_CUTS = "cuts"
    WHERE_LAZY = "lazy"

    def __init__(self) -> None:
        # Callbacks for separating/enforcing lazy constraints and user cuts,
        # plus the violations collected so far; presumably assigned by
        # subclasses or model builders — TODO confirm.
        self._lazy_enforce: Optional[Callable] = None
        self._lazy_separate: Optional[Callable] = None
        self._lazy: Optional[List[Any]] = None
        self._cuts_enforce: Optional[Callable] = None
        self._cuts_separate: Optional[Callable] = None
        self._cuts: Optional[List[Any]] = None
        # Cuts supplied ahead-of-time (see `set_cuts`).
        self._cuts_aot: Optional[List[Any]] = None
        self._where = self.WHERE_DEFAULT

    @abstractmethod
    def add_constrs(
        self,
        var_names: np.ndarray,
        constrs_lhs: np.ndarray,
        constrs_sense: np.ndarray,
        constrs_rhs: np.ndarray,
        stats: Optional[Dict] = None,
    ) -> None:
        """Add linear constraints (lhs sense rhs) over the named variables."""
        pass

    @abstractmethod
    def extract_after_load(self, h5: H5File) -> None:
        """Extract static problem data into `h5`, after the model is built."""
        pass

    @abstractmethod
    def extract_after_lp(self, h5: H5File) -> None:
        """Extract LP-relaxation data into `h5`, after the relaxation solves."""
        pass

    @abstractmethod
    def extract_after_mip(self, h5: H5File) -> None:
        """Extract MIP solution data into `h5`, after the model solves."""
        pass

    @abstractmethod
    def fix_variables(
        self,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict] = None,
    ) -> None:
        """Fix the named variables to the given values before solving."""
        pass

    @abstractmethod
    def optimize(self) -> None:
        """Solve the model."""
        pass

    @abstractmethod
    def relax(self) -> "AbstractModel":
        """Return the continuous relaxation of this model."""
        pass

    @abstractmethod
    def set_warm_starts(
        self,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict] = None,
    ) -> None:
        """Provide warm-start values for the named variables."""
        pass

    @abstractmethod
    def write(self, filename: str) -> None:
        """Write the model to the given file."""
        pass

    def set_cuts(self, cuts: List) -> None:
        """Store cuts to be enforced ahead-of-time, before the solve starts."""
        self._cuts_aot = cuts

    def lazy_enforce(self, violations: List[Any]) -> None:
        """Enforce the given violations through the user-provided callback."""
        if self._lazy_enforce is not None:
            self._lazy_enforce(self, violations)

    def _lazy_enforce_collected(self) -> None:
        """Adds all lazy constraints identified in the callback as actual model constraints. Useful for generating
        a final MPS file with the constraints that were required in this run."""
        if self._lazy_enforce is not None:
            self._lazy_enforce(self, self._lazy)
98 |
--------------------------------------------------------------------------------
/miplearn/solvers/learning.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from os.path import exists
5 | from tempfile import NamedTemporaryFile
6 | from typing import List, Any, Union, Dict, Callable, Optional, Tuple
7 |
8 | from miplearn.h5 import H5File
9 | from miplearn.io import _to_h5_filename
10 | from miplearn.solvers.abstract import AbstractModel
11 | import shutil
12 |
13 |
class LearningSolver:
    """Solver that runs machine-learning components before the MIP solve.

    Each component is trained on previously collected H5 files and may
    modify the model (warm starts, fixed variables, cuts, lazy constraints)
    before optimization begins.
    """

    def __init__(self, components: List[Any], skip_lp: bool = False) -> None:
        self.components = components
        self.skip_lp = skip_lp

    def fit(self, data_filenames: List[str]) -> None:
        """Train every component on the H5 files matching the given instances."""
        h5_filenames = [_to_h5_filename(f) for f in data_filenames]
        for component in self.components:
            component.fit(h5_filenames)

    def optimize(
        self,
        model: Union[str, AbstractModel],
        build_model: Optional[Callable] = None,
    ) -> Tuple[AbstractModel, Dict[str, Any]]:
        """Solve the given model (or instance file) and return it with stats."""
        mode = "w"
        h5_filename = NamedTemporaryFile().name
        if isinstance(model, str):
            assert build_model is not None
            src_h5_filename = _to_h5_filename(model)
            model = build_model(model)
            assert isinstance(model, AbstractModel)

            # If the instance has an associated H5 file, work on a temporary
            # copy of it, keeping the original file unmodified.
            if exists(src_h5_filename):
                shutil.copy(src_h5_filename, h5_filename)
                mode = "r+"

        stats: Dict[str, Any] = {}
        with H5File(h5_filename, mode) as h5:
            model.extract_after_load(h5)
            if not self.skip_lp:
                relaxation = model.relax()
                relaxation.optimize()
                relaxation.extract_after_lp(h5)
            for component in self.components:
                extra_stats = component.before_mip(h5_filename, model, stats)
                if extra_stats is not None:
                    stats.update(extra_stats)
            model.optimize()
            model.extract_after_mip(h5)

        return model, stats
57 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -e .[dev]
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from setuptools import setup, find_namespace_packages
6 |
# Package metadata and dependency declarations. Development dependencies are
# installed with `pip install -e .[dev]` (see requirements.txt).
setup(
    name="miplearn",
    version="0.4.2",
    author="Alinson S. Xavier",
    author_email="axavier@anl.gov",
    description="Extensible Framework for Learning-Enhanced Mixed-Integer Optimization",
    url="https://github.com/ANL-CEEESA/MIPLearn/",
    packages=find_namespace_packages(),
    python_requires=">=3.9",
    # Core dependencies required at runtime.
    install_requires=[
        "Jinja2<3.1",
        "gurobipy>=11,<12",
        "h5py>=3,<4",
        "networkx>=2,<3",
        "numpy>=1,<2",
        "pandas>=1,<2",
        "pathos>=0.2,<0.3",
        "pyomo>=6,<7",
        "scikit-learn>=1,<2",
        "scipy>=1,<2",
        "tqdm>=4,<5",
    ],
    # Extra dependencies for documentation, linting, testing and releases.
    extras_require={
        "dev": [
            "Sphinx>=3,<4",
            "black==22.6.0",
            "mypy==1.8",
            "myst-parser==0.14.0",
            "nbsphinx>=0.9,<0.10",
            "pyflakes==2.5.0",
            "pytest>=7,<8",
            "sphinx-book-theme==0.1.0",
            "sphinxcontrib-applehelp==1.0.4",
            "sphinxcontrib-devhelp==1.0.2",
            "sphinxcontrib-htmlhelp==2.0.1",
            "sphinxcontrib-serializinghtml==1.1.5",
            "sphinxcontrib-qthelp==1.0.3",
            "sphinx-multitoc-numbering>=0.1,<0.2",
            "twine>=6,<7",
        ]
    },
)
49 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/tests/components/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/tests/components/cuts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/components/cuts/__init__.py
--------------------------------------------------------------------------------
/tests/components/cuts/test_mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from typing import Any, List, Dict
6 | from unittest.mock import Mock
7 |
8 | from miplearn.components.cuts.mem import MemorizingCutsComponent
9 | from miplearn.extractors.abstract import FeaturesExtractor
10 | from miplearn.problems.stab import build_stab_model_gurobipy, build_stab_model_pyomo
11 | from miplearn.solvers.learning import LearningSolver
12 | from sklearn.dummy import DummyClassifier
13 | from sklearn.neighbors import KNeighborsClassifier
14 | from typing import Callable
15 |
16 |
def test_mem_component_gp(
    stab_gp_h5: List[str],
    stab_pyo_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    """Check that MemorizingCutsComponent fits and predicts on stable-set data."""
    for h5 in [stab_pyo_h5, stab_gp_h5]:
        classifier = Mock(wraps=DummyClassifier())
        component = MemorizingCutsComponent(clf=classifier, extractor=default_extractor)
        component.fit(h5)

        # Fit must receive the extracted features and the cut labels
        classifier.fit.assert_called()
        x_train, y_train = classifier.fit.call_args.args
        assert x_train.shape == (3, 50)
        assert y_train.shape == (3, 412)
        labels = y_train.tolist()
        assert labels[0][40:50] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        assert labels[1][40:50] == [1, 1, 0, 1, 1, 1, 1, 1, 1, 1]
        assert labels[2][40:50] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]

        # Violations found during training must be memorized
        assert component.constrs_ is not None
        assert component.n_features_ == 50
        assert component.n_targets_ == 412
        assert len(component.constrs_) == 412

        # Run the component on a single instance
        stats: Dict[str, Any] = {}
        model = Mock()
        component.before_mip(h5[0], model, stats)

        # Prediction must receive a single feature row
        classifier.predict.assert_called()
        (x_pred,) = classifier.predict.call_args.args
        assert x_pred.shape == (1, 50)

        # Predicted cuts must be handed to the model ahead-of-time
        model.set_cuts.assert_called()
        (predicted_cuts,) = model.set_cuts.call_args.args
        assert predicted_cuts is not None
        assert len(predicted_cuts) == 256
58 |
59 |
def test_usage_stab(
    stab_gp_h5: List[str],
    stab_pyo_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    """End-to-end check: a solver with the cut component adds cuts ahead-of-time."""
    cases = [
        (stab_pyo_h5, build_stab_model_pyomo),
        (stab_gp_h5, build_stab_model_gurobipy),
    ]
    for h5, build_model in cases:
        data_filenames = [f.replace(".h5", ".pkl.gz") for f in h5]
        component = MemorizingCutsComponent(
            clf=KNeighborsClassifier(n_neighbors=1),
            extractor=default_extractor,
        )
        solver = LearningSolver(components=[component])
        solver.fit(data_filenames)
        model, stats = solver.optimize(data_filenames[0], build_model)  # type: ignore
        assert stats["Cuts: AOT"] > 0
76 |
--------------------------------------------------------------------------------
/tests/components/lazy/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/components/lazy/__init__.py
--------------------------------------------------------------------------------
/tests/components/lazy/test_mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from typing import List, Dict, Any
6 | from unittest.mock import Mock
7 |
8 | from sklearn.dummy import DummyClassifier
9 | from sklearn.neighbors import KNeighborsClassifier
10 |
11 | from miplearn.components.lazy.mem import MemorizingLazyComponent
12 | from miplearn.extractors.abstract import FeaturesExtractor
13 | from miplearn.problems.tsp import build_tsp_model_gurobipy, build_tsp_model_pyomo
14 | from miplearn.solvers.learning import LearningSolver
15 |
16 |
def test_mem_component(
    tsp_gp_h5: List[str],
    tsp_pyo_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    """Check that MemorizingLazyComponent fits and predicts on TSP data.

    Fix: the loop variable `h5` was previously ignored — `fit` and
    `before_mip` always used `tsp_gp_h5`, so the pyomo iteration tested
    nothing new. They now use `h5`, mirroring the analogous cuts test.
    """
    for h5 in [tsp_gp_h5, tsp_pyo_h5]:
        clf = Mock(wraps=DummyClassifier())
        comp = MemorizingLazyComponent(clf=clf, extractor=default_extractor)
        comp.fit(h5)

        # Should call fit method with correct arguments
        clf.fit.assert_called()
        x, y = clf.fit.call_args.args
        assert x.shape == (3, 190)
        assert y.tolist() == [
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
            [1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0],
            [1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1],
        ]

        # Should store violations
        assert comp.constrs_ is not None
        assert comp.n_features_ == 190
        assert comp.n_targets_ == 20
        assert len(comp.constrs_) == 20

        # Call before-mip
        stats: Dict[str, Any] = {}
        model = Mock()
        comp.before_mip(h5[0], model, stats)

        # Should call predict with correct args
        clf.predict.assert_called()
        (x_test,) = clf.predict.call_args.args
        assert x_test.shape == (1, 190)
52 |
53 |
def test_usage_tsp(
    tsp_gp_h5: List[str],
    tsp_pyo_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    """End-to-end check: a solver with the lazy component adds constraints AOT."""
    cases = [
        (tsp_pyo_h5, build_tsp_model_pyomo),
        (tsp_gp_h5, build_tsp_model_gurobipy),
    ]
    for h5, build_model in cases:
        data_filenames = [f.replace(".h5", ".pkl.gz") for f in h5]
        component = MemorizingLazyComponent(
            clf=KNeighborsClassifier(n_neighbors=1),
            extractor=default_extractor,
        )
        solver = LearningSolver(components=[component])
        solver.fit(data_filenames)
        model, stats = solver.optimize(data_filenames[0], build_model)  # type: ignore
        assert stats["Lazy Constraints: AOT"] > 0
70 |
--------------------------------------------------------------------------------
/tests/components/primal/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/components/primal/__init__.py
--------------------------------------------------------------------------------
/tests/components/primal/test_expert.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import List, Dict, Any
5 | from unittest.mock import Mock
6 |
7 | from miplearn.components.primal.actions import SetWarmStart, FixVariables
8 | from miplearn.components.primal.expert import ExpertPrimalComponent
9 |
10 |
def test_expert(multiknapsack_h5: List[str]) -> None:
    """Check that ExpertPrimalComponent forwards the stored solution to the model."""
    model = Mock()
    stats: Dict[str, Any] = {}

    # Warm-start action should pass names and starts to set_warm_starts
    component = ExpertPrimalComponent(action=SetWarmStart())
    component.before_mip(multiknapsack_h5[0], model, stats)
    model.set_warm_starts.assert_called()
    names, starts, _ = model.set_warm_starts.call_args.args
    assert names.shape == (100,)
    assert starts.shape == (1, 100)

    # Fix-variables action should pass names and values to fix_variables
    component = ExpertPrimalComponent(action=FixVariables())
    component.before_mip(multiknapsack_h5[0], model, stats)
    model.fix_variables.assert_called()
    names, values, _ = model.fix_variables.call_args.args
    assert names.shape == (100,)
    assert values.shape == (100,)
--------------------------------------------------------------------------------
/tests/components/primal/test_indep.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import List, Dict, Any
5 | from unittest.mock import Mock, call
6 |
7 | from sklearn.dummy import DummyClassifier
8 |
9 | from miplearn.components.primal.actions import SetWarmStart
10 | from miplearn.components.primal.indep import IndependentVarsPrimalComponent
11 | from miplearn.extractors.fields import H5FieldsExtractor
12 |
13 |
def test_indep(multiknapsack_h5: List[str]) -> None:
    """IndependentVarsPrimalComponent should train one classifier per binary
    variable and use their predictions to build a warm start."""
    # Mock the clone function so the per-variable classifiers can be
    # inspected after fitting.
    clone_fn = Mock(return_value=Mock(wraps=DummyClassifier()))
    comp = IndependentVarsPrimalComponent(
        base_clf="dummy",
        extractor=H5FieldsExtractor(var_fields=["lp_var_values"]),
        clone_fn=clone_fn,
        action=SetWarmStart(),
    )
    comp.fit(multiknapsack_h5)

    # One clone (and one stored classifier) per variable.
    clone_fn.assert_has_calls([call("dummy") for _ in range(100)])
    assert len(comp.clf_) == 100

    # Each classifier is fitted on (n_samples, n_features) = (3, 1) data.
    for var_name in (b"x[0]", b"x[1]"):
        fit_mock = comp.clf_[var_name].fit
        fit_mock.assert_called()
        x_train, y_train = fit_mock.call_args.args
        assert x_train.shape == (3, 1)
        assert y_train.shape == (3,)

    # Run before-mip on a single instance.
    stats: Dict[str, Any] = {}
    solver_mock = Mock()
    comp.before_mip(multiknapsack_h5[0], solver_mock, stats)

    # Each classifier predicts on a single test row.
    for var_name in (b"x[0]", b"x[1]"):
        predict_mock = comp.clf_[var_name].predict
        predict_mock.assert_called()
        (x_pred,) = predict_mock.call_args.args
        assert x_pred.shape == (1, 1)

    # A single warm start covering all 100 variables is produced.
    solver_mock.set_warm_starts.assert_called()
    var_names, start_vals, _ = solver_mock.set_warm_starts.call_args.args
    assert len(var_names) == 100
    assert start_vals.shape == (1, 100)
52 |
--------------------------------------------------------------------------------
/tests/components/primal/test_joint.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import List, Dict, Any
5 | from unittest.mock import Mock
6 |
7 | from sklearn.dummy import DummyClassifier
8 |
9 | from miplearn.components.primal.actions import SetWarmStart
10 | from miplearn.components.primal.joint import JointVarsPrimalComponent
11 | from miplearn.extractors.fields import H5FieldsExtractor
12 |
13 |
def test_joint(multiknapsack_h5: List[str]) -> None:
    """JointVarsPrimalComponent should train a single classifier over all
    variables jointly and use it to build a warm start."""
    wrapped_clf = Mock(wraps=DummyClassifier())
    comp = JointVarsPrimalComponent(
        clf=wrapped_clf,
        extractor=H5FieldsExtractor(instance_fields=["static_var_obj_coeffs"]),
        action=SetWarmStart(),
    )
    comp.fit(multiknapsack_h5)

    # fit() receives one feature row and one label row per training instance.
    wrapped_clf.fit.assert_called()
    x_train, y_train = wrapped_clf.fit.call_args.args
    assert x_train.shape == (3, 100)
    assert y_train.shape == (3, 100)

    # Run before-mip on a single instance.
    stats: Dict[str, Any] = {}
    solver_mock = Mock()
    comp.before_mip(multiknapsack_h5[0], solver_mock, stats)

    # predict() receives a single test row.
    wrapped_clf.predict.assert_called()
    (x_pred,) = wrapped_clf.predict.call_args.args
    assert x_pred.shape == (1, 100)

    # A single warm start covering all 100 variables is produced.
    solver_mock.set_warm_starts.assert_called()
    var_names, start_vals, _ = solver_mock.set_warm_starts.call_args.args
    assert len(var_names) == 100
    assert start_vals.shape == (1, 100)
47 |
--------------------------------------------------------------------------------
/tests/components/primal/test_mem.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import logging
5 | from typing import List, Dict, Any
6 | from unittest.mock import Mock
7 |
8 | import numpy as np
9 | from sklearn.dummy import DummyClassifier
10 |
11 | from miplearn.components.primal.actions import SetWarmStart
12 | from miplearn.components.primal.mem import (
13 | MemorizingPrimalComponent,
14 | SelectTopSolutions,
15 | MergeTopSolutions,
16 | )
17 | from miplearn.extractors.abstract import FeaturesExtractor
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 |
def test_mem_component(
    multiknapsack_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    """MemorizingPrimalComponent should memorize training solutions and
    replay the top-ranked ones as warm starts."""
    wrapped_clf = Mock(wraps=DummyClassifier())
    comp = MemorizingPrimalComponent(
        wrapped_clf,
        extractor=default_extractor,
        constructor=SelectTopSolutions(2),
        action=SetWarmStart(),
    )
    comp.fit(multiknapsack_h5)

    # fit() receives one feature row and one label per training instance.
    wrapped_clf.fit.assert_called()
    x_train, y_train = wrapped_clf.fit.call_args.args
    assert x_train.shape == (3, 100)
    assert y_train.tolist() == [0, 1, 2]

    # All three solutions and the binary variable names are memorized.
    assert comp.solutions_ is not None
    assert comp.solutions_.shape == (3, 100)
    assert comp.bin_var_names_ is not None
    assert len(comp.bin_var_names_) == 100

    # Run before-mip on a single instance.
    stats: Dict[str, Any] = {}
    solver_mock = Mock()
    comp.before_mip(multiknapsack_h5[0], solver_mock, stats)

    # predict_proba() receives a single test row.
    wrapped_clf.predict_proba.assert_called()
    (x_pred,) = wrapped_clf.predict_proba.call_args.args
    assert x_pred.shape == (1, 100)

    # The two top-ranked memorized solutions become warm starts.
    solver_mock.set_warm_starts.assert_called()
    var_names, start_vals, _ = solver_mock.set_warm_starts.call_args.args
    assert len(var_names) == 100
    assert start_vals.shape == (2, 100)
    for row in range(2):
        assert np.all(start_vals[row, :] == comp.solutions_[row, :])
67 |
68 |
def test_merge_top_solutions() -> None:
    """MergeTopSolutions should average the k best solutions column-wise,
    then round the means through the thresholds, leaving NaN in between."""
    memorized = np.array(
        [
            [0, 1, 0, 0],
            [0, 1, 0, 1],
            [0, 1, 1, 1],
            [0, 1, 1, 1],
            [1, 1, 1, 1],
        ]
    )
    proba = np.array([0.25, 0.25, 0.25, 0.25, 0])
    constructor = MergeTopSolutions(k=4, thresholds=[0.25, 0.75])
    merged = constructor.construct(proba, memorized)

    # Column means over the top-4 rows are 0.00, 1.00, 0.50, 0.75:
    # below/above the thresholds these round to 0/1; 0.50 stays undecided.
    assert merged.shape == (1, 4)
    assert merged[0, 0] == 0
    assert merged[0, 1] == 1
    assert np.isnan(merged[0, 2])
    assert merged[0, 3] == 1
88 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import os
5 | import shutil
6 | import tempfile
7 | from glob import glob
8 | from os.path import dirname, basename, isfile
9 | from tempfile import NamedTemporaryFile
10 | from typing import List, Any
11 |
12 | import pytest
13 |
14 | from miplearn.extractors.abstract import FeaturesExtractor
15 | from miplearn.extractors.fields import H5FieldsExtractor
16 |
17 |
18 | def _h5_fixture(pattern: str, request: Any) -> List[str]:
19 | """
20 | Create a temporary copy of the provided .h5 files, along with the companion
21 | .pkl.gz files, and return the path to the copy. Also register a finalizer,
22 | so that the temporary folder is removed after the tests.
23 | """
24 | filenames = glob(f"{dirname(__file__)}/fixtures/{pattern}")
25 | print(filenames)
26 | tmpdir = tempfile.mkdtemp()
27 |
28 | def cleanup() -> None:
29 | shutil.rmtree(tmpdir)
30 |
31 | request.addfinalizer(cleanup)
32 |
33 | print(tmpdir)
34 | for f in filenames:
35 | fbase, _ = os.path.splitext(f)
36 | for ext in [".h5", ".pkl.gz"]:
37 | dest = os.path.join(tmpdir, f"{basename(fbase)}{ext}")
38 | print(dest)
39 | shutil.copy(f"{fbase}{ext}", dest)
40 | assert isfile(dest)
41 | return sorted(glob(f"{tmpdir}/*.h5"))
42 |
43 |
@pytest.fixture()
def multiknapsack_h5(request: Any) -> List[str]:
    """Return temporary copies of the multiknapsack .h5 fixture files."""
    return _h5_fixture("multiknapsack*.h5", request)
47 |
48 |
@pytest.fixture()
def tsp_gp_h5(request: Any) -> List[str]:
    """Return temporary copies of the TSP (gurobipy) .h5 fixture files."""
    return _h5_fixture("tsp-gp*.h5", request)
52 |
53 |
@pytest.fixture()
def tsp_pyo_h5(request: Any) -> List[str]:
    """Return temporary copies of the TSP (Pyomo) .h5 fixture files."""
    return _h5_fixture("tsp-pyo*.h5", request)
57 |
58 |
@pytest.fixture()
def stab_gp_h5(request: Any) -> List[str]:
    """Return temporary copies of the stable-set (gurobipy) .h5 fixture files."""
    return _h5_fixture("stab-gp*.h5", request)
62 |
63 |
@pytest.fixture()
def stab_pyo_h5(request: Any) -> List[str]:
    """Return temporary copies of the stable-set (Pyomo) .h5 fixture files."""
    return _h5_fixture("stab-pyo*.h5", request)
67 |
68 |
@pytest.fixture()
def default_extractor() -> FeaturesExtractor:
    """Return the extractor used by most component tests: instance features
    drawn from the objective coefficients, variable features from the LP
    relaxation."""
    return H5FieldsExtractor(
        instance_fields=["static_var_obj_coeffs"],
        var_fields=["lp_var_features"],
    )
75 |
--------------------------------------------------------------------------------
/tests/extractors/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/tests/extractors/test_dummy.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from typing import List
6 |
7 | from miplearn.extractors.dummy import DummyExtractor
8 | from miplearn.h5 import H5File
9 |
10 |
def test_dummy(multiknapsack_h5: List[str]) -> None:
    """DummyExtractor should produce single-column features with the
    expected dimensions for instances, variables and constraints."""
    extractor = DummyExtractor()
    with H5File(multiknapsack_h5[0], "r") as h5:
        assert extractor.get_instance_features(h5).shape == (1,)
        assert extractor.get_var_features(h5).shape == (100, 1)
        assert extractor.get_constr_features(h5).shape == (4, 1)
20 |
--------------------------------------------------------------------------------
/tests/extractors/test_fields.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from typing import List
5 |
6 | import pytest
7 |
8 | from miplearn.extractors.fields import H5FieldsExtractor
9 | from miplearn.h5 import H5File
10 |
11 |
def test_fields_instance(multiknapsack_h5: List[str]) -> None:
    """H5FieldsExtractor should concatenate the requested H5 fields."""
    extractor = H5FieldsExtractor(
        instance_fields=[
            "lp_obj_value",
            "lp_var_values",
            "static_var_obj_coeffs",
        ],
        var_fields=["lp_var_values"],
    )
    with H5File(multiknapsack_h5[0], "r") as h5:
        # 1 objective scalar + 100 LP values + 100 coefficients = 201 features
        assert extractor.get_instance_features(h5).shape == (201,)
        # One feature column per variable
        assert extractor.get_var_features(h5).shape == (100, 1)
27 |
28 |
def test_fields_instance_none(multiknapsack_h5: List[str]) -> None:
    """Requesting instance features without configured fields should fail."""
    extractor = H5FieldsExtractor(instance_fields=None)
    with H5File(multiknapsack_h5[0], "r") as h5, pytest.raises(Exception):
        extractor.get_instance_features(h5)
34 |
--------------------------------------------------------------------------------
/tests/fixtures/gen_stab.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 |
3 | import numpy as np
4 | from scipy.stats import uniform, randint
5 |
6 | from miplearn.collectors.basic import BasicCollector
7 | from miplearn.io import write_pkl_gz
8 | from miplearn.problems.stab import (
9 | MaxWeightStableSetGenerator,
10 | build_stab_model_gurobipy,
11 | build_stab_model_pyomo,
12 | )
13 |
14 |
# Generate three stable-set instances on a fixed random graph and collect
# training data for both modeling back-ends.
np.random.seed(42)
generator = MaxWeightStableSetGenerator(
    w=uniform(10.0, scale=1.0),
    n=randint(low=50, high=51),
    p=uniform(loc=0.5, scale=0.0),
    fix_graph=True,
)
instances = generator.generate(3)

solver_params = {"seed": 42, "threads": 1}

for prefix, build_model in [
    ("stab-gp-n50-", build_stab_model_gurobipy),
    ("stab-pyo-n50-", build_stab_model_pyomo),
]:
    filenames = write_pkl_gz(instances, dirname(__file__), prefix=prefix)
    # Bind build_model as a default argument to avoid late-binding issues.
    BasicCollector().collect(
        filenames,
        lambda data, build=build_model: build(data, params=solver_params),
        progress=True,
        verbose=True,
    )
45 |
--------------------------------------------------------------------------------
/tests/fixtures/gen_tsp.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 |
3 | import numpy as np
4 | from scipy.stats import uniform, randint
5 |
6 | from miplearn.collectors.basic import BasicCollector
7 | from miplearn.io import write_pkl_gz
8 | from miplearn.problems.tsp import (
9 | TravelingSalesmanGenerator,
10 | build_tsp_model_gurobipy,
11 | build_tsp_model_pyomo,
12 | )
13 |
# Generate three TSP instances on a fixed set of cities and collect
# training data for both modeling back-ends.
np.random.seed(42)
generator = TravelingSalesmanGenerator(
    x=uniform(loc=0.0, scale=1000.0),
    y=uniform(loc=0.0, scale=1000.0),
    n=randint(low=20, high=21),
    gamma=uniform(loc=1.0, scale=0.25),
    fix_cities=True,
    round=True,
)
instances = generator.generate(3)

solver_params = {"seed": 42, "threads": 1}

for prefix, build_model in [
    ("tsp-gp-n20-", build_tsp_model_gurobipy),
    ("tsp-pyo-n20-", build_tsp_model_pyomo),
]:
    filenames = write_pkl_gz(instances, dirname(__file__), prefix=prefix)
    # Bind build_model as a default argument to avoid late-binding issues.
    BasicCollector().collect(
        filenames,
        lambda data, build=build_model: build(data, params=solver_params),
        progress=True,
        verbose=True,
    )
46 | )
47 |
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00000.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00000.h5
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00000.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00000.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00000.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00000.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00001.h5
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00001.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00001.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00001.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00001.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00002.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00002.h5
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00002.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00002.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/multiknapsack-n100-m4-00002.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/multiknapsack-n100-m4-00002.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00000.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00000.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00000.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00000.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00000.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00000.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00001.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00001.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00001.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00001.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00001.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00002.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00002.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00002.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00002.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-gp-n50-00002.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-gp-n50-00002.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00000.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00000.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00000.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00000.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00000.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00000.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00001.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00001.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00001.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00001.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00001.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00002.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00002.h5
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00002.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00002.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/stab-pyo-n50-00002.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/stab-pyo-n50-00002.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00000.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00000.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00000.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00000.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00000.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00000.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00001.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00001.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00001.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00001.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00001.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00002.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00002.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00002.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00002.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-gp-n20-00002.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-gp-n20-00002.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00000.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00000.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00000.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00000.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00000.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00000.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00001.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00001.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00001.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00001.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00001.pkl.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00002.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00002.h5
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00002.mps.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00002.mps.gz
--------------------------------------------------------------------------------
/tests/fixtures/tsp-pyo-n20-00002.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ANL-CEEESA/MIPLearn/3775c3f78002f3ffe70e20944578f7ec3fbfb2be/tests/fixtures/tsp-pyo-n20-00002.pkl.gz
--------------------------------------------------------------------------------
/tests/problems/__init__.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
--------------------------------------------------------------------------------
/tests/problems/test_binpack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 | from scipy.stats import uniform, randint
7 |
8 | from miplearn.problems.binpack import (
9 | build_binpack_model_gurobipy,
10 | BinPackData,
11 | BinPackGenerator,
12 | )
13 |
14 |
def test_binpack_generator() -> None:
    """BinPackGenerator with fix_items=True should jitter a fixed base
    instance reproducibly, given a fixed numpy seed."""
    np.random.seed(42)
    generator = BinPackGenerator(
        n=randint(low=10, high=11),
        sizes=uniform(loc=0, scale=10),
        capacity=uniform(loc=100, scale=0),
        sizes_jitter=uniform(loc=0.9, scale=0.2),
        capacity_jitter=uniform(loc=0.9, scale=0.2),
        fix_items=True,
    )
    instances = generator.generate(2)
    expected = [
        (
            [3.39, 10.4, 7.81, 5.64, 1.46, 1.46, 0.56, 8.7, 5.93, 6.79],
            102.24,
        ),
        (
            [3.48, 9.11, 7.12, 5.93, 1.65, 1.47, 0.58, 8.82, 5.47, 7.23],
            93.41,
        ),
    ]
    for instance, (exp_sizes, exp_capacity) in zip(instances, expected):
        assert instance.sizes.tolist() == exp_sizes
        assert instance.capacity == exp_capacity
52 |
53 |
def test_binpack() -> None:
    """Solve a small bin packing instance; two bins are optimal."""
    data = BinPackData(
        sizes=np.array([4, 8, 1, 4, 2, 1]),
        capacity=10,
    )
    model = build_binpack_model_gurobipy(data)
    model.optimize()
    assert model.inner.objVal == 2.0
63 |
--------------------------------------------------------------------------------
/tests/problems/test_multiknapsack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 | from scipy.stats import uniform, randint
7 |
8 | from miplearn.problems.multiknapsack import (
9 | MultiKnapsackGenerator,
10 | MultiKnapsackData,
11 | build_multiknapsack_model_gurobipy,
12 | )
13 |
14 |
def test_knapsack_generator() -> None:
    """Check MultiKnapsackGenerator reproducibility across two instances."""
    np.random.seed(42)
    generator = MultiKnapsackGenerator(
        n=randint(low=5, high=6),
        m=randint(low=3, high=4),
        w=randint(low=0, high=1000),
        K=randint(low=500, high=501),
        u=uniform(loc=0.0, scale=1.0),
        alpha=uniform(loc=0.25, scale=0.0),
        fix_w=True,
        w_jitter=uniform(loc=0.9, scale=0.2),
        p_jitter=uniform(loc=0.9, scale=0.2),
        round=True,
    )
    first, second = generator.generate(2)

    assert first.prices.tolist() == [433.0, 477.0, 802.0, 494.0, 458.0]
    assert first.capacities.tolist() == [458.0, 357.0, 392.0]
    assert first.weights.tolist() == [
        [111.0, 392.0, 945.0, 276.0, 108.0],
        [64.0, 633.0, 20.0, 602.0, 110.0],
        [510.0, 203.0, 303.0, 469.0, 85.0],
    ]

    assert second.prices.tolist() == [344.0, 527.0, 658.0, 519.0, 460.0]
    assert second.capacities.tolist() == [449.0, 377.0, 380.0]
    assert second.weights.tolist() == [
        [92.0, 473.0, 871.0, 264.0, 96.0],
        [67.0, 664.0, 21.0, 628.0, 129.0],
        [436.0, 209.0, 309.0, 481.0, 86.0],
    ]
45 |
46 |
def test_knapsack_model() -> None:
    """Build and solve a fixed multi-knapsack instance."""
    weights = np.array(
        [
            [92.0, 473.0, 871.0, 264.0, 96.0],
            [67.0, 664.0, 21.0, 628.0, 129.0],
            [436.0, 209.0, 309.0, 481.0, 86.0],
        ]
    )
    data = MultiKnapsackData(
        prices=np.array([344.0, 527.0, 658.0, 519.0, 460.0]),
        capacities=np.array([449.0, 377.0, 380.0]),
        weights=weights,
    )
    model = build_multiknapsack_model_gurobipy(data)
    model.optimize()
    # Objective is negative: maximization modeled as minimization of -prices.
    assert model.inner.objVal == -460.0
62 |
--------------------------------------------------------------------------------
/tests/problems/test_pmedian.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 | from scipy.stats import uniform, randint
7 |
8 | from miplearn.problems.pmedian import PMedianGenerator, build_pmedian_model_gurobipy
9 |
10 |
def test_pmedian() -> None:
    """Check PMedianGenerator reproducibility, then solve the first instance."""
    np.random.seed(42)
    generator = PMedianGenerator(
        x=uniform(loc=0.0, scale=100.0),
        y=uniform(loc=0.0, scale=100.0),
        n=randint(low=5, high=6),
        p=randint(low=2, high=3),
        demands=uniform(loc=0, scale=20),
        capacities=uniform(loc=0, scale=100),
        distances_jitter=uniform(loc=0.95, scale=0.1),
        demands_jitter=uniform(loc=0.95, scale=0.1),
        capacities_jitter=uniform(loc=0.95, scale=0.1),
        fixed=True,
    )
    first, second = generator.generate(2)

    assert first.p == 2
    assert first.demands.tolist() == [0.41, 19.4, 16.65, 4.25, 3.64]
    assert first.capacities.tolist() == [18.34, 30.42, 52.48, 43.19, 29.12]
    assert first.distances.tolist() == [
        [0.0, 50.17, 82.42, 32.76, 33.2],
        [50.17, 0.0, 72.64, 72.51, 17.06],
        [82.42, 72.64, 0.0, 71.69, 70.92],
        [32.76, 72.51, 71.69, 0.0, 56.56],
        [33.2, 17.06, 70.92, 56.56, 0.0],
    ]

    assert second.p == 2
    assert second.demands.tolist() == [0.42, 19.03, 16.68, 4.27, 3.53]
    assert second.capacities.tolist() == [19.2, 31.26, 54.79, 44.9, 29.41]
    assert second.distances.tolist() == [
        [0.0, 51.6, 83.31, 33.77, 31.95],
        [51.6, 0.0, 70.25, 71.09, 17.05],
        [83.31, 70.25, 0.0, 68.81, 67.62],
        [33.77, 71.09, 68.81, 0.0, 58.88],
        [31.95, 17.05, 67.62, 58.88, 0.0],
    ]

    # Solve the first instance and sanity-check model size and objective.
    model = build_pmedian_model_gurobipy(first)
    assert model.inner.numVars == 30
    assert model.inner.numConstrs == 11
    model.optimize()
    assert round(model.inner.objVal) == 107
54 |
--------------------------------------------------------------------------------
/tests/problems/test_setcover.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from tempfile import NamedTemporaryFile
6 |
7 | import numpy as np
8 | from scipy.stats import randint, uniform
9 |
10 | from miplearn.h5 import H5File
11 | from miplearn.problems.setcover import (
12 | SetCoverData,
13 | build_setcover_model_gurobipy,
14 | SetCoverGenerator,
15 | build_setcover_model_pyomo,
16 | )
17 | from miplearn.solvers.abstract import AbstractModel
18 |
19 |
def test_set_cover_generator() -> None:
    """With fix_sets=False, each instance gets its own incidence matrix."""
    np.random.seed(42)
    generator = SetCoverGenerator(
        n_elements=randint(low=3, high=4),
        n_sets=randint(low=5, high=6),
        costs=uniform(loc=0.0, scale=100.0),
        costs_jitter=uniform(loc=0.95, scale=0.10),
        density=uniform(loc=0.5, scale=0),
        K=uniform(loc=25, scale=0),
        fix_sets=False,
    )
    first, second = generator.generate(2)

    assert first.costs.round(1).tolist() == [136.8, 86.2, 25.7, 27.3, 102.5]
    assert first.incidence_matrix.tolist() == [
        [1, 0, 1, 0, 1],
        [1, 1, 0, 0, 0],
        [1, 0, 0, 1, 1],
    ]
    assert second.costs.round(1).tolist() == [63.5, 76.6, 48.1, 74.1, 93.3]
    assert second.incidence_matrix.tolist() == [
        [1, 1, 0, 1, 1],
        [0, 1, 0, 1, 0],
        [0, 1, 1, 0, 0],
    ]
45 |
46 |
def test_set_cover_generator_with_fixed_sets() -> None:
    """With fix_sets=True, all instances share one incidence matrix.

    Costs still vary per instance (jitter), but the matrix is sampled once.
    """
    np.random.seed(42)
    gen = SetCoverGenerator(
        n_elements=randint(low=3, high=4),
        n_sets=randint(low=5, high=6),
        costs=uniform(loc=0.0, scale=100.0),
        costs_jitter=uniform(loc=0.95, scale=0.10),
        density=uniform(loc=0.5, scale=0.00),
        fix_sets=True,
    )
    data = gen.generate(3)

    assert data[0].costs.tolist() == [136.75, 86.17, 25.71, 27.31, 102.48]
    assert data[1].costs.tolist() == [135.38, 82.26, 26.92, 26.58, 98.28]
    assert data[2].costs.tolist() == [138.37, 85.15, 26.95, 27.22, 106.17]

    # All three instances must share the exact same (fixed) incidence matrix.
    # (A leftover debug print of the matrix was removed here.)
    for i in range(3):
        assert data[i].incidence_matrix.tolist() == [
            [1, 0, 1, 0, 1],
            [1, 1, 0, 0, 0],
            [1, 0, 0, 1, 1],
        ]
71 |
72 |
def test_set_cover() -> None:
    """Solve a small instance with both backends; optimum is 11 (sets 0 and 3)."""
    incidence = np.array(
        [
            [1, 0, 0, 1, 0],
            [1, 1, 0, 0, 0],
            [0, 0, 1, 1, 1],
        ],
    )
    data = SetCoverData(costs=np.array([5, 10, 12, 6, 8]), incidence_matrix=incidence)
    models = [
        build_setcover_model_pyomo(data),
        build_setcover_model_gurobipy(data),
    ]
    for model in models:
        assert isinstance(model, AbstractModel)
        with NamedTemporaryFile() as tempfile:
            with H5File(tempfile.name) as h5:
                model.optimize()
                model.extract_after_mip(h5)
                assert h5.get_scalar("mip_obj_value") == 11.0
94 |
--------------------------------------------------------------------------------
/tests/problems/test_setpack.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 |
7 | from miplearn.problems.setpack import (
8 | SetPackData,
9 | build_setpack_model_gurobipy,
10 | )
11 |
12 |
def test_setpack() -> None:
    """Solve a small set packing instance and verify the optimal objective."""
    data = SetPackData(
        costs=np.array([5, 10, 12, 6, 8]),
        incidence_matrix=np.array(
            [
                [1, 0, 0, 1, 0],
                [1, 1, 0, 0, 0],
                [0, 0, 1, 1, 1],
            ],
        ),
    )
    model = build_setpack_model_gurobipy(data)
    model.optimize()
    # Maximization modeled as minimization of negated costs, hence -22.
    # `objVal` (capitalized) matches the spelling used by the other tests;
    # gurobipy attribute access is case-insensitive, so behavior is unchanged.
    assert model.inner.objVal == -22.0
27 |
--------------------------------------------------------------------------------
/tests/problems/test_stab.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from tempfile import NamedTemporaryFile
5 |
6 | import networkx as nx
7 | import numpy as np
8 |
9 | from miplearn.h5 import H5File
10 | from miplearn.problems.stab import (
11 | MaxWeightStableSetData,
12 | build_stab_model_gurobipy,
13 | build_stab_model_pyomo,
14 | )
15 | from miplearn.solvers.abstract import AbstractModel
16 |
17 |
def test_stab() -> None:
    """Max-weight stable set on a unit-weight 5-cycle: both backends reach -2."""
    data = MaxWeightStableSetData(
        graph=nx.cycle_graph(5),
        weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
    )
    models = [
        build_stab_model_gurobipy(data),
        build_stab_model_pyomo(data),
    ]
    for model in models:
        assert isinstance(model, AbstractModel)
        with NamedTemporaryFile() as tmp:
            with H5File(tmp.name) as h5:
                model.optimize()
                model.extract_after_mip(h5)
                # Objective is negated (min form); stable set of size 2.
                assert h5.get_scalar("mip_obj_value") == -2.0
33 |
--------------------------------------------------------------------------------
/tests/problems/test_tsp.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 | from miplearn.problems.tsp import (
7 | TravelingSalesmanData,
8 | TravelingSalesmanGenerator,
9 | build_tsp_model_gurobipy,
10 | )
11 | from scipy.spatial.distance import pdist, squareform
12 | from scipy.stats import randint, uniform
13 |
14 |
def test_tsp_generator() -> None:
    """Check that TravelingSalesmanGenerator yields reproducible distances."""
    np.random.seed(42)
    generator = TravelingSalesmanGenerator(
        x=uniform(loc=0.0, scale=1000.0),
        y=uniform(loc=0.0, scale=1000.0),
        n=randint(low=3, high=4),
        gamma=uniform(loc=1.0, scale=0.25),
        fix_cities=True,
        round=True,
    )
    first, second = generator.generate(2)
    assert first.distances.tolist() == [
        [0.0, 591.0, 996.0],
        [591.0, 0.0, 765.0],
        [996.0, 765.0, 0.0],
    ]
    assert second.distances.tolist() == [
        [0.0, 556.0, 853.0],
        [556.0, 0.0, 779.0],
        [853.0, 779.0, 0.0],
    ]
36 |
37 |
def test_tsp() -> None:
    """Solve a 6-city TSP on points in the plane and check the selected edges."""
    points = [
        [0.0, 0.0],
        [1.0, 0.0],
        [2.0, 0.0],
        [3.0, 0.0],
        [0.0, 1.0],
        [3.0, 1.0],
    ]
    data = TravelingSalesmanData(
        n_cities=6,
        distances=squareform(pdist(points)),
    )
    model = build_tsp_model_gurobipy(data)
    model.optimize()
    # Expected 0/1 values for the 15 edge variables of the optimal tour.
    expected = [
        1.0, 0.0, 0.0, 1.0, 0.0,
        1.0, 0.0, 0.0, 0.0, 1.0,
        0.0, 0.0, 0.0, 1.0, 1.0,
    ]
    assert model.inner.getAttr("x", model.inner.getVars()) == expected
73 |
--------------------------------------------------------------------------------
/tests/problems/test_uc.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import numpy as np
6 | from scipy.stats import uniform, randint
7 |
8 | from miplearn.problems.uc import (
9 | UnitCommitmentData,
10 | build_uc_model_gurobipy,
11 | UnitCommitmentGenerator,
12 | )
13 |
14 |
def test_generator() -> None:
    """Check UnitCommitmentGenerator reproducibility across two instances."""
    np.random.seed(42)
    generator = UnitCommitmentGenerator(
        n_units=randint(low=3, high=4),
        n_periods=randint(low=4, high=5),
        max_power=uniform(loc=50, scale=450),
        min_power=uniform(loc=0.25, scale=0.5),
        cost_startup=uniform(loc=1, scale=1),
        cost_prod=uniform(loc=1, scale=1),
        cost_fixed=uniform(loc=1, scale=1),
        min_uptime=randint(low=1, high=8),
        min_downtime=randint(low=1, high=8),
        cost_jitter=uniform(loc=0.75, scale=0.5),
        demand_jitter=uniform(loc=0.9, scale=0.2),
        fix_units=True,
    )
    first, second = generator.generate(2)

    assert first.demand.tolist() == [430.3, 518.65, 448.16, 860.61]
    assert first.min_power.tolist() == [120.05, 156.73, 124.44]
    assert first.max_power.tolist() == [218.54, 477.82, 379.4]
    assert first.min_uptime.tolist() == [3, 3, 5]
    assert first.min_downtime.tolist() == [4, 3, 6]
    assert first.cost_startup.tolist() == [1.06, 1.72, 1.94]
    assert first.cost_prod.tolist() == [1.0, 1.99, 1.62]
    assert first.cost_fixed.tolist() == [1.61, 1.01, 1.02]

    # Unit data (power limits, up/down times) is identical across instances,
    # consistent with fix_units=True; demand and costs are jittered.
    assert second.demand.tolist() == [407.3, 476.18, 458.77, 840.38]
    assert second.min_power.tolist() == [120.05, 156.73, 124.44]
    assert second.max_power.tolist() == [218.54, 477.82, 379.4]
    assert second.min_uptime.tolist() == [3, 3, 5]
    assert second.min_downtime.tolist() == [4, 3, 6]
    assert second.cost_startup.tolist() == [1.32, 1.69, 2.29]
    assert second.cost_prod.tolist() == [1.09, 1.94, 1.23]
    assert second.cost_fixed.tolist() == [1.97, 1.04, 0.96]
50 |
51 |
def test_uc() -> None:
    """Solve a small 3-unit, 6-period unit commitment instance."""
    model = build_uc_model_gurobipy(
        UnitCommitmentData(
            demand=np.array([10, 12, 15, 10, 8, 5]),
            min_power=np.array([5, 5, 10]),
            max_power=np.array([10, 8, 20]),
            min_uptime=np.array([4, 3, 2]),
            min_downtime=np.array([4, 3, 2]),
            cost_startup=np.array([100, 120, 200]),
            cost_prod=np.array([1.0, 1.25, 1.5]),
            cost_fixed=np.array([10, 12, 9]),
        )
    )
    model.optimize()
    assert model.inner.objVal == 154.5
66 |
67 |
# Ad-hoc entry point: build and solve one randomly generated instance when
# this module is executed directly. Not used by pytest.
if __name__ == "__main__":
    data = UnitCommitmentGenerator().generate(1)[0]
    model = build_uc_model_gurobipy(data)
    model.optimize()
72 |
--------------------------------------------------------------------------------
/tests/problems/test_vertexcover.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | import networkx as nx
6 | import numpy as np
7 |
8 | from miplearn.problems.vertexcover import (
9 | MinWeightVertexCoverData,
10 | build_vertexcover_model_gurobipy,
11 | )
12 |
13 |
def test_stab() -> None:
    """Min-weight vertex cover on a unit-weight 5-cycle: optimum is 3."""
    # NOTE(review): despite the name, this exercises the vertex cover model;
    # consider renaming to test_vertexcover.
    data = MinWeightVertexCoverData(
        graph=nx.cycle_graph(5),
        weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
    )
    model = build_vertexcover_model_gurobipy(data)
    model.optimize()
    assert model.inner.objVal == 3.0
22 |
--------------------------------------------------------------------------------
/tests/test_h5.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | from tempfile import NamedTemporaryFile
5 | from typing import Any
6 |
7 | import numpy as np
8 | from scipy.sparse import coo_matrix
9 |
10 | from miplearn.h5 import H5File
11 |
12 |
def test_h5() -> None:
    """Round-trip scalars, arrays and sparse matrices through an H5File."""
    # Use context managers so both the temporary file and the HDF5 handle are
    # closed even if an assertion fails; the original left both open, unlike
    # the other tests in this suite, which use `with H5File(...)`.
    with NamedTemporaryFile() as file:
        with H5File(file.name) as h5:
            _assert_roundtrip_scalar(h5, "A")
            _assert_roundtrip_scalar(h5, True)
            _assert_roundtrip_scalar(h5, 1)
            _assert_roundtrip_scalar(h5, 1.0)
            assert h5.get_scalar("unknown-key") is None

            _assert_roundtrip_array(h5, np.array([True, False]))
            _assert_roundtrip_array(h5, np.array([1, 2, 3]))
            _assert_roundtrip_array(h5, np.array([1.0, 2.0, 3.0]))
            _assert_roundtrip_array(h5, np.array(["A", "BB", "CCC"], dtype="S"))
            assert h5.get_array("unknown-key") is None

            _assert_roundtrip_sparse(
                h5,
                coo_matrix(
                    [
                        [1.0, 0.0, 0.0],
                        [0.0, 2.0, 3.0],
                        [0.0, 0.0, 4.0],
                    ],
                ),
            )
            assert h5.get_sparse("unknown-key") is None
39 |
40 |
def _assert_roundtrip_array(h5: H5File, original: np.ndarray) -> None:
    """Store *original* under a key and check it reads back equal."""
    h5.put_array("key", original)
    restored = h5.get_array("key")
    assert restored is not None
    assert isinstance(restored, np.ndarray)
    assert (restored == original).all()
47 |
48 |
def _assert_roundtrip_scalar(h5: H5File, original: Any) -> None:
    """Store *original* under a key and check both value and type survive."""
    h5.put_scalar("key", original)
    restored = h5.get_scalar("key")
    assert restored == original
    assert restored is not None
    assert isinstance(
        restored, original.__class__
    ), f"Expected {original.__class__}, found {restored.__class__} instead"
57 |
58 |
def _assert_roundtrip_sparse(h5: H5File, original: coo_matrix) -> None:
    """Store a sparse matrix under a key and check it reads back equal."""
    h5.put_sparse("key", original)
    restored = h5.get_sparse("key")
    assert restored is not None
    assert isinstance(restored, coo_matrix)
    # Element-wise inequality count of zero means the matrices are identical.
    assert (original != restored).sum() == 0
65 |
--------------------------------------------------------------------------------
/tests/test_lazy_pyomo.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 | import logging
5 | from typing import Any, Hashable, List
6 |
7 | import pyomo.environ as pe
8 |
9 | from miplearn.solvers.pyomo import PyomoModel
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
def _build_model() -> PyomoModel:
    """Build a tiny integer model wired with lazy-constraint callbacks.

    The objective favors large x, but the separation callback reports any
    solution with x > 0.5 as a violation and the enforcement callback cuts
    it off, so the optimizer is driven down to x = 0.
    """
    m = pe.ConcreteModel()
    m.x = pe.Var(bounds=(0, 5), domain=pe.Integers)
    m.obj = pe.Objective(expr=-m.x)  # maximize x via minimizing -x
    m.cons = pe.ConstraintList()

    def lazy_separate(model: PyomoModel) -> List[Hashable]:
        # Pull the candidate value of x from the solver, then report the
        # value itself as the "violation" object when it exceeds 0.5.
        model.solver.cbGetSolution(vars=[m.x])
        if m.x.value > 0.5:
            return [m.x.value]
        else:
            return []

    def lazy_enforce(model: PyomoModel, violations: List[Any]) -> None:
        # For each violated value v, add the cut x <= round(v - 1).
        for v in violations:
            model.add_constr(m.cons.add(m.x <= round(v - 1)))

    return PyomoModel(
        m,
        "gurobi_persistent",
        lazy_separate=lazy_separate,
        lazy_enforce=lazy_enforce,
    )
37 |
38 |
def test_pyomo_callback() -> None:
    """The lazy callback must fire at least once and force x down to zero."""
    model = _build_model()
    model.optimize()
    violations = model._lazy
    assert violations is not None
    assert len(violations) > 0
    assert model.inner.x.value == 0.0
45 |
--------------------------------------------------------------------------------
/tests/test_solvers.py:
--------------------------------------------------------------------------------
1 | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
2 | # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
3 | # Released under the modified BSD license. See COPYING.md for more details.
4 |
5 | from tempfile import NamedTemporaryFile
6 | from typing import Callable, Any
7 |
8 | import numpy as np
9 | import pytest
10 |
11 | from miplearn.h5 import H5File
12 | from miplearn.problems.setcover import (
13 | SetCoverData,
14 | build_setcover_model_gurobipy,
15 | build_setcover_model_pyomo,
16 | )
17 | from miplearn.solvers.abstract import AbstractModel
18 |
19 | inf = float("inf")
20 |
21 |
@pytest.fixture
def data() -> SetCoverData:
    """Small set cover instance shared by the solver tests."""
    incidence = np.array(
        [
            [1, 0, 0, 1, 0],
            [1, 1, 0, 0, 0],
            [0, 0, 1, 1, 1],
        ],
    )
    return SetCoverData(costs=np.array([5, 10, 12, 6, 8]), incidence_matrix=incidence)
34 |
35 |
def test_gurobi(data: SetCoverData) -> None:
    """Run the full solver check suite against the gurobipy backend."""
    _test_solver(build_setcover_model_gurobipy, data)
38 |
39 |
def test_pyomo_persistent(data: SetCoverData) -> None:
    """Run the full solver check suite against the pyomo gurobi_persistent backend."""
    _test_solver(lambda d: build_setcover_model_pyomo(d, "gurobi_persistent"), data)
42 |
43 |
def _test_solver(build_model: Callable, data: Any) -> None:
    """Run each solver check on a fresh model built from *data*."""
    checks = (_test_extract, _test_add_constr, _test_fix_vars, _test_infeasible)
    for check in checks:
        # Each check mutates the model, so build a new one every time.
        check(build_model(data))
49 |
50 |
def _test_extract(model: AbstractModel) -> None:
    """Solve the set cover instance (LP relaxation, then MIP) and verify every
    field the solver interface extracts into the HDF5 file at each stage."""
    with NamedTemporaryFile() as tempfile:
        with H5File(tempfile.name) as h5:

            def test_scalar(key: str, expected_value: Any) -> None:
                # Scalar must be present and match exactly.
                actual_value = h5.get_scalar(key)
                assert actual_value is not None
                assert actual_value == expected_value

            def test_array(key: str, expected_value: Any) -> None:
                # Array must be present and match element-wise.
                actual_value = h5.get_array(key)
                assert actual_value is not None
                assert actual_value.tolist() == expected_value

            def test_sparse(key: str, expected_value: Any) -> None:
                # Sparse matrix must be present; compare via its dense form.
                actual_value = h5.get_sparse(key)
                assert actual_value is not None
                assert actual_value.todense().tolist() == expected_value

            # --- Static (pre-solve) problem data -------------------------
            model.extract_after_load(h5)
            test_sparse(
                "static_constr_lhs",
                [
                    [1.0, 0.0, 0.0, 1.0, 0.0],
                    [1.0, 1.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 1.0, 1.0],
                ],
            )
            test_array("static_constr_names", [b"eqs[0]", b"eqs[1]", b"eqs[2]"])
            test_array("static_constr_rhs", [1, 1, 1])
            test_array("static_constr_sense", [b">", b">", b">"])
            test_scalar("static_obj_offset", 0.0)
            test_scalar("static_sense", "min")
            test_array("static_var_lower_bounds", [0.0, 0.0, 0.0, 0.0, 0.0])
            test_array(
                "static_var_names",
                [
                    b"x[0]",
                    b"x[1]",
                    b"x[2]",
                    b"x[3]",
                    b"x[4]",
                ],
            )
            test_array("static_var_obj_coeffs", [5.0, 10.0, 12.0, 6.0, 8.0])
            test_array("static_var_types", [b"B", b"B", b"B", b"B", b"B"])
            test_array("static_var_upper_bounds", [1.0, 1.0, 1.0, 1.0, 1.0])

            # --- LP relaxation results -----------------------------------
            relaxed = model.relax()
            relaxed.optimize()
            relaxed.extract_after_lp(h5)
            test_array("lp_constr_dual_values", [0, 5, 6])
            test_array("lp_constr_slacks", [1, 0, 0])
            test_scalar("lp_obj_value", 11.0)
            test_array("lp_var_reduced_costs", [0.0, 5.0, 6.0, 0.0, 2.0])
            test_array("lp_var_values", [1.0, 0.0, 0.0, 1.0, 0.0])
            # Basis status and sensitivity data are backend-dependent.
            if model._supports_basis_status:
                test_array("lp_var_basis_status", [b"B", b"L", b"L", b"B", b"L"])
                test_array("lp_constr_basis_status", [b"B", b"N", b"N"])
            if model._supports_sensitivity_analysis:
                test_array("lp_constr_sa_rhs_up", [2, 1, 1])
                test_array("lp_constr_sa_rhs_down", [-inf, 0, 0])
                test_array("lp_var_sa_obj_up", [10.0, inf, inf, 8.0, inf])
                test_array("lp_var_sa_obj_down", [0.0, 5.0, 6.0, 0.0, 6.0])
                test_array("lp_var_sa_ub_up", [inf, inf, inf, inf, inf])
                test_array("lp_var_sa_ub_down", [1.0, 0.0, 0.0, 1.0, 0.0])
                test_array("lp_var_sa_lb_up", [1.0, 1.0, 1.0, 1.0, 1.0])
                test_array("lp_var_sa_lb_down", [-inf, 0.0, 0.0, -inf, 0.0])
            lp_wallclock_time = h5.get_scalar("lp_wallclock_time")
            assert lp_wallclock_time is not None
            assert lp_wallclock_time >= 0

            # --- MIP results ---------------------------------------------
            model.optimize()
            model.extract_after_mip(h5)
            test_array("mip_constr_slacks", [1, 0, 0])
            test_array("mip_var_values", [1.0, 0.0, 0.0, 1.0, 0.0])
            test_scalar("mip_gap", 0)
            test_scalar("mip_obj_bound", 11.0)
            test_scalar("mip_obj_value", 11.0)
            mip_wallclock_time = h5.get_scalar("mip_wallclock_time")
            assert mip_wallclock_time is not None
            assert mip_wallclock_time > 0
            # Node count and solution pool are backend-dependent features.
            if model._supports_node_count:
                count = h5.get_scalar("mip_node_count")
                assert count is not None
                assert count >= 0
            if model._supports_solution_pool:
                pool_var_values = h5.get_array("pool_var_values")
                pool_obj_values = h5.get_array("pool_obj_values")
                assert pool_var_values is not None
                assert pool_obj_values is not None
                assert len(pool_obj_values.shape) == 1
                n_sols = len(pool_obj_values)
                assert pool_var_values.shape == (n_sols, 5)
145 |
146 |
def _test_add_constr(model: AbstractModel) -> None:
    """Add equality constraints pinning x[2] and x[3] to zero, then re-solve."""
    with NamedTemporaryFile() as tmp:
        with H5File(tmp.name) as h5:
            model.add_constrs(
                np.array([b"x[2]", b"x[3]"], dtype="S"),
                np.array([[0, 1], [1, 0]]),
                np.array(["=", "="], dtype="S"),
                np.array([0, 0]),
            )
            model.optimize()
            model.extract_after_mip(h5)
            solution = h5.get_array("mip_var_values")
            assert solution is not None
            assert solution.tolist() == [1, 0, 0, 0, 1]
161 |
162 |
def _test_fix_vars(model: AbstractModel) -> None:
    """Fixing x[2] = x[3] = 0 must shift the optimum to [1, 0, 0, 0, 1]."""
    with NamedTemporaryFile() as tmp:
        with H5File(tmp.name) as h5:
            model.fix_variables(
                var_names=np.array([b"x[2]", b"x[3]"], dtype="S"),
                var_values=np.array([0, 0]),
            )
            model.optimize()
            model.extract_after_mip(h5)
            solution = h5.get_array("mip_var_values")
            assert solution is not None
            assert solution.tolist() == [1, 0, 0, 0, 1]
175 |
176 |
def _test_infeasible(model: AbstractModel) -> None:
    """Fixing x[0] = x[3] = 0 makes the instance infeasible: no MIP values."""
    with NamedTemporaryFile() as tmp:
        with H5File(tmp.name) as h5:
            model.fix_variables(
                var_names=np.array([b"x[0]", b"x[3]"], dtype="S"),
                var_values=np.array([0, 0]),
            )
            model.optimize()
            model.extract_after_mip(h5)
            # Infeasible model: no solution is extracted into the file.
            assert h5.get_array("mip_var_values") is None
187 |
--------------------------------------------------------------------------------