├── requirements.txt ├── pymhlib ├── demos │ ├── __init__.py │ ├── data │ │ ├── mknap-small.txt │ │ ├── maxsat-simple.cnf │ │ ├── xqf131.tsp │ │ ├── bur26a.dat │ │ └── mknapcb5-01.txt │ ├── julia-maxsat-main.jl │ ├── julia-maxsat.py │ ├── graphs.py │ ├── dd_mkp.py │ ├── julia-maxsat2.py │ ├── misp.py │ ├── qap.py │ ├── graph_coloring.py │ ├── mkp.py │ ├── tsp.py │ ├── common.py │ ├── maxsat.py │ ├── vertex_cover.py │ ├── julia-maxsat.jl │ └── vertex_cover.ipynb ├── tests │ ├── __init__.py │ └── test_all.py ├── multi_run_summary.cfg ├── __init__.py ├── r1.sum ├── r2.sum ├── pbig.py ├── par_alns.py ├── gvns.py ├── ssga.py ├── population.py ├── binvec_solution.py ├── log.py ├── sa.py ├── multi_run_summary.py ├── settings.py ├── solution.py ├── decision_diag.py └── alns.py ├── mhlib.egg-info ├── top_level.txt ├── dependency_links.txt ├── SOURCES.txt └── PKG-INFO ├── mh.png ├── vcp1.pdf ├── vcp2.pdf ├── .gitignore ├── .pylintrc ├── .vscode ├── tasks.json ├── settings.json └── .ropeproject │ └── config.py ├── setup.py ├── .github └── workflows │ └── CI.yml └── README.md /requirements.txt: -------------------------------------------------------------------------------- 1 | . 2 | -------------------------------------------------------------------------------- /pymhlib/demos/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pymhlib/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /mhlib.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | mhlib 2 | -------------------------------------------------------------------------------- /mhlib.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /mh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ac-tuwien/pymhlib/HEAD/mh.png -------------------------------------------------------------------------------- /vcp1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ac-tuwien/pymhlib/HEAD/vcp1.pdf -------------------------------------------------------------------------------- /vcp2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ac-tuwien/pymhlib/HEAD/vcp2.pdf -------------------------------------------------------------------------------- /pymhlib/demos/data/mknap-small.txt: -------------------------------------------------------------------------------- 1 | 5 1 0 2 | 5 3 2 7 4 3 | 7 6 5 8 5 4 | 15 5 | -------------------------------------------------------------------------------- /pymhlib/multi_run_summary.cfg: -------------------------------------------------------------------------------- 1 | fetch: 2 | '[ (50, "mse", r"T.*training data.*mse = (\d+.\d*)") ]' -------------------------------------------------------------------------------- /pymhlib/__init__.py: -------------------------------------------------------------------------------- 1 | """pymhlib - A Toolbox for Metaheuristics and Hybrid Optimization Methods.""" 2 | name = "pymhlib" 3 | 
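The directory tree above shows how the package is organized: problem-independent metaheuristic schedulers (gvns.py, alns.py, par_alns.py, pbig.py, ssga.py, sa.py) and generic solution classes live directly under pymhlib/, while ready-to-run example applications and their instance files live under pymhlib/demos/. As a rough, hedged sketch of how these pieces are typically combined — based only on the calls visible in pymhlib/tests/test_all.py and the demo modules further down, not on any additional API — a demo problem can be solved with one of the schedulers roughly like this:

from pymhlib.settings import get_settings_parser, settings, seed_random_generators
from pymhlib.demos.common import run_optimization, data_dir, add_general_arguments_and_parse_settings
from pymhlib.demos.maxsat import MAXSATInstance, MAXSATSolution

parser = get_settings_parser()
add_general_arguments_and_parse_settings(args=[])  # parse default settings only
seed_random_generators(42)                         # make the run reproducible
settings.inst_file = data_dir + "maxsat-adv1.cnf"  # instance file used by the tests below
settings.alg = 'gvns'                              # select the GVNS scheduler
settings.mh_titer = 100                            # iteration limit
solution = run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution, embedded=True)
print(solution.obj())                              # number of satisfied clauses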
-------------------------------------------------------------------------------- /pymhlib/demos/data/maxsat-simple.cnf: -------------------------------------------------------------------------------- 1 | c 2 | c A very simple instance 3 | c 4 | p cnf 5 3 5 | 1 -5 4 0 6 | -1 5 3 4 0 7 | -3 -4 0 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | .idea 3 | *.pyc 4 | .ipynb_checkpoints 5 | venv/ 6 | build/ 7 | dist/ 8 | pymhlib.egg-info/ 9 | .vscode/.ropeproject/objectdb 10 | pymhlib/aggregate.py.old 11 | pymhlib/tt 12 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [TYPECHECK] 2 | ignored-classes=Namespace 3 | 4 | [FORMAT] 5 | max-line-length=120 6 | 7 | [MESSAGES] 8 | disable=invalid-name, 9 | too-few-public-methods, too-many-arguments, too-many-nested-blocks, too-many-instance-attributes, 10 | too-many-locals, too-many-boolean-expressions, 11 | duplicate-code 12 | -------------------------------------------------------------------------------- /pymhlib/r1.sum: -------------------------------------------------------------------------------- 1 | file obj 2 | tt/run-10-2021-02-19-14-18-03/run-10-2021-02-19-14-18-03.log 2.187878 3 | tt/run-1-2021-02-19-14-18-03/run-1-2021-02-19-14-18-03.log 1.9348861 4 | tt/run-2-2021-02-19-14-18-03/run-2-2021-02-19-14-18-03.log 2.2181625 5 | tt/run-3-2021-02-19-14-18-03/run-3-2021-02-19-14-18-03.log 1.6466211 6 | tt/run-4-2021-02-19-14-18-03/run-4-2021-02-19-14-18-03.log 1.8348815 7 | tt/run-5-2021-02-19-14-18-03/run-5-2021-02-19-14-18-03.log 1.9465961 8 | tt/run-6-2021-02-19-14-18-03/run-6-2021-02-19-14-18-03.log 2.0209966 9 | tt/run-7-2021-02-19-14-18-03/run-7-2021-02-19-14-18-03.log 2.2104063 10 | tt/run-8-2021-02-19-14-18-03/run-8-2021-02-19-14-18-03.log 1.7809341 11 | tt/run-9-2021-02-19-14-18-03/run-9-2021-02-19-14-18-03.log 1.8700649 12 | 13 | -------------------------------------------------------------------------------- /pymhlib/r2.sum: -------------------------------------------------------------------------------- 1 | file obj 2 | t2/run-10-2021-02-19-14-18-03/run-10-2021-02-19-14-18-03.log 2.187878 3 | t2/run-1-2021-02-19-14-18-03/run-1-2021-02-19-14-18-03.log 1.9348861 4 | t2/run-2-2021-02-19-14-18-03/run-2-2021-02-19-14-18-03.log 2.2181625 5 | t2/run-3-2021-02-19-14-18-03/run-3-2021-02-19-14-18-03.log 1.6466211 6 | t2/run-4-2021-02-19-14-18-03/run-4-2021-02-19-14-18-03.log 3.8348815 7 | t2/run-5-2021-02-19-14-18-03/run-5-2021-02-19-14-18-03.log 2.9465961 8 | t2/run-6-2021-02-19-14-18-03/run-6-2021-02-19-14-18-03.log 2.0209966 9 | t2/run-7-2021-02-19-14-18-03/run-7-2021-02-19-14-18-03.log 2.2104063 10 | t2/run-8-2021-02-19-14-18-03/run-8-2021-02-19-14-18-03.log 1.7809341 11 | t2/run-9-2021-02-19-14-18-03/run-9-2021-02-19-14-18-03.log 1.8700649 12 | 13 | -------------------------------------------------------------------------------- /pymhlib/demos/julia-maxsat-main.jl: -------------------------------------------------------------------------------- 1 | #!/usr/local/bin/julia 2 | """Demo application for showing the integration with the Julia language, solving the MAXSAT problem. 3 | 4 | Julia must be installed properly with package PyCall. 
5 | This is a Julia main program that uses pymhlib for the MAXSAT problem instance and 6 | metaheuristics but julia-maxsat.jl for the concrete solution class 7 | for performance reasons. 8 | 9 | Note that there is the alternative Python main module julia-maxsat.py. 10 | 11 | The goal in the MAXSAT problem is to maximize the number of clauses satisfied in a boolean 12 | function given in conjunctive normal form. 13 | """ 14 | 15 | ENV["PYTHONPATH"] = "." 16 | using PyCall 17 | 18 | maxsat = pyimport("pymhlib.demos.maxsat") 19 | 20 | include("julia-maxsat.jl") 21 | # using JuliaMAXSAT 22 | 23 | common = pyimport("pymhlib.demos.common") 24 | settings = pyimport("pymhlib.settings") 25 | parser = settings.get_settings_parser() 26 | parser.set_defaults(mh_titer=1000) 27 | common.run_optimization("Julia-MAXSAT", JuliaMAXSAT.JuliaMAXSATInstance, 28 | JuliaMAXSAT.JuliaMAXSATSolution, common.data_dir*"maxsat-adv1.cnf") 29 | -------------------------------------------------------------------------------- /mhlib.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | README.md 2 | setup.py 3 | mhlib/__init__.py 4 | mhlib/aggregate_results.py 5 | mhlib/alns.py 6 | mhlib/binvec_solution.py 7 | mhlib/decision_diag.py 8 | mhlib/gvns.py 9 | mhlib/log.py 10 | mhlib/multi_run_summary.py 11 | mhlib/par_alns.py 12 | mhlib/pbig.py 13 | mhlib/permutation_solution.py 14 | mhlib/population.py 15 | mhlib/sa.py 16 | mhlib/scheduler.py 17 | mhlib/settings.py 18 | mhlib/solution.py 19 | mhlib/ssga.py 20 | mhlib/subsetvec_solution.py 21 | mhlib.egg-info/PKG-INFO 22 | mhlib.egg-info/SOURCES.txt 23 | mhlib.egg-info/dependency_links.txt 24 | mhlib.egg-info/top_level.txt 25 | mhlib/demos/__init__.py 26 | mhlib/demos/common.py 27 | mhlib/demos/dd_mkp.py 28 | mhlib/demos/graph_coloring.py 29 | mhlib/demos/graphs.py 30 | mhlib/demos/julia-maxsat.py 31 | mhlib/demos/julia-maxsat2.py 32 | mhlib/demos/maxsat.py 33 | mhlib/demos/misp.py 34 | mhlib/demos/mkp.py 35 | mhlib/demos/qap.py 36 | mhlib/demos/tsp.py 37 | mhlib/demos/vertex_cover.py 38 | mhlib/demos/data/bur26a.dat 39 | mhlib/demos/data/fpsol2.i.1.col 40 | mhlib/demos/data/frb40-19-1.mis 41 | mhlib/demos/data/maxsat-adv1.cnf 42 | mhlib/demos/data/maxsat-adv2.cnf 43 | mhlib/demos/data/maxsat-simple.cnf 44 | mhlib/demos/data/mknap-small.txt 45 | mhlib/demos/data/mknapcb5-01.txt 46 | mhlib/demos/data/xqf131.tsp -------------------------------------------------------------------------------- /pymhlib/demos/julia-maxsat.py: -------------------------------------------------------------------------------- 1 | """Demo application for showing the integration with the Julia language, solving the MAXSAT problem. 2 | 3 | Julia and Python's julia package must be installed properly. 4 | The Julia module julia-maxsat.jl is used via Python's julia interface package. 5 | It provides a concrete solution class for solving the MAXSAT problem in essentially the same way as maxsat.py. 6 | 7 | Note that there is the alternative Julia main program julia-maxsat-main.jl. 8 | 9 | The goal in the MAXSAT problem is to maximize the number of clauses satisfied in a boolean function given in 10 | conjunctive normal form. 
11 | """ 12 | 13 | import os 14 | from julia import Julia 15 | # from julia.api import Julia 16 | # jl = Julia(compiled_modules=False) 17 | from julia import Main # , Base 18 | 19 | # from pymhlib.demos.maxsat import MAXSATInstance 20 | 21 | Main.eval(r'include("'+os.path.dirname(__file__)+r'/julia-maxsat.jl")') 22 | 23 | if __name__ == '__main__': 24 | from pymhlib.demos.common import run_optimization, data_dir 25 | from pymhlib.settings import get_settings_parser 26 | parser = get_settings_parser() 27 | parser.set_defaults(mh_titer=1000) 28 | run_optimization('Julia-MAXSAT',Main.JuliaMAXSAT.JuliaMAXSATInstance, Main.JuliaMAXSAT.JuliaMAXSATSolution, 29 | data_dir+"maxsat-adv1.cnf") 30 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "2.0.0", 5 | "tasks": [ 6 | { 7 | "label": "echo", 8 | "type": "shell", 9 | "command": "echo Hello" 10 | }, 11 | { 12 | "label": "pylint: whole project", 13 | "type": "shell", 14 | "command": "pylint --msg-template \"{path}:{line}:{column}:{category}:{symbol} - {msg}\" pymhlib", 15 | "windows": { 16 | "command": "pylint --msg-template \"{path}:{line}: {column}:{category}:{symbol} - {msg}\" pymhlib" 17 | }, 18 | "presentation": { 19 | "reveal": "never", 20 | "panel": "shared" 21 | }, 22 | "problemMatcher": { 23 | "owner": "python", 24 | "fileLocation": [ 25 | "relative", 26 | "${workspaceFolder}" 27 | ], 28 | "pattern": { 29 | "regexp": "^(.+):(\\d+):(\\d+):(\\w+):(.*)$", 30 | "file": 1, 31 | "line": 2, 32 | "column": 3, 33 | "severity": 4, 34 | "message": 5 35 | } 36 | } 37 | } 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /pymhlib/demos/graphs.py: -------------------------------------------------------------------------------- 1 | """Functions to read or create graphs for demo problems.""" 2 | 3 | import networkx as nx 4 | 5 | 6 | def create_or_read_simple_graph(name: str) -> nx.Graph: 7 | """Read a simple unweighted graph from the specified file or create random G_n,m graph if name is gnm-n-m. 8 | 9 | The nodes are labeled beginning with 0. 10 | 11 | File format: 12 | - ``c #`` ignored 13 | - ``p `` 14 | - ``e #`` for each edge, nodes are labeled in 1...number of nodes 15 | """ 16 | if name.startswith('gnm-'): 17 | # create random G_n,m graph 18 | par = name.split(sep='-') 19 | return nx.gnm_random_graph(int(par[1]), int(par[2]), int(par[3]) if len(par) == 4 else None) 20 | # read from file 21 | graph: nx.Graph = nx.Graph() 22 | with open(name) as f: 23 | for line in f: 24 | flag = line[0] 25 | if flag == 'p': 26 | split_line = line.split(' ') 27 | n = int(split_line[2]) 28 | # m = int(split_line[3]) 29 | graph.add_nodes_from(range(n)) 30 | elif flag == 'e': 31 | split_line = line.split(' ') 32 | u = int(split_line[1]) - 1 33 | v = int(split_line[2]) - 1 34 | graph.add_edge(u, v) 35 | return graph 36 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Setup program for installing pymhlib. 
2 | 3 | To create and upload a new version to PyPI: 4 | - pip install --upgrade setuptools wheel twine 5 | - Document changes in README.md 6 | - Increment version also below 7 | - python setup.py sdist bdist_wheel 8 | - For local installation uninstall first, then: python setup.py install 9 | - twine upload dist/* 10 | """ 11 | import setuptools 12 | 13 | with open("README.md", "r") as fh: 14 | long_description = fh.read() 15 | 16 | setuptools.setup( 17 | name="pymhlib", 18 | version="0.1.5", 19 | author="Günther Raidl et al.", 20 | author_email="raidl@ac.tuwien.ac.at", 21 | description="pymhlib - a toolbox for metaheuristics and hybrid optimization methods", 22 | long_description=long_description, 23 | long_description_content_type="text/markdown", 24 | url="https://github.com/ac-tuwien/pymhlib", 25 | license='GPL3', 26 | packages=setuptools.find_packages(), 27 | package_data={"pymhlib":['demos/data/*']}, 28 | classifiers=[ 29 | "Programming Language :: Python :: 3", 30 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", 31 | "Operating System :: OS Independent", 32 | ], 33 | python_requires='>=3.7', 34 | install_requires=[ 35 | 'networkx', 36 | 'configargparse', 37 | 'numpy', 38 | 'pandas', 39 | 'matplotlib', 40 | 'seaborn', 41 | 'PyYAML', 42 | ], 43 | ) 44 | -------------------------------------------------------------------------------- /.github/workflows/CI.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ master, testing ] 9 | pull_request: 10 | branches: [ master, testing ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | python-version: [3.7] 19 | 20 | steps: 21 | - uses: actions/checkout@v2 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v2 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | python -m pip install flake8 pytest pytest-cov 30 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with pytest 38 | run: | 39 | # pytest 40 | pytest --cov=./ --cov-report=xml 41 | - name: "Upload coverage to Codecov" 42 | uses: codecov/codecov-action@v1 43 | with: 44 | fail_ci_if_error: true -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.rulers": [120], 3 | "cSpell.words": [ 4 | "ALNS's", 5 | "MISP", 6 | "PBIG", 7 | "SSGA", 8 | "andreas", 9 | "arange", 10 | "args", 11 | "binvec", 12 | "brutto", 13 | "categ", 14 | "categbase", 15 | "checkit", 16 | "codecov", 17 | "dtype", 18 | "einsum", 19 | "elems", 20 | "equi", 21 | "fpsol", 22 | "groupby", 23 | "heapify", 24 | "heapq", 25 | "iinfo", 26 | "imap", 27 | "inplace", 28 | "issubset", 29 | "itbest", 30 | "iter", 31 | "itertools", 32 | "ittot", 33 | "lcps", 34 | "lfreq", 35 | "lnewinc", 36 | "mannwhitneyu", 37 | "med", 38 | "mh", 39 | "mknap", 40 | "mknapcb", 41 | "nargs", 42 | "ndarray", 43 | "netto", 44 | "networkx", 45 | "pymhlib's", 46 | "rawdata", 47 | "roundagg", 48 | "roundaggmip", 49 | "sa", 50 | "scipy", 51 | "sdiv", 52 | "stdev", 53 | "subsetvec", 54 | "succ", 55 | "sysimage", 56 | "tbest", 57 | "tciter", 58 | "tctime", 59 | "tobj", 60 | "ttime", 61 | "ttot", 62 | "typecheck", 63 | "unindented", 64 | "windbichler", 65 | "xover" 66 | ], 67 | "python.testing.unittestArgs": [ 68 | "-v", 69 | "-s", 70 | "pymhlib", 71 | "-p", 72 | "test_*.py" 73 | ], 74 | "python.testing.pytestEnabled": false, 75 | "python.testing.nosetestsEnabled": false, 76 | "python.testing.unittestEnabled": true, 77 | "python.linting.pylintEnabled": true 78 | } -------------------------------------------------------------------------------- /pymhlib/demos/data/xqf131.tsp: -------------------------------------------------------------------------------- 1 | NAME : xqf131 2 | COMMENT : Bonn VLSI data set with 131 points 3 | COMMENT : Uni Bonn, Research Institute for Discrete Math 4 | COMMENT : Contributed by Andre Rohe 5 | TYPE : TSP 6 | DIMENSION : 131 7 | EDGE_WEIGHT_TYPE : EUC_2D 8 | NODE_COORD_SECTION 9 | 1 0 13 10 | 2 0 26 11 | 3 0 27 12 | 4 0 39 13 | 5 2 0 14 | 6 5 13 15 | 7 5 19 16 | 8 5 25 17 | 9 5 31 18 | 10 5 37 19 | 11 5 43 20 | 12 5 8 21 | 13 8 0 22 | 14 9 10 23 | 15 10 10 24 | 16 11 10 25 | 17 12 10 26 | 18 12 5 27 | 19 15 13 28 | 20 15 19 29 | 21 15 25 30 | 22 15 31 31 | 23 15 37 32 | 24 15 43 33 | 25 15 8 34 | 26 18 11 35 | 27 18 13 36 | 28 18 15 37 | 29 18 17 38 | 30 18 19 39 | 31 18 21 40 | 32 18 23 41 | 33 18 25 42 | 34 18 27 43 | 35 18 29 44 | 36 18 31 45 | 37 18 33 46 | 38 18 35 47 | 39 18 37 48 | 40 18 39 49 | 41 18 41 50 | 42 18 42 51 | 43 18 44 52 | 44 18 45 53 | 45 25 11 54 | 46 25 15 55 | 47 25 22 56 | 48 25 23 57 | 49 25 24 58 | 50 25 26 59 | 51 25 28 60 | 52 25 29 61 | 53 25 9 62 | 54 28 16 63 | 55 28 20 64 | 56 28 28 65 | 57 28 30 66 | 58 28 34 67 | 59 28 40 68 | 60 28 43 69 | 61 28 47 70 | 62 32 26 71 | 63 32 31 72 | 64 33 15 73 | 65 33 26 74 | 66 33 29 75 | 67 33 31 76 | 68 34 15 77 | 69 34 26 78 | 70 34 29 79 | 71 34 31 80 | 72 34 38 81 | 73 34 41 82 | 74 34 5 83 | 75 35 17 84 | 76 35 31 85 | 77 38 16 86 | 78 38 20 87 | 79 38 30 88 | 80 38 34 89 | 81 40 22 90 | 82 41 23 91 | 83 41 32 92 | 84 41 34 93 | 85 41 35 94 | 86 41 36 95 | 87 48 22 96 | 88 48 27 97 | 89 48 6 98 | 90 51 45 99 | 91 51 47 100 | 92 56 25 101 | 93 57 12 102 | 94 57 25 103 | 95 57 44 104 | 96 61 45 105 | 97 61 47 106 | 98 63 6 
107 | 99 64 22 108 | 100 71 11 109 | 101 71 13 110 | 102 71 16 111 | 103 71 45 112 | 104 71 47 113 | 105 74 12 114 | 106 74 16 115 | 107 74 20 116 | 108 74 24 117 | 109 74 29 118 | 110 74 35 119 | 111 74 39 120 | 112 74 6 121 | 113 77 21 122 | 114 78 10 123 | 115 78 32 124 | 116 78 35 125 | 117 78 39 126 | 118 79 10 127 | 119 79 33 128 | 120 79 37 129 | 121 80 10 130 | 122 80 41 131 | 123 80 5 132 | 124 81 17 133 | 125 84 20 134 | 126 84 24 135 | 127 84 29 136 | 128 84 34 137 | 129 84 38 138 | 130 84 6 139 | 131 107 27 140 | EOF 141 | -------------------------------------------------------------------------------- /pymhlib/pbig.py: -------------------------------------------------------------------------------- 1 | """A population-based iterated greedy (PBIG) algorithm. 2 | . 3 | The parameter is a method that randomly creates a solution, which will be used to generate an initial population. 4 | Until a termination criterion is met, the list of d&r (destroy and recreate) methods 5 | is applied to each individual of the population, resulting in a temporary population. 6 | The best of the current and the temporary iteration form a new generation for which the process will be repeated. 7 | """ 8 | 9 | from typing import List 10 | from itertools import cycle 11 | 12 | from pymhlib.population import Population 13 | from pymhlib.scheduler import Method, Scheduler 14 | from pymhlib.solution import Solution 15 | 16 | 17 | class PBIG(Scheduler): 18 | """A population-based iterated greedy (PBIG) algorithm. 19 | 20 | Attributes 21 | - sol: solution object, in which final result will be returned 22 | - population: population of solutions 23 | - meths_ch: list of construction heuristic methods 24 | - meths_dr: list of destruct and recreate methods 25 | """ 26 | 27 | def __init__(self, sol: Solution, meths_ch: List[Method], meths_dr: List[Method], 28 | own_settings: dict = None): 29 | """Initialization. 
30 | 31 | :param sol: solution to be improved 32 | :param meths_ch: list of construction heuristic methods 33 | :param meths_dr: list of destruct and recreate methods 34 | :param own_settings: optional dictionary with specific settings 35 | """ 36 | population = Population(sol, meths_ch, own_settings) 37 | super().__init__(sol, meths_ch+meths_dr, own_settings, population=population) 38 | self.meths_ch = meths_ch 39 | self.meths_dr = meths_dr 40 | 41 | def run(self): 42 | """Actually performs the construction heuristics followed by the PBIG.""" 43 | 44 | population = self.population 45 | 46 | meths_dr_cycle = cycle(self.meths_dr) 47 | 48 | while True: 49 | changed: List[Solution] = [] 50 | 51 | for individual in self.population: 52 | modified = individual.copy() 53 | method = next(meths_dr_cycle) 54 | res = self.perform_method(method, modified) 55 | 56 | if res.terminate: 57 | return 58 | 59 | if res.changed: 60 | changed.append(modified) 61 | 62 | # Update population best 63 | if modified.is_better(self.incumbent): 64 | self.incumbent = modified # update best solution 65 | 66 | # Add new individuals to population and take the best 67 | for individual in changed: 68 | worst = population.worst() 69 | if individual.is_better(population[worst]): 70 | population[worst] = individual 71 | -------------------------------------------------------------------------------- /pymhlib/demos/dd_mkp.py: -------------------------------------------------------------------------------- 1 | """Demo program addressing the MKP with decision diagrams.""" 2 | 3 | from typing import DefaultDict 4 | from itertools import count 5 | import logging 6 | import numpy as np 7 | 8 | from pymhlib.settings import parse_settings, settings, get_settings_parser, get_settings_as_str 9 | from pymhlib.log import init_logger 10 | from pymhlib.demos.mkp import MKPInstance, MKPSolution 11 | from pymhlib.decision_diag import State, Node, DecisionDiag 12 | from pymhlib.demos.common import data_dir 13 | 14 | 15 | class MKPState(State, tuple): 16 | """The state corresponds to an int-tuple indicating for each resource the amount already used.""" 17 | 18 | @staticmethod 19 | def __new__(cls, y): 20 | return tuple.__new__(tuple, y) 21 | 22 | def __hash__(self): 23 | return tuple.__hash__(self) 24 | 25 | def __eq__(self, other: 'MKPState'): 26 | return tuple.__eq__(self, other) 27 | 28 | def __repr__(self): 29 | return "y=" + tuple.__str__(self) 30 | 31 | 32 | class MKPNode(Node): 33 | """A DD node for the MKP.""" 34 | 35 | 36 | class MKPDecisionDiag(DecisionDiag): 37 | """A DD for the MKP.""" 38 | 39 | def __init__(self, inst: MKPInstance): 40 | super().__init__(inst, MKPNode('r', MKPState((0,)*inst.m), 0), MKPState(inst.b), 41 | MKPSolution(inst)) 42 | 43 | def expand_node(self, node: MKPNode, depth: int, node_pool: DefaultDict[(State, Node)])-> bool: 44 | assert not node.succ 45 | no_target = depth < self.inst.n-1 46 | self.get_successor_node(node_pool, node, 0, 0, node.state if no_target else self.t_state) 47 | y_new = node.state + self.inst.r[:, depth] 48 | if np.all(y_new <= self.inst.b): 49 | self.get_successor_node(node_pool, node, 1, self.inst.p[depth], 50 | MKPState(y_new) if no_target else self.t_state) 51 | return no_target 52 | 53 | def merge_states(self, state1: MKPState, state2: MKPState) -> MKPState: 54 | y = np.amin([state1, state2], axis=0) 55 | if y == state1: 56 | return state1 57 | if y == state2: 58 | return state2 59 | return MKPState(y) 60 | 61 | def derive_solution(self) -> MKPSolution: 62 | """Derive MKP 
solution.""" 63 | path = self.derive_best_path() 64 | # print(path) 65 | sel_pos = count() 66 | not_sel_pos = count(len(self.sol.x)-1, -1) 67 | for i, v in enumerate(path): 68 | idx = next(sel_pos) if v else next(not_sel_pos) 69 | self.sol.x[idx] = i 70 | self.sol.sel = next(sel_pos) 71 | self.sol.calc_y() 72 | assert self.sol.sel - 1 == next(not_sel_pos) 73 | # print(self.sol.x) 74 | return self.sol 75 | 76 | 77 | def main(): 78 | """Test for the DD classes.""" 79 | # import logging 80 | # from pymhlib.demos.common import data_dir 81 | parser = get_settings_parser() 82 | parser.add("--inst_file", type=str, default=data_dir+'mknap-small.txt', help='problem instance file') 83 | # parser.set_defaults(seed=3) 84 | 85 | parse_settings() 86 | init_logger() 87 | logger = logging.getLogger("pymhlib") 88 | logger.info("pymhlib demo for using decision diagrams for the MKP") 89 | logger.info(get_settings_as_str()) 90 | instance = MKPInstance(settings.inst_file) 91 | logger.info("MKP instance read:\n%s", str(instance)) 92 | 93 | # solution = MKPSolution(instance) 94 | # solution.initialize(0) 95 | dd = MKPDecisionDiag(instance) 96 | dd.expand_all('relaxed') 97 | logger.info(dd) 98 | sol = dd.derive_solution() 99 | # sol.check() 100 | logger.info("Solution: %s, obj=%f", sol, sol.obj()) 101 | 102 | 103 | if __name__ == '__main__': 104 | main() 105 | -------------------------------------------------------------------------------- /pymhlib/par_alns.py: -------------------------------------------------------------------------------- 1 | """Parallel implementation of Adaptive Large Neighborhood Search (ALNS). 2 | 3 | The module realizes a parallel version of the ALNS metaheuristic utilizing multiple cores via multiple processes. 4 | The destroy plus repair operations are delegated as distributed tasks to a process pool in an asynchronous way. 5 | Note that a reasonable speedup will only be gained if the repair/destroy operations are time-expensive. 6 | Further note that the implementation will not be deterministic anymore for more than one worker even 7 | if a seed > 0 is specified. 8 | """ 9 | 10 | import multiprocessing as mp 11 | import time 12 | from typing import Iterable, Tuple 13 | from configargparse import Namespace 14 | from pymhlib.settings import settings, set_settings 15 | from pymhlib.solution import Solution, TObj 16 | from pymhlib.scheduler import Method, Result 17 | from pymhlib.alns import ALNS 18 | 19 | 20 | class ParallelALNS(ALNS): 21 | """A parallel version of the ALNS utilizing multiple cores via multiple processes. 22 | 23 | The destroy plus repair operations are delegated as distributed tasks to a process pool in an asynchronous way. 24 | """ 25 | 26 | def operators_generator(self, sol: Solution) -> Iterable[Tuple[Method, Method, Solution]]: 27 | """Generator yielding a selected repair and destroy operators and the solution to apply them to. 28 | 29 | :param sol: Solution to be modified. 
30 | :return: yields infinitely often a tuple of a selected destroy and repair methods and the solution to which 31 | they should be applied 32 | """ 33 | while True: 34 | destroy, repair = self.select_method_pair() 35 | yield destroy, repair, sol 36 | 37 | @staticmethod 38 | def process_init(s: Namespace, new_seed: int): 39 | """Initialization of new worker process.""" 40 | s.seed = new_seed 41 | set_settings(s) 42 | 43 | @staticmethod 44 | def perform_method_pair_in_worker(params: Tuple[Method, Method, Solution])\ 45 | -> Tuple[Method, Method, Solution, Result, TObj, float, float]: 46 | """Performs the given destroy and repair operator pair on the given solution in a worker process.""" 47 | destroy, repair, sol = params 48 | res = Result() 49 | obj_old = sol.obj() 50 | t_start = time.process_time() 51 | destroy.func(sol, destroy.par, res) 52 | t_destroyed = time.process_time() 53 | repair.func(sol, repair.par, res) 54 | t_end = time.process_time() 55 | return destroy, repair, sol, res, obj_old, t_destroyed-t_start, t_end-t_destroyed 56 | 57 | def alns(self, sol: Solution): 58 | """Perform adaptive large neighborhood search (ALNS) on given solution.""" 59 | self.next_segment = self.iteration + self.own_settings.mh_alns_segment_size 60 | sol_incumbent = sol.copy() 61 | sol_new = sol.copy() 62 | operators = self.operators_generator(sol_new) 63 | worker_seed = 0 if settings.mh_workers > 1 else settings.seed 64 | with mp.Pool(processes=settings.mh_workers, 65 | initializer=self.process_init, initargs=(settings, worker_seed)) as worker_pool: 66 | result_iter = worker_pool.imap_unordered(self.perform_method_pair_in_worker, operators) 67 | for result in result_iter: 68 | # print("Result:", result) 69 | destroy, repair, sol_result, res, obj_old, t_destroy, t_repair = result 70 | sol_new.copy_from(sol_result) 71 | self.update_stats_for_method_pair(destroy, repair, sol, res, obj_old, t_destroy, t_repair) 72 | self.update_after_destroy_and_repair_performed(destroy, repair, sol_new, sol_incumbent, sol) 73 | if res.terminate: 74 | sol.copy_from(sol_incumbent) 75 | return 76 | self.update_operator_weights() 77 | -------------------------------------------------------------------------------- /pymhlib/gvns.py: -------------------------------------------------------------------------------- 1 | """A general variable neighborhood search class which can also be used for plain local search, VND, GRASP, IG etc. 2 | 3 | It extends the more general scheduler module/class by distinguishing between construction heuristics, local 4 | improvement methods and shaking methods. 5 | """ 6 | 7 | from typing import List 8 | import time 9 | 10 | from pymhlib.scheduler import Method, Scheduler 11 | from pymhlib.settings import get_settings_parser 12 | from pymhlib.solution import Solution 13 | 14 | 15 | parser = get_settings_parser() 16 | 17 | 18 | class GVNS(Scheduler): 19 | """A general variable neighborhood search (GVNS). 20 | 21 | Attributes 22 | - sol: solution object, in which final result will be returned 23 | - meths_ch: list of construction heuristic methods 24 | - meths_li: list of local improvement methods 25 | - meths_sh: list of shaking methods 26 | """ 27 | 28 | def __init__(self, sol: Solution, meths_ch: List[Method], meths_li: List[Method], meths_sh: List[Method], 29 | own_settings: dict = None, consider_initial_sol=False): 30 | """Initialization. 
31 | 32 | :param sol: solution to be improved 33 | :param meths_ch: list of construction heuristic methods 34 | :param meths_li: list of local improvement methods 35 | :param meths_sh: list of shaking methods 36 | :param own_settings: optional dictionary with specific settings 37 | :param consider_initial_sol: if true consider sol as valid solution that should be improved upon; otherwise 38 | sol is considered just a possibly uninitialized of invalid solution template 39 | """ 40 | super().__init__(sol, meths_ch+meths_li+meths_sh, own_settings, consider_initial_sol) 41 | self.meths_ch = meths_ch 42 | self.meths_li = meths_li 43 | self.meths_sh = meths_sh 44 | 45 | def vnd(self, sol: Solution) -> bool: 46 | """Perform variable neighborhood descent (VND) on given solution. 47 | 48 | :returns: true if a global termination condition is fulfilled, else False. 49 | """ 50 | sol2 = sol.copy() 51 | while True: 52 | for m in self.next_method(self.meths_li): 53 | res = self.perform_method(m, sol2) 54 | if sol2.is_better(sol): 55 | sol.copy_from(sol2) 56 | if res.terminate: 57 | return True 58 | break 59 | if res.terminate: 60 | return True 61 | if res.changed: 62 | sol2.copy_from(sol) 63 | else: # local optimum reached 64 | return False 65 | 66 | def gvns(self, sol: Solution): 67 | """Perform general variable neighborhood search (GVNS) to given solution.""" 68 | sol2 = sol.copy() 69 | if self.vnd(sol2) or not self.meths_sh: 70 | return 71 | use_vnd = bool(self.meths_li) 72 | while True: 73 | for m in self.next_method(self.meths_sh, repeat=True): 74 | t_start = time.process_time() 75 | res = self.perform_method(m, sol2, delayed_success=use_vnd) 76 | terminate = res.terminate 77 | if not terminate and use_vnd: 78 | terminate = self.vnd(sol2) 79 | self.delayed_success_update(m, sol.obj(), t_start, sol2) 80 | if sol2.is_better(sol): 81 | sol.copy_from(sol2) 82 | if terminate or res.terminate: 83 | return 84 | break 85 | if terminate or res.terminate: 86 | return 87 | sol2.copy_from(sol) 88 | else: 89 | break 90 | 91 | def run(self) -> None: 92 | """Actually performs the construction heuristics followed by the GVNS.""" 93 | sol = self.incumbent.copy() 94 | assert self.incumbent_valid or self.meths_ch 95 | self.perform_sequentially(sol, self.meths_ch) 96 | self.gvns(sol) 97 | -------------------------------------------------------------------------------- /pymhlib/ssga.py: -------------------------------------------------------------------------------- 1 | """A steady-state genetic algorithm (SSGA).""" 2 | 3 | from typing import List, Callable, Any 4 | import random 5 | 6 | from functools import partial 7 | from pymhlib.population import Population 8 | from pymhlib.scheduler import Method, Scheduler, Result, MethodStatistics 9 | from pymhlib.settings import get_settings_parser 10 | from pymhlib.solution import Solution 11 | 12 | 13 | parser = get_settings_parser() 14 | parser.add_argument("--mh_ssga_cross_prob", type=int, default=1, help='SSGA crossover probability') 15 | parser.add_argument("--mh_ssga_loc_prob", type=int, default=0.1, help='SSGA local improvement probability') 16 | 17 | 18 | class SteadyStateGeneticAlgorithm(Scheduler): 19 | """A steady state genetic algorithm. 20 | 21 | During each iteration, one new solution is generated by means 22 | of crossover and mutation. The new solution 23 | replaces the worst of the population. 
24 | 25 | TODO: consider mh_pop_dupelim for duplicate elimination 26 | 27 | TODO: perform a randomized number of permutations according to a Poisson distribution, param for mutation prob/gene 28 | 29 | Attributes 30 | - sol: solution object, in which final result will be returned 31 | - meths_ch: list of construction heuristic methods 32 | - meth_cx: a crossover method 33 | - meth_mu: a mutation method 34 | - meth_ls: a local search method 35 | """ 36 | 37 | def __init__(self, sol: Solution, meths_ch: List[Method], 38 | meth_cx: Callable[[Solution, Solution], Solution], 39 | meth_mu: Method, 40 | meth_li: Method, 41 | own_settings: dict = None): 42 | """Initialization. 43 | 44 | :param sol: solution to be improved 45 | :param meths_ch: list of construction heuristic methods 46 | :param meth_cx: a crossover method 47 | :param meth_mu: a mutation method 48 | :param meth_li: an optional local improvement method 49 | :param own_settings: optional dictionary with specific settings 50 | """ 51 | population = Population(sol, meths_ch, own_settings) 52 | super().__init__(sol, meths_ch + [meth_mu] + [meth_li], own_settings, population=population) 53 | self.method_stats["cx"] = MethodStatistics() 54 | self.meth_cx = meth_cx 55 | self.meth_mu = meth_mu 56 | self.meth_ls = meth_li 57 | 58 | self.incumbent = self.population[self.population.best()] 59 | 60 | def run(self): 61 | """Actually performs the construction heuristics followed by the SteadyStateGeneticAlgorithm.""" 62 | 63 | population = self.population 64 | 65 | while True: 66 | # create a new solution 67 | p1 = population[population.select()].copy() 68 | 69 | # methods to perform in this iteration 70 | methods: List[Method] = [] 71 | 72 | # optional crossover 73 | if random.random() < self.own_settings.mh_ssga_cross_prob: 74 | p2 = population[population.select()].copy() 75 | 76 | # workaround for Method not allowing a second Solution as parameter 77 | def meth_cx(crossover, par2: Solution, par1: Solution, _par: Any, _res: Result): 78 | crossover(par1, par2) 79 | 80 | meth_cx_with_p2_bound = partial(meth_cx, self.meth_cx, p2) 81 | 82 | meth = Method("cx", meth_cx_with_p2_bound, None) 83 | methods.append(meth) 84 | 85 | # mutation 86 | methods.append(self.meth_mu) 87 | 88 | # optionally local search 89 | if self.meth_ls and random.random() < self.own_settings.mh_ssga_loc_prob: 90 | methods.append(self.meth_ls) 91 | 92 | res = self.perform_methods(methods, p1) 93 | 94 | if res.terminate: 95 | break 96 | 97 | # Replace in population 98 | worst = population.worst() 99 | population[worst].copy_from(p1) 100 | 101 | # Update best solution 102 | if p1.is_better(self.incumbent): 103 | self.incumbent.copy_from(p1) 104 | -------------------------------------------------------------------------------- /pymhlib/population.py: -------------------------------------------------------------------------------- 1 | """Population class for population based metaheuristics.""" 2 | 3 | from typing import List 4 | from itertools import cycle 5 | import random 6 | from statistics import stdev 7 | import numpy as np 8 | 9 | from pymhlib.scheduler import Method, Result 10 | from pymhlib.settings import get_settings_parser, settings, OwnSettings, boolArg 11 | from pymhlib.solution import Solution 12 | 13 | 14 | parser = get_settings_parser() 15 | parser.add_argument("--mh_pop_size", type=int, default=100, help='Population size') 16 | parser.add_argument("--mh_pop_tournament_size", type=int, default=10, help='Group size in tournament selection') 17 | 
parser.add_argument("--mh_pop_dupelim", type=boolArg, default=True, help='Prevent duplicates in population') 18 | 19 | 20 | class Population(np.ndarray): 21 | """ Maintains a set of solutions, called a population and provides elementary methods. 22 | 23 | Attributes 24 | - own_settings: own settings object with possibly individualized parameter values 25 | """ 26 | 27 | def __new__(cls, sol: Solution, meths_ch: List[Method], own_settings: dict = None): 28 | """Create population of mh_pop_size solutions using the list of construction heuristics if given. 29 | 30 | If sol is None or no constructors are given, the population is initialized empty. 31 | sol itself is just used as template for obtaining further solutions. 32 | """ 33 | own_settings = OwnSettings(own_settings) if own_settings else settings 34 | size = own_settings.mh_pop_size 35 | obj = super(Population, cls).__new__(cls, size, Solution) 36 | obj.own_settings = own_settings 37 | if sol is not None and meths_ch: 38 | # cycle through construction heuristics to generate population 39 | # perform all construction heuristics, take best solution 40 | meths_cycle = cycle(meths_ch) 41 | idx = 0 42 | while idx < size: 43 | m = next(meths_cycle) 44 | sol = sol.copy() 45 | res = Result() 46 | m.func(sol, m.par, res) 47 | if own_settings.mh_pop_dupelim and obj.duplicates_of(sol) != []: 48 | continue # do not add this duplicate 49 | obj[idx] = sol 50 | if res.terminate: 51 | break 52 | idx += 1 53 | return obj 54 | 55 | def best(self) -> int: 56 | """Get index of best solution.""" 57 | best = 0 58 | for i, elem in enumerate(self): 59 | if elem.is_better(self[best]): 60 | best = i 61 | return best 62 | 63 | def worst(self) -> int: 64 | """Get index of worst solution.""" 65 | worst = 0 66 | for i, elem in enumerate(self): 67 | if elem.is_worse(self[worst]): 68 | worst = i 69 | return worst 70 | 71 | def tournament_selection(self) -> int: 72 | """Select one solution with tournament selection with replacement and return its index.""" 73 | k = self.own_settings.mh_pop_tournament_size # pylint: disable=no-member 74 | best = random.randrange(len(self)) 75 | for _ in range(k - 1): 76 | idx = random.randrange(len(self)) 77 | if self[idx].is_better(self[best]): 78 | best = idx 79 | return best 80 | 81 | def select(self) -> int: 82 | """Select one solution and return its index. 83 | 84 | So far calls tournament_selection. May be extended in the future. 85 | """ 86 | return self.tournament_selection() 87 | 88 | def duplicates_of(self, solution: Solution) -> List[int]: 89 | """ Get a list of duplicates of the provided solution.""" 90 | return [i for i, sol in enumerate(self) if sol is not None and sol == solution] 91 | 92 | def obj_avg(self) -> float: 93 | """ Returns the average of all solutions' objective values.""" 94 | if len(self) < 1: 95 | raise ValueError("average requires at least one element") 96 | 97 | return sum([float(sol.obj()) for sol in self]) / len(self) 98 | 99 | def obj_std(self) -> float: 100 | """ Returns the standard deviation of all solutions' objective values.""" 101 | return stdev([float(sol.obj()) for sol in self]) 102 | -------------------------------------------------------------------------------- /pymhlib/demos/julia-maxsat2.py: -------------------------------------------------------------------------------- 1 | """Demo application for showing the integration with the Julia language, solving the MAXSAT problem. 2 | 3 | Julia and Python's julia package must be installed properly. 
4 | The Julia module julia-maxsat.jl is used via Python's julia interface package. 5 | This variant of the demo uses an own Python solution class in which Julia is called just for the 6 | objective function evaluation and the local improvement. 7 | 8 | The goal in the MAXSAT problem is to maximize the number of clauses satisfied in a boolean function given in 9 | conjunctive normal form. 10 | """ 11 | 12 | import os 13 | import random 14 | from typing import Any 15 | import numpy as np 16 | 17 | # from julia import Julia 18 | from julia import Base, Main 19 | 20 | from pymhlib.binvec_solution import BinaryVectorSolution 21 | from pymhlib.alns import ALNS 22 | from pymhlib.scheduler import Result 23 | # from pymhlib.demos.maxsat import MAXSATInstance 24 | 25 | Main.eval(r'include("'+os.path.dirname(__file__)+r'/julia-maxsat.jl")') 26 | 27 | 28 | class JuliaMAXSAT2Solution(BinaryVectorSolution): 29 | """Solution to a MAXSAT instance. 30 | 31 | Attributes 32 | - inst: associated MAXSATInstance 33 | - x: binary incidence vector 34 | - destroyed: list of indices of variables that have been destroyed by the ALNS's destroy operator 35 | """ 36 | 37 | to_maximize = True 38 | 39 | def __init__(self, inst: Main.JuliaMAXSAT.JuliaMAXSATInstance): 40 | super().__init__(inst.n, inst=inst) 41 | self.destroyed = None 42 | 43 | def copy(self): 44 | sol = JuliaMAXSAT2Solution(self.inst) 45 | sol.copy_from(self) 46 | return sol 47 | 48 | def calc_objective(self): 49 | return Main.JuliaMAXSAT.obj(self.x, self.inst.julia_inst) 50 | 51 | def check(self): 52 | """Check if valid solution. 53 | 54 | :raises ValueError: if problem detected. 55 | """ 56 | if len(self.x) != self.inst.n: 57 | raise ValueError("Invalid length of solution") 58 | super().check() 59 | 60 | def construct(self, par: Any, _result: Result): 61 | """Scheduler method that constructs a new solution. 62 | 63 | Here we just call initialize. 64 | """ 65 | self.initialize(par) 66 | 67 | def local_improve(self, par: Any, _result: Result): 68 | """Perform one k_flip_neighborhood_search.""" 69 | obj_val = self.obj() 70 | x = self.x 71 | new_obj_val = Main.JuliaMAXSAT.k_flip_neighborhood_search_b(x, obj_val, self.inst.julia_inst, par, False) 72 | if new_obj_val > obj_val: 73 | self.x = x 74 | self.obj_val = new_obj_val 75 | return True 76 | return False 77 | 78 | def shaking(self, par, _result): 79 | """Scheduler method that performs shaking by flipping par random positions.""" 80 | for _ in range(par): 81 | p = random.randrange(self.inst.n) 82 | self.x[p] = not self.x[p] 83 | self.invalidate() 84 | 85 | def destroy(self, par: Any, _result: Result): 86 | """Destroy operator for ALNS selects par*ALNS.get_number_to_destroy positions uniformly at random for removal. 87 | 88 | Selected positions are stored with the solution in list self.destroyed. 
89 | """ 90 | num = min(ALNS.get_number_to_destroy(len(self.x)) * par, len(self.x)) 91 | self.destroyed = np.random.choice(range(len(self.x)), num, replace=False) 92 | self.invalidate() 93 | 94 | def repair(self, _par: Any, _result: Result): 95 | """Repair operator for ALNS assigns new random values to all positions in self.destroyed.""" 96 | assert self.destroyed is not None 97 | for p in self.destroyed: 98 | self.x[p] = random.randrange(2) 99 | self.destroyed = None 100 | self.invalidate() 101 | 102 | def crossover(self, other: 'JuliaMAXSAT2Solution'): 103 | """ Perform uniform crossover as crossover.""" 104 | return self.uniform_crossover(other) 105 | 106 | 107 | if __name__ == '__main__': 108 | from pymhlib.demos.common import run_optimization, data_dir 109 | from pymhlib.settings import get_settings_parser 110 | parser = get_settings_parser() 111 | parser.set_defaults(mh_titer=1000) 112 | run_optimization('MAXSAT', Main.JuliaMAXSAT.JuliaMAXSATInstance, JuliaMAXSAT2Solution, data_dir+"maxsat-adv1.cnf") 113 | -------------------------------------------------------------------------------- /pymhlib/demos/data/bur26a.dat: -------------------------------------------------------------------------------- 1 | 26 2 | 3 | 53 66 66 66 66 53 53 53 53 53 73 53 53 53 66 53 53 53 53 85 73 73 73 73 53 53 4 | 66 53 66 66 66 53 53 53 53 53 53 73 53 53 66 53 53 53 53 73 85 73 73 73 53 53 5 | 66 66 53 66 66 53 53 53 53 53 53 53 73 53 66 53 53 53 53 73 73 85 73 73 53 53 6 | 66 66 66 53 66 53 53 53 53 53 53 53 53 73 73 53 53 53 53 73 73 73 85 85 53 53 7 | 66 66 66 66 53 53 53 53 53 53 53 53 53 53 73 53 53 53 53 73 73 73 85 85 53 53 8 | 53 53 53 53 53 53 66 66 66 66 53 53 53 53 53 73 73 53 53 53 53 53 53 53 85 85 9 | 53 53 53 53 53 66 53 66 66 66 53 53 53 53 53 73 73 53 53 53 53 53 53 53 85 85 10 | 53 53 53 53 53 66 66 53 66 66 53 53 53 53 53 66 53 73 53 53 53 53 53 53 73 73 11 | 53 53 53 53 53 66 66 66 53 66 53 53 53 53 53 66 53 53 73 53 53 53 53 53 73 73 12 | 53 53 53 53 53 66 66 66 66 53 53 53 53 53 53 66 53 53 53 53 53 53 53 53 73 73 13 | 66 66 66 66 66 53 53 53 53 53 53 53 53 53 66 53 53 53 53 73 73 73 73 73 53 53 14 | 66 66 66 66 66 53 53 53 53 53 53 53 53 53 66 53 53 53 53 73 73 73 73 73 53 53 15 | 66 66 66 66 66 53 53 53 53 53 53 53 53 53 66 53 53 53 53 73 73 73 73 73 53 53 16 | 66 66 66 66 66 53 53 53 53 53 53 53 53 53 66 53 53 53 53 73 73 73 73 73 53 53 17 | 66 66 66 66 66 53 53 53 53 53 53 53 53 66 53 53 53 53 53 73 73 73 73 73 53 53 18 | 53 53 53 53 53 66 66 66 66 66 53 53 53 53 53 53 66 53 53 53 53 53 53 53 73 73 19 | 53 53 53 53 53 66 66 66 66 66 53 53 53 53 53 66 53 53 53 53 53 53 53 53 73 73 20 | 53 53 53 53 53 66 66 66 66 66 53 53 53 53 53 66 53 53 53 53 53 53 53 53 73 73 21 | 53 53 53 53 53 66 66 66 66 66 53 53 53 53 53 66 53 53 53 53 53 53 53 53 73 73 22 | 85 66 66 66 66 53 53 53 53 53 66 53 53 53 66 53 53 53 53 53 73 73 73 73 53 53 23 | 66 85 66 66 66 53 53 53 53 53 53 66 53 53 66 53 53 53 53 73 53 73 73 73 53 53 24 | 66 66 85 66 66 53 53 53 53 53 53 53 66 53 66 53 53 53 53 73 73 53 73 73 53 53 25 | 66 66 66 85 85 53 53 53 53 53 53 53 53 66 66 53 53 53 53 73 73 73 53 66 53 53 26 | 66 66 66 85 85 53 53 53 53 53 53 53 53 66 66 53 53 53 53 73 73 73 66 53 53 53 27 | 53 53 53 53 53 85 85 66 66 66 53 53 53 53 53 66 66 53 53 53 53 53 53 53 53 66 28 | 53 53 53 53 53 85 85 66 66 66 53 53 53 53 53 66 66 53 53 53 53 53 53 53 66 53 29 | 30 | 47 348 316 74 12 181 338 309 35 3 84 714 367 1153 7 71 0 687 432 507 975 38 6 8 7 15 31 | 175 9 0 4 1300 12 41 18 183 6 3 102 2 7 84 0 0 150 84 54 148 2 13 0 1 
11 32 | 19 0 6 9 12 0 1 3100 3 1 209 9 3 1 22 0 0 4 2 3 1 0 2 0 0 0 33 | 575 10 5 3 2729 10 10 6 1186 0 4 48 46 30 103 11 0 102 36 34 160 2 14 0 3 1 34 | 56 265 165 249 45 142 391 398 2329 4 132 747 479 4754 32 51 3 4501 1311 512 326 26 111 43 6 68 35 | 190 3 0 10 313 112 31 4 91 2 3 76 2 16 104 5 1 163 30 257 45 2 1 0 1 16 36 | 187 6 0 4 1889 9 7 2 138 3 33 126 7 41 39 0 0 198 199 149 100 2 1 0 5 10 37 | 626 11 3 5 1232 12 5 4 207 2 26 230 125 204 132 1 0 596 113 596 107 5 67 0 4 12 38 | 107 50 1029 149 2067 53 471 157 3 2 151 370 292 2100 220 18 3 250 1038 1008 25 65 2 4 0 42 39 | 171 0 0 0 125 0 0 0 0 0 0 0 0 1 12 0 0 0 0 1 28 0 0 0 0 0 40 | 286 4 2 0 297 11 13 7 53 0 4 118 1 11 247 4 0 113 32 218 116 1 4 0 1 3 41 | 473 80 23 99 802 49 64 13 787 2 33 639 27 43 126 11 0 9 239 332 186 12 1 0 28 21 42 | 378 41 3 2 567 9 5 5 581 0 5 23 244 4 101 80 0 1 44 86 101 0 6 0 0 0 43 | 407 56 36 1458 1176 112 1045 69 534 9 171 59 57 394 184 13 0 21 496 701 194 24 36 1 5 159 44 | 16 68 196 111 6 60 56 119 7 7 35 344 193 769 13 73 0 590 157 114 26 17 48 3 7 122 45 | 188 0 0 5 111 43 0 25 107 0 2 75 2 1 146 56 0 391 13 42 44 0 0 0 0 0 46 | 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 16 0 0 0 0 0 47 | 539 180 75 426 1251 119 211 129 548 12 212 190 122 229 367 41 0 89 443 601 314 20 77 2 3 94 48 | 272 84 1132 18 941 46 77 51 664 10 72 47 50 9 372 278 0 62 359 1432 92 42 46 0 19 18 49 | 381 36 5 6 2158 29 38 85 595 3 20 160 21 24 139 5 1 302 419 335 197 8 98 0 8 248 50 | 35 64 213 35 144 386 94 26 10 1 38 62 270 1624 1 56 0 426 463 284 2 13 9 4 0 12 51 | 22 1 0 1 523 0 0 0 97 0 0 0 0 0 525 1 0 1 0 2 1 0 4 0 0 0 52 | 298 1 0 1 591 0 0 2 480 14 0 1 1 5 153 0 1 0 4 0 67 0 0 0 0 0 53 | 5 0 1 0 6 0 0 0 15 0 1 0 0 0 2 14 0 0 0 7 3 0 0 0 1 1 54 | 0 1 1 2 10 0 1 1 1 0 4 0 30 8 3 9 0 3 13 1 0 0 1 1 0 0 55 | 37 1 1 2 400 6 10 2 177 1 5 15 3 8 19 6 0 1 7 78 529 4 101 0 1 2 56 | 57 | -------------------------------------------------------------------------------- /pymhlib/binvec_solution.py: -------------------------------------------------------------------------------- 1 | """A generic class for solutions that are represented by fixed-length binary vectors.""" 2 | 3 | from abc import ABC 4 | import random 5 | from typing import Tuple 6 | import numpy as np 7 | 8 | from pymhlib.solution import VectorSolution, TObj 9 | 10 | 11 | class BinaryVectorSolution(VectorSolution, ABC): 12 | """Abstract solution class with fixed-length 0/1 vector as solution representation. 13 | 14 | Attributes 15 | - x: 0/1 vector representing a solution 16 | """ 17 | 18 | def __init__(self, length, **kwargs): 19 | """Initializes the solution vector with zeros.""" 20 | super().__init__(length, dtype=bool, **kwargs) 21 | 22 | def dist(self, other: 'BinaryVectorSolution'): 23 | """Return Hamming distance of current solution to other solution.""" 24 | return sum(np.logical_xor(self.x, other.x)) 25 | 26 | def initialize(self, k): 27 | """Random initialization.""" 28 | self.x = np.random.randint(0, 2, len(self.x), dtype=bool) 29 | self.invalidate() 30 | 31 | def k_random_flips(self, k): 32 | """Perform k random flips (i.e. exactly k bits are flipped) and call invalidate.""" 33 | p = random.sample(range(self.inst.n), k) 34 | self.x[p] = np.invert(self.x[p]) 35 | self.invalidate() 36 | 37 | def check(self): 38 | """Check if valid solution. 39 | 40 | Raises ValueError if problem detected. 
41 | """ 42 | super().check() 43 | for v in self.x: 44 | if not 0 <= v <= 1: 45 | raise ValueError("Invalid value in BinaryVectorSolution: {self.x}") 46 | 47 | def k_flip_neighborhood_search(self, k: int, best_improvement: bool) -> bool: 48 | """Perform one major iteration of a k-flip local search, i.e., search one neighborhood. 49 | 50 | If best_improvement is set, the neighborhood is completely searched and a best neighbor is kept; 51 | otherwise the search terminates in a first-improvement manner, i.e., keeping a first encountered 52 | better solution. 53 | 54 | :returns: True if an improved solution has been found. 55 | """ 56 | x = self.x 57 | assert 0 < k <= len(x) 58 | better_found = False 59 | best_sol = self.copy() 60 | perm = np.random.permutation(len(x)) # permutation for randomization of enumeration order 61 | p = np.full(k, -1) # flipped positions 62 | # initialize 63 | i = 0 # current index in p to consider 64 | while i >= 0: 65 | # evaluate solution 66 | if i == k: 67 | if self.is_better(best_sol): 68 | if not best_improvement: 69 | return True 70 | best_sol.copy_from(self) 71 | better_found = True 72 | i -= 1 # backtrack 73 | else: 74 | if p[i] == -1: 75 | # this index has not yet been placed 76 | p[i] = (p[i-1] if i > 0 else -1) + 1 77 | self.flip_variable(perm[p[i]]) 78 | i += 1 # continue with next position (if any) 79 | elif p[i] < len(x) - (k - i): 80 | # further positions to explore with this index 81 | self.flip_variable(perm[p[i]]) 82 | p[i] += 1 83 | self.flip_variable(perm[p[i]]) 84 | i += 1 85 | else: 86 | # we are at the last position with the i-th index, backtrack 87 | self.flip_variable(perm[p[i]]) 88 | p[i] = -1 # unset position 89 | i -= 1 90 | if better_found: 91 | self.copy_from(best_sol) 92 | self.invalidate() 93 | return better_found 94 | 95 | def flip_variable(self, pos: int): 96 | """Flip the variable at position pos and possibly incrementally update objective value or invalidate. 97 | 98 | This generic implementation just calls invalidate() after flipping the variable. 99 | """ 100 | self.x[pos] = not self.x[pos] 101 | self.invalidate() 102 | 103 | def flip_move_delta_eval(self, pos: int) -> TObj: 104 | """Determine delta in objective value when flipping position p. 105 | 106 | Here the solution is evaluated from scratch. If possible, it should be overloaded by a more 107 | efficient delta evaluation. 108 | """ 109 | obj = self.obj() 110 | self.x[pos] = not self.x[pos] 111 | self.invalidate() 112 | delta = self.obj() - obj 113 | self.x[pos] = not self.x[pos] 114 | self.obj_val = obj 115 | return delta 116 | 117 | def random_flip_move_delta_eval(self) -> Tuple[int, TObj]: 118 | """Choose random move in the flip neighborhood and perform delta evaluation, returning (move, delta_obj). 119 | 120 | The solution is not changed here yet. 121 | Primarily used in simulated annealing. 122 | """ 123 | p = random.randrange(len(self.x)) 124 | delta_obj = self.flip_move_delta_eval(p) 125 | return p, delta_obj 126 | -------------------------------------------------------------------------------- /.vscode/.ropeproject/config.py: -------------------------------------------------------------------------------- 1 | # The default ``config.py`` 2 | # flake8: noqa 3 | 4 | 5 | def set_prefs(prefs): 6 | """This function is called before opening the project""" 7 | 8 | # Specify which files and folders to ignore in the project. 9 | # Changes to ignored resources are not added to the history and 10 | # VCSs. Also they are not returned in `Project.get_files()`. 
11 | # Note that ``?`` and ``*`` match all characters but slashes. 12 | # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' 13 | # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' 14 | # '.svn': matches 'pkg/.svn' and all of its children 15 | # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' 16 | # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' 17 | prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', 18 | '.hg', '.svn', '_svn', '.git', '.tox'] 19 | 20 | # Specifies which files should be considered python files. It is 21 | # useful when you have scripts inside your project. Only files 22 | # ending with ``.py`` are considered to be python files by 23 | # default. 24 | # prefs['python_files'] = ['*.py'] 25 | 26 | # Custom source folders: By default rope searches the project 27 | # for finding source folders (folders that should be searched 28 | # for finding modules). You can add paths to that list. Note 29 | # that rope guesses project source folders correctly most of the 30 | # time; use this if you have any problems. 31 | # The folders should be relative to project root and use '/' for 32 | # separating folders regardless of the platform rope is running on. 33 | # 'src/my_source_folder' for instance. 34 | # prefs.add('source_folders', 'src') 35 | 36 | # You can extend python path for looking up modules 37 | # prefs.add('python_path', '~/python/') 38 | 39 | # Should rope save object information or not. 40 | prefs['save_objectdb'] = True 41 | prefs['compress_objectdb'] = False 42 | 43 | # If `True`, rope analyzes each module when it is being saved. 44 | prefs['automatic_soa'] = True 45 | # The depth of calls to follow in static object analysis 46 | prefs['soa_followed_calls'] = 0 47 | 48 | # If `False` when running modules or unit tests "dynamic object 49 | # analysis" is turned off. This makes them much faster. 50 | prefs['perform_doa'] = True 51 | 52 | # Rope can check the validity of its object DB when running. 53 | prefs['validate_objectdb'] = True 54 | 55 | # How many undos to hold? 56 | prefs['max_history_items'] = 32 57 | 58 | # Shows whether to save history across sessions. 59 | prefs['save_history'] = True 60 | prefs['compress_history'] = False 61 | 62 | # Set the number spaces used for indenting. According to 63 | # :PEP:`8`, it is best to use 4 spaces. Since most of rope's 64 | # unit-tests use 4 spaces it is more reliable, too. 65 | prefs['indent_size'] = 4 66 | 67 | # Builtin and c-extension modules that are allowed to be imported 68 | # and inspected by rope. 69 | prefs['extension_modules'] = [] 70 | 71 | # Add all standard c-extensions to extension_modules list. 72 | prefs['import_dynload_stdmods'] = True 73 | 74 | # If `True` modules with syntax errors are considered to be empty. 75 | # The default value is `False`; When `False` syntax errors raise 76 | # `rope.base.exceptions.ModuleSyntaxError` exception. 77 | prefs['ignore_syntax_errors'] = False 78 | 79 | # If `True`, rope ignores unresolvable imports. Otherwise, they 80 | # appear in the importing namespace. 81 | prefs['ignore_bad_imports'] = False 82 | 83 | # If `True`, rope will insert new module imports as 84 | # `from import ` by default. 85 | prefs['prefer_module_from_imports'] = False 86 | 87 | # If `True`, rope will transform a comma list of imports into 88 | # multiple separate import statements when organizing 89 | # imports. 
90 | prefs['split_imports'] = False 91 | 92 | # If `True`, rope will remove all top-level import statements and 93 | # reinsert them at the top of the module when making changes. 94 | prefs['pull_imports_to_top'] = True 95 | 96 | # If `True`, rope will sort imports alphabetically by module name instead 97 | # of alphabetically by import statement, with from imports after normal 98 | # imports. 99 | prefs['sort_imports_alphabetically'] = False 100 | 101 | # Location of implementation of 102 | # rope.base.oi.type_hinting.interfaces.ITypeHintingFactory In general 103 | # case, you don't have to change this value, unless you're an rope expert. 104 | # Change this value to inject you own implementations of interfaces 105 | # listed in module rope.base.oi.type_hinting.providers.interfaces 106 | # For example, you can add you own providers for Django Models, or disable 107 | # the search type-hinting in a class hierarchy, etc. 108 | prefs['type_hinting_factory'] = ( 109 | 'rope.base.oi.type_hinting.factory.default_type_hinting_factory') 110 | 111 | 112 | def project_opened(project): 113 | """This function is called after opening the project""" 114 | # Do whatever you like here! 115 | -------------------------------------------------------------------------------- /pymhlib/tests/test_all.py: -------------------------------------------------------------------------------- 1 | """Basic functionality tests. 2 | 3 | Note that several of the heuristics would need to run for a longer time in order to get a more reasonable solution, 4 | nor are all the applied algorithms really meaningful for the considered problem. 5 | """ 6 | from unittest import TestCase, main 7 | 8 | from pymhlib.demos.common import run_optimization, data_dir, add_general_arguments_and_parse_settings 9 | from pymhlib.settings import get_settings_parser, settings, seed_random_generators 10 | from pymhlib.demos.maxsat import MAXSATInstance, MAXSATSolution 11 | from pymhlib.demos.tsp import TSPInstance, TSPSolution 12 | from pymhlib.demos.graph_coloring import GCInstance, GCSolution 13 | from pymhlib.demos.misp import MISPInstance, MISPSolution 14 | from pymhlib.demos.mkp import MKPInstance, MKPSolution 15 | from pymhlib.demos.qap import QAPInstance, QAPSolution 16 | from pymhlib.demos.vertex_cover import VertexCoverInstance, VertexCoverSolution 17 | 18 | import copy 19 | 20 | parser = get_settings_parser() 21 | add_general_arguments_and_parse_settings(args=[]) 22 | 23 | 24 | class TestAll(TestCase): 25 | """Diverse test functions.""" 26 | # pylint: disable=missing-function-docstring 27 | 28 | def test_maxsat_gvns(self): 29 | seed_random_generators(42) 30 | settings.inst_file = data_dir + "maxsat-adv1.cnf" 31 | settings.alg = 'gvns' 32 | settings.mh_titer = 100 33 | solution = run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution, embedded=True) 34 | self.assertEqual(solution.obj(), 769) 35 | 36 | def test_maxsat_k_random_flip(self): 37 | seed_random_generators(42) 38 | settings.inst_file = data_dir + "maxsat-adv1.cnf" 39 | inst = MAXSATInstance(settings.inst_file) 40 | sol = MAXSATSolution(inst) 41 | k = 30 42 | old = copy.deepcopy(sol.x) 43 | sol.k_random_flips(k) 44 | new = sol.x 45 | ndiff = sum(old != new) 46 | self.assertEqual(ndiff, k) 47 | 48 | def test_tsp_sa(self): 49 | seed_random_generators(42) 50 | settings.inst_file = data_dir + "xqf131.tsp" 51 | settings.alg = 'sa' 52 | settings.mh_titer = 50000 53 | solution = run_optimization('TSP', TSPInstance, TSPSolution, embedded=True) 54 | self.assertEqual(solution.obj(), 2592) 
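    # The tests in this class largely follow the same pattern (comment added for clarity, not an
    # additional test): seed the random number generators, point the global settings at a demo
    # instance file, choose the algorithm and iteration limit, run run_optimization(..., embedded=True),
    # and compare the objective value of the returned solution with the value expected for this seed.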
55 | 56 | def test_tsp_ssga(self): 57 | seed_random_generators(42) 58 | settings.inst_file = data_dir + "xqf131.tsp" 59 | settings.alg = 'ssga' 60 | settings.mh_titer = 500 61 | solution = run_optimization('TSP', TSPInstance, TSPSolution, embedded=True) 62 | self.assertEqual(solution.obj(), 1376) 63 | 64 | def test_graph_coloring_gvns(self): 65 | seed_random_generators(42) 66 | settings.inst_file = data_dir + "fpsol2.i.1.col" 67 | settings.alg = 'gvns' 68 | settings.mh_titer = 500 69 | solution = run_optimization('Graph Coloring', GCInstance, GCSolution, embedded=True) 70 | self.assertEqual(solution.obj(), 1634) 71 | 72 | def test_misp_pbig(self): 73 | seed_random_generators(42) 74 | settings.inst_file = data_dir + "frb40-19-1.mis" 75 | settings.alg = 'pbig' 76 | settings.mh_titer = 500 77 | solution = run_optimization('MISP', MISPInstance, MISPSolution, embedded=True) 78 | self.assertEqual(solution.obj(), 32) 79 | 80 | def test_mkp_gvns(self): 81 | seed_random_generators(42) 82 | settings.inst_file = data_dir + "mknapcb5-01.txt" 83 | settings.alg = 'gvns' 84 | settings.mh_titer = 70 85 | solution = run_optimization('MKP', MKPInstance, MKPSolution, embedded=True) 86 | self.assertEqual(solution.obj(), 55610) 87 | 88 | def test_qap_gvns(self): 89 | seed_random_generators(42) 90 | settings.inst_file = data_dir + 'bur26a.dat' 91 | settings.alg = 'gvns' 92 | settings.mh_titer = 1000 93 | solution = run_optimization('QAP', QAPInstance, QAPSolution, embedded=True) 94 | self.assertEqual(solution.obj(), 5426670) 95 | 96 | def test_vertex_cover_gvns(self): 97 | seed_random_generators(42) 98 | settings.inst_file = data_dir + "frb40-19-1.mis" 99 | settings.alg = 'gvns' 100 | settings.mh_titer = 100 101 | solution = run_optimization('Vertex Cover', VertexCoverInstance, VertexCoverSolution, embedded=True) 102 | self.assertEqual(solution.obj(), 726) 103 | 104 | def test_maxsat_alns(self): 105 | seed_random_generators(42) 106 | settings.inst_file = data_dir + "maxsat-adv1.cnf" 107 | settings.alg = 'alns' 108 | settings.mh_titer = 600 109 | solution = run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution, embedded=True) 110 | self.assertEqual(solution.obj(), 727) 111 | 112 | def test_maxsat_par_alns(self): 113 | seed_random_generators(42) 114 | settings.inst_file = data_dir + "maxsat-adv1.cnf" 115 | settings.alg = 'par_alns' 116 | settings.mh_titer = 600 117 | solution = run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution, embedded=True) 118 | self.assertGreaterEqual(solution.obj(), 0) 119 | 120 | if __name__ == '__main__': 121 | main() 122 | -------------------------------------------------------------------------------- /pymhlib/log.py: -------------------------------------------------------------------------------- 1 | """pymhlib specific logging objects. 2 | 3 | Two logging objects are maintained specifically for the pymhlib: 4 | - logger with name "pymhlib" for the general information and 5 | - iter_logger with name "pymhlib_iter" for the iteration-wise logging. 6 | 7 | init() must be called to initialize this module, i.e., create these objects. 
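A minimal usage sketch (illustrative only; with the default settings all output goes to stdout):

    import logging
    from pymhlib.settings import parse_settings
    from pymhlib.log import init_logger

    parse_settings(args=[])   # --mh_out/--mh_log are registered when importing pymhlib.log
    init_logger()
    logging.getLogger("pymhlib").info("general output")
    logging.getLogger("pymhlib_iter").info("iteration-wise output")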
8 | """ 9 | 10 | import logging 11 | import logging.handlers 12 | import sys 13 | 14 | from pymhlib.settings import settings, get_settings_parser, parse_settings 15 | 16 | parser = get_settings_parser() 17 | parser.add_argument("--mh_out", type=str, default="None", 18 | help='file to write general output into (None: stdout)') 19 | parser.add_argument("--mh_log", type=str, default="None", 20 | help='file to write iteration-wise logging into (None: stdout)') 21 | 22 | 23 | def init_logger(): 24 | """Initialize logger objects.""" 25 | 26 | # logger for general output 27 | logger = logging.getLogger("pymhlib") 28 | formatter = logging.Formatter("%(message)s") 29 | if settings.mh_out == 'None': 30 | handler = logging.StreamHandler(sys.stdout) 31 | else: 32 | handler = logging.FileHandler(settings.mh_out, "w") 33 | handler.setFormatter(formatter) 34 | logger.handlers = [] 35 | logger.addHandler(handler) 36 | logger.setLevel(logging.INFO) 37 | logger.propagate = False 38 | 39 | # logger for iteration-wise output 40 | iter_logger = logging.getLogger("pymhlib_iter") 41 | if settings.mh_log == 'None': 42 | iter_handler = handler 43 | else: 44 | iter_file_handler = logging.FileHandler(settings.mh_log, "w") 45 | iter_file_handler.setFormatter(formatter) 46 | iter_handler = logging.handlers.MemoryHandler( 47 | capacity=1024 * 100, 48 | flushLevel=logging.ERROR, 49 | target=iter_file_handler 50 | ) 51 | iter_handler.setFormatter(formatter) 52 | iter_logger.handlers = [] 53 | iter_logger.addHandler(iter_handler) 54 | iter_logger.propagate = False 55 | iter_logger.setLevel(logging.INFO) 56 | 57 | 58 | class LogLevel: 59 | """Manage indentation of log messages according to specified levels. 60 | 61 | Indentation is most meaningful when embedding optimization algorithms within others. 62 | 63 | This class can also be used as context manager in a with statement. 64 | 65 | If indentation is used and some multi-line log message is written, write Loglevel.s after each "\n" 66 | in order to do the indentation for all lines. 
67 | 68 | Class attributes 69 | - level: level of indentation 70 | - s: actual string used for current indentation 71 | - indent_str: prefix used for each indentation level 72 | - format_str: unindented format string for logging 73 | """ 74 | level = 0 75 | s = "" 76 | indent_str = " > " 77 | format_str = "%(message)s" 78 | 79 | @classmethod 80 | def reset(cls, value=0): 81 | """Reset indentation level to the given value.""" 82 | cls.level = value 83 | cls.set_format() 84 | 85 | @classmethod 86 | def increase(cls): 87 | """Increase indentation level by one.""" 88 | cls.level += 1 89 | cls.set_format() 90 | 91 | @classmethod 92 | def decrease(cls): 93 | """Decrease indentation level by one.""" 94 | cls.level -= 1 95 | assert cls.level >= 0 96 | cls.set_format() 97 | 98 | @classmethod 99 | def set_format(cls): 100 | """Activate the format for the currently set level.""" 101 | cls.s = cls.indent_str * cls.level 102 | format_str = cls.s + cls.format_str 103 | formatter = logging.Formatter(format_str) 104 | for name in ['pymhlib', 'pymhlib_iter']: 105 | logger = logging.getLogger(name) 106 | for h in logger.handlers: 107 | h.setFormatter(formatter) 108 | 109 | def __enter__(self): 110 | """When used as context manager in with statement and entering context, increase level.""" 111 | self.increase() 112 | return self 113 | 114 | def __exit__(self, exc_type, exc_val, exc_tb): 115 | """When used as context manager in with statement and leaving context, decrease level.""" 116 | self.decrease() 117 | 118 | @classmethod 119 | def indent(cls, s: str) -> str: 120 | """Correctly indent the given string, which may be a multi-line message.""" 121 | return cls.s + s.replace('\n', f'\n{cls.s}') 122 | 123 | 124 | def test(): 125 | """Some basic module tests.""" 126 | init_logger() 127 | logger = logging.getLogger("pymhlib") 128 | logger.info('This is an info to logger') 129 | logger.error('This is an error to logger') 130 | iter_logger = logging.getLogger("pymhlib_iter") 131 | iter_logger.info('This is an info to iter_logger') 132 | iter_logger.error('This is an error to iter_logger') 133 | LogLevel.increase() 134 | logger.info('This is an info to logger at level 1') 135 | LogLevel.increase() 136 | logger.info('This is an info to logger at level 2') 137 | LogLevel.decrease() 138 | LogLevel.decrease() 139 | logger.info('This is an info to logger at level 0') 140 | 141 | 142 | if __name__ == "__main__": 143 | parse_settings() 144 | test() 145 | -------------------------------------------------------------------------------- /pymhlib/sa.py: -------------------------------------------------------------------------------- 1 | """A class implementing a simulated annealing (SA) metaheuristic. 2 | 3 | It extends the more general scheduler module/class. Allows for callbacks after each iteration. 4 | Parameter mh_sa_equi_iter controls how many random neighbor moves are investigated at each 5 | temperature level, and such a series of moves is considered one method call of the scheduler. 6 | 7 | From the demo applications, only the TSP, QAP and MAXSAT support SA so far.
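A rough usage sketch for the TSP demo (illustrative only; it assumes that the settings have already
been registered and parsed, e.g., via pymhlib.demos.common.add_general_arguments_and_parse_settings):

    from pymhlib.sa import SA
    from pymhlib.scheduler import Method
    from pymhlib.demos.common import data_dir
    from pymhlib.demos.tsp import TSPInstance, TSPSolution

    sol = TSPSolution(TSPInstance(data_dir + "xqf131.tsp"))
    alg = SA(sol, [Method("ch0", TSPSolution.construct, 0)],
             TSPSolution.random_move_delta_eval, TSPSolution.apply_neighborhood_move, iter_cb=None)
    alg.run()
    print(alg.incumbent, alg.incumbent.obj())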
8 | """ 9 | 10 | from typing import List, Callable 11 | import time 12 | from math import exp 13 | import numpy as np 14 | 15 | from pymhlib.scheduler import Method, Scheduler, MethodStatistics 16 | from pymhlib.settings import get_settings_parser 17 | from pymhlib.solution import Solution, TObj 18 | 19 | parser = get_settings_parser() 20 | parser.add_argument("--mh_sa_T_init", type=float, default=30, 21 | help='SA initial temperature') 22 | parser.add_argument("--mh_sa_alpha", type=float, default=0.95, 23 | help='SA alpha for geometric cooling') 24 | parser.add_argument("--mh_sa_equi_iter", type=int, default=10000, 25 | help='SA iterations until equilibrium') 26 | 27 | 28 | class SA(Scheduler): 29 | """A simulated annealing metaheuristic (SA). 30 | 31 | Parameter mh_sa_equi_iter controls how many random neighbor moves are investigated at each 32 | temperature level, and such a series of moves is considered one method call of the scheduler. 33 | 34 | Attributes 35 | - sol: solution object, in which final result will be returned 36 | - meths_ch: list of construction heuristic methods 37 | - random_move_delta_eval: propose neighborhood move method 38 | - apply_neighborhood_move: apply neighborhood move method return by propose method 39 | - iter_cb: callback for each iteration passing iteration number, proposed sol, accepted sol, temperature, 40 | and acceptance 41 | - temperature: current temperature 42 | - equi_iter: iterations until equilibrium 43 | """ 44 | 45 | def __init__(self, sol: Solution, meths_ch: List[Method], random_move_delta_eval: Callable, 46 | apply_neighborhood_move: Callable, iter_cb: Callable, own_settings: dict = None, 47 | consider_initial_sol=False): 48 | """Initialization. 49 | 50 | :param sol: solution to be improved 51 | :param meths_ch: list of construction heuristic methods 52 | :param random_move_delta_eval: function that chooses a random move and determines the delta in the obj_val 53 | :param apply_neighborhood_move: apply neighborhood move method return by propose method 54 | :param iter_cb: callback for each iteration passing iteration number, proposed sol, accepted sol, temperature, 55 | and acceptance 56 | :param own_settings: optional dictionary with specific settings 57 | :param consider_initial_sol: if true consider sol as valid solution that should be improved upon; otherwise 58 | sol is considered just a possibly uninitialized of invalid solution template 59 | """ 60 | super().__init__(sol, meths_ch, own_settings, consider_initial_sol) 61 | self.meths_ch = meths_ch 62 | self.random_move_delta_eval = random_move_delta_eval 63 | self.apply_neighborhood_move = apply_neighborhood_move 64 | self.method_stats['sa'] = MethodStatistics() 65 | self.iter_cb = iter_cb 66 | self.temperature = self.own_settings.mh_sa_T_init 67 | self.equi_iter = self.own_settings.mh_sa_equi_iter 68 | 69 | def metropolis_criterion(self, sol, delta_obj:TObj) -> bool: 70 | """Apply Metropolis criterion as acceptance decision determined by delta_obj and current temperature.""" 71 | if sol.is_better_obj(delta_obj, 0): 72 | return True 73 | return np.random.random_sample() <= exp(-abs(delta_obj) / self.temperature) 74 | 75 | def cool_down(self): 76 | """Apply geometric cooling.""" 77 | self.temperature *= self.own_settings.mh_sa_alpha 78 | 79 | def sa(self, sol: Solution): 80 | """Perform simulated annealing with geometric cooling on given solution.""" 81 | 82 | def sa_iteration(sol: Solution, _par, result): 83 | neighborhood_move, delta_obj = self.random_move_delta_eval(sol) 84 | 
acceptance = self.metropolis_criterion(sol, delta_obj) 85 | if acceptance: 86 | self.apply_neighborhood_move(sol, neighborhood_move) 87 | sol.obj_val = sol.obj_val + delta_obj 88 | result.changed = True 89 | if self.iter_cb is not None: 90 | self.iter_cb(self.iteration, sol, self.temperature, acceptance) 91 | sa_method = Method("sa", sa_iteration, 0) 92 | 93 | while True: 94 | for _ in range(self.equi_iter): 95 | t_start = time.process_time() 96 | obj_old = self.incumbent.obj() 97 | res = self.perform_method(sa_method, sol, delayed_success=True) 98 | self.delayed_success_update(sa_method, obj_old, t_start, sol) 99 | if res.terminate: 100 | return True 101 | self.cool_down() 102 | 103 | def run(self) -> None: 104 | """Actually performs the construction heuristics followed by the SA.""" 105 | sol = self.incumbent.copy() 106 | assert self.incumbent_valid or self.meths_ch 107 | self.perform_sequentially(sol, self.meths_ch) 108 | self.sa(sol) 109 | -------------------------------------------------------------------------------- /pymhlib/multi_run_summary.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | r"""Summarizes essential information from multiple pymhlib algorithm runs found in the respective out and log files. 3 | 4 | The information to be extracted from each out-file is specified by the list to_fetch containing tuples, 5 | where the first element is some numeric value indicating the order in which the elements are appearing in each out-file, 6 | the second element is a name, and the third element is a regular expression for extracting the element. 7 | Values >= 100 indicate that also the corresponding log-file needs to be searched and the corresponding elements 8 | are to be extracted from there. 9 | 10 | Example for a YAML config file: 11 | 12 | fetch: 13 | '[ 14 | (10, "obj", r"^T best obj:\s(\d+\.?\d*)"), 15 | (30, "ittot", r"^T total iterations:\s(\d+\.?\d*)"), 16 | (20, "itbest", r"^T best iteration:\s(\d+\.?\d*)"), 17 | (50, "ttot", r"^T total time \[s\]:\s(\d+\.?\d*)"), 18 | (40, "tbest", r"T best time \[s\]:\s(\d+\.?\d*)"), 19 | ]' 20 | """ 21 | 22 | import glob 23 | import os 24 | import re 25 | from dataclasses import dataclass 26 | from typing import Any, List 27 | from pandas import DataFrame 28 | import configargparse as p 29 | 30 | 31 | # Configuration of what information to extract from the out/log files. 32 | fetch = [ 33 | (10, 'obj', r'^T best obj:\s(\d+\.?\d*)'), 34 | (30, 'ittot', r'^T total iterations:\s(\d+\.?\d*)'), 35 | (20, 'itbest', r'^T best iteration:\s(\d+\.?\d*)'), 36 | (50, 'ttot', r'^T total time \[s\]:\s(\d+\.?\d*)'), 37 | (40, 'tbest', r'^T best time \[s\]:\s(\d+\.?\d*)'), 38 | (110, 'obj0', r'^I\s+0\s+(\d+.?\d*)'), 39 | (120, 'obj1', r'^I\s+1\s+(\d+.?\d*)'), 40 | ] 41 | 42 | 43 | @dataclass 44 | class Data: 45 | """Information on data to be collected and collected list of values.""" 46 | nr_to_fetch: int 47 | name: str 48 | reg_exp: str 49 | reg_exp_compiled: Any 50 | values: List 51 | 52 | 53 | def _parse_file(file: str, fetch_item: Data, fetch_iter) -> bool: 54 | """Parse `file`, looking for `fetch_item` and when found take next `fetch_item` from `fetch_iter`. 
55 | 56 | :return: True when all information found, else False 57 | """ 58 | # print(file) 59 | with open(file) as f: 60 | for line in f: 61 | while True: 62 | m = re.match(fetch_item.reg_exp_compiled, line) 63 | if m: 64 | fetch_item.values.append(float(m[1])) 65 | try: 66 | fetch_item = next(fetch_iter) 67 | except StopIteration: 68 | return True 69 | else: 70 | break 71 | return False 72 | 73 | 74 | def parse_files(paths: List[str], to_fetch=None) -> DataFrame: 75 | """Process list of files/directories or a single file/directory and return resulting dataframe.""" 76 | if not to_fetch: 77 | to_fetch = fetch[:-2] 78 | files = [] 79 | if isinstance(paths, str): 80 | paths = [paths] 81 | for path in paths: 82 | if os.path.isdir(path): 83 | files.extend(f for f in glob.glob(path + "**/*.out", recursive=True)) 84 | else: 85 | files.append(path) 86 | to_fetch_data = [Data(_fetch[0], _fetch[1], _fetch[2], re.compile(_fetch[2]), []) for _fetch in to_fetch] 87 | to_fetch_data_sorted = sorted(to_fetch_data, key=lambda d: d.nr_to_fetch) 88 | fully_parsed_files = [] 89 | for file in files: 90 | # process out-file 91 | # print(file) 92 | fetch_iter = iter(to_fetch_data_sorted) 93 | fetch_item = next(fetch_iter) 94 | completed = _parse_file(file, fetch_item, fetch_iter) 95 | if not completed and fetch_item.nr_to_fetch >= 100: 96 | # also process corresponding log file 97 | log_file = re.sub("(.out)$", ".log", file) 98 | completed = _parse_file(log_file, fetch_item, fetch_iter) 99 | if not completed: 100 | # remove partially extracted information 101 | length = len(to_fetch_data_sorted[-1].values) 102 | for f in to_fetch_data_sorted: 103 | del f.values[length:] 104 | else: 105 | fully_parsed_files.append(file) 106 | df = DataFrame({f.name: f.values for f in to_fetch_data}) 107 | df.insert(0, 'file', fully_parsed_files) 108 | df.set_index('file', inplace=True) 109 | return df 110 | 111 | 112 | def main(): 113 | """Main program for summarizing results from .out and .log files.""" 114 | to_fetch = fetch 115 | parser = p.ArgumentParser(description='Summarize results for multiple pymhlib runs from their .out files.', 116 | config_file_parser_class=p.YAMLConfigFileParser, 117 | default_config_files=['multi-run-summary.cfg']) 118 | parser.add_argument('paths', type=str, nargs='+', 119 | help='a .out file or directory (tree) containing .out files') 120 | parser.add_argument('--log', type=bool, default=False, help='also process corresponding .log files') 121 | parser.add_argument('--fetch', type=str, default=None, 122 | help='list of tuples specifying what information to fetch') 123 | parser.add_argument('-c', '--config', is_config_file=True, help='YAML-config file to be read') 124 | 125 | args = parser.parse_args() 126 | if args.fetch: 127 | # pylint: disable=eval-used 128 | to_fetch = eval(args.fetch) 129 | else: 130 | if not args.log: 131 | del fetch[-2:] 132 | df = parse_files(args.paths, to_fetch) 133 | print(df.to_csv(sep='\t')) 134 | 135 | 136 | if __name__ == '__main__': 137 | main() 138 | -------------------------------------------------------------------------------- /pymhlib/demos/misp.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the maximum (weighted) independent set problem (MISP). 2 | 3 | Give an undirected (weighted) graph, find a maximum cardinality subset of nodes where 4 | no pair of nodes is adjacent in the graph. 
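Stated as an integer program (standard formulation, given here only for clarity):

    max  sum_{v in V} p_v * x_v
    s.t. x_u + x_v <= 1    for every edge {u, v} of the graph
         x_v in {0, 1}     for every node v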
5 | """ 6 | 7 | from typing import Any, Tuple 8 | import numpy as np 9 | 10 | from pymhlib.solution import TObj 11 | from pymhlib.subsetvec_solution import SubsetVectorSolution 12 | from pymhlib.scheduler import Result 13 | from pymhlib.demos.graphs import create_or_read_simple_graph 14 | 15 | 16 | class MISPInstance: 17 | """Maximum (weighted) independent set problem (MISP) instance. 18 | 19 | Give an undirected (weighted) graph, find a maximum cardinality subset of nodes where 20 | no pair of nodes is adjacent in the graph. 21 | 22 | Attributes 23 | - graph: undirected unweighted graph to consider 24 | - n: number of nodes 25 | - m number of edges 26 | - p: prices (weights) of items 27 | """ 28 | 29 | def __init__(self, name: str): 30 | """Create or read graph with given name. 31 | 32 | So far we only create unweighted MISP instances here. 33 | """ 34 | self.graph = create_or_read_simple_graph(name) 35 | self.n = self.graph.number_of_nodes() 36 | self.m = self.graph.number_of_edges() 37 | self.p = np.ones(self.n, dtype=int) 38 | 39 | def __repr__(self): 40 | return f"n={self.n} m={self.m}\n" 41 | 42 | 43 | class MISPSolution(SubsetVectorSolution): 44 | """Solution to a MISP instance. 45 | 46 | Additional attributes 47 | - covered: for each node the number of selected neighbor nodes plus one if the node itself is selected 48 | """ 49 | 50 | to_maximize = True 51 | 52 | def __init__(self, inst: MISPInstance): 53 | super().__init__(range(inst.n), inst=inst) 54 | self.covered = np.zeros(inst.n, dtype=int) 55 | 56 | @classmethod 57 | def unselected_elems_in_x(cls) -> bool: 58 | return False 59 | 60 | def copy(self): 61 | sol = MISPSolution(self.inst) 62 | sol.copy_from(self) 63 | return sol 64 | 65 | def copy_from(self, other: 'MISPSolution'): 66 | super().copy_from(other) 67 | self.covered[:] = other.covered 68 | 69 | def calc_objective(self): 70 | return np.sum(self.inst.p[self.x[:self.sel]]) if self.sel else 0 71 | 72 | def check(self, unsorted=False): 73 | super().check(unsorted) 74 | selected = set(self.x[:self.sel]) 75 | for u, v in self.inst.graph.edges: 76 | if u in selected and v in selected: 77 | raise ValueError(f"Invalid solution - adjacent nodes selected: {u}, {v}") 78 | new_covered = np.zeros(self.inst.n, dtype=int) 79 | for u in self.x[:self.sel]: 80 | new_covered[u] += 1 81 | for v in self.inst.graph.neighbors(u): 82 | new_covered[v] += 1 83 | if np.any(self.covered != new_covered): 84 | raise ValueError(f"Invalid covered values in solution: {self.covered}") 85 | 86 | def clear(self): 87 | super().clear() 88 | self.covered.fill(0) 89 | 90 | def construct(self, par: Any, _result: Result): 91 | """Scheduler method that constructs a new solution. 92 | 93 | Here we just call initialize. 
94 | """ 95 | self.initialize(par) 96 | 97 | def local_improve(self, _par: Any, result: Result): 98 | """Scheduler method that performs one iteration of the exchange neighborhood.""" 99 | if not self.two_exchange_random_fill_neighborhood_search(False): 100 | result.changed = False 101 | 102 | def shaking(self, par: Any, _result: Result): 103 | """Scheduler method that performs shaking by remove_some(par) and random_fill().""" 104 | self.remove_some(par) 105 | self.fill(list(np.nonzero(self.covered == 0)[0])) 106 | 107 | def may_be_extendible(self) -> bool: 108 | return np.any(self.covered == 0) 109 | 110 | def element_removed_delta_eval(self, update_obj_val=True, allow_infeasible=False) -> bool: 111 | u = self.x[self.sel] 112 | self.covered[u] -= 1 113 | for v in self.inst.graph.neighbors(u): 114 | self.covered[v] -= 1 115 | if update_obj_val: 116 | self.obj_val -= self.inst.p[u] 117 | return True 118 | 119 | def element_added_delta_eval(self, update_obj_val=True, allow_infeasible=False) -> bool: 120 | u = self.x[self.sel-1] 121 | if allow_infeasible or not self.covered[u]: 122 | # accept 123 | self.covered[u] += 1 124 | for v in self.inst.graph.neighbors(u): 125 | self.covered[v] += 1 126 | if update_obj_val: 127 | self.obj_val += self.inst.p[u] 128 | return self.covered[u] == 1 129 | # revert 130 | self.sel -= 1 131 | return False 132 | 133 | def random_move_delta_eval(self) -> Tuple[int, TObj]: 134 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 135 | raise NotImplementedError 136 | 137 | def apply_neighborhood_move(self, pos: int): 138 | """This method applies a given neighborhood move accepted by SA, 139 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 140 | raise NotImplementedError 141 | 142 | def crossover(self, other: 'MISPSolution') -> 'MISPSolution': 143 | """Apply subset_crossover.""" 144 | return self.subset_crossover(other) 145 | 146 | 147 | if __name__ == '__main__': 148 | from pymhlib.demos.common import run_optimization, data_dir 149 | from pymhlib.settings import get_settings_parser 150 | parser = get_settings_parser() 151 | run_optimization('MISP', MISPInstance, MISPSolution, data_dir + "frb40-19-1.mis") 152 | -------------------------------------------------------------------------------- /pymhlib/demos/qap.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the Quadratic Assignment Problem (QAP). 2 | 3 | There are a set of n facilities and a set of n locations. For each pair of locations, 4 | a distance is specified and for each pair of facilities a flow is given 5 | (e.g., the amount of supplies transported between the two facilities). 6 | The task is to assign all facilities to different locations with the goal of minimizing 7 | the sum of the distances multiplied by the corresponding flows. 8 | """ 9 | 10 | import random 11 | from typing import Any, Tuple 12 | import numpy as np 13 | 14 | from pymhlib.permutation_solution import PermutationSolution 15 | from pymhlib.scheduler import Result 16 | from pymhlib.solution import TObj 17 | 18 | 19 | class QAPInstance: 20 | """Quadratic Assignment Problem (QAP) instance. 21 | 22 | There are a set of n facilities and a set of n locations. For each pair of locations, 23 | a distance is specified and for each pair of facilities a flow is given 24 | (e.g., the amount of supplies transported between the two facilities). 
25 | The task is to assign all facilities to different locations with the goal of minimizing 26 | the sum of the distances multiplied by the corresponding flows. 27 | 28 | Attributes 29 | - n: instance size 30 | - a: distance matrix 31 | - b: flow matrix 32 | """ 33 | 34 | def __init__(self, file_name: str): 35 | """Read an instance from the specified file.""" 36 | self.n = 0 37 | with open(file_name, "r") as file: 38 | self.n = int(file.readline()) 39 | if not 2 <= self.n <= 1000: 40 | raise ValueError(f"Invalid n read from file {file_name}: {self.n}") 41 | self.a = np.empty([self.n, self.n], dtype=int) 42 | self.b = np.empty([self.n, self.n], dtype=int) 43 | file.readline() # skip empty line 44 | for i in range(self.n): 45 | line = file.readline() 46 | self.a[i] = [int(aij) for aij in line.split()] 47 | file.readline() 48 | for i in range(self.n): 49 | line = file.readline() 50 | self.b[i] = [int(bij) for bij in line.split()] 51 | 52 | def __repr__(self): 53 | return f"n={self.n}\n" #,\na={self.a},\nb={self.b}\n" 54 | 55 | 56 | class QAPSolution(PermutationSolution): 57 | """Solution to a QAP instance. 58 | 59 | Attributes 60 | - inst: associated QAPInstance 61 | - x: integer vector representing a permutation 62 | """ 63 | 64 | to_maximize = False 65 | 66 | def __init__(self, inst: QAPInstance, **kwargs): 67 | """Initializes the solution with 0,...,n-1 if init is set.""" 68 | super().__init__(inst.n, inst=inst, **kwargs) 69 | 70 | def copy(self): 71 | sol = QAPSolution(self.inst, init=False) 72 | sol.copy_from(self) 73 | return sol 74 | 75 | def copy_from(self, other: 'QAPSolution'): 76 | super().copy_from(other) 77 | 78 | def calc_objective(self): 79 | obj = np.einsum('ij,ij', self.inst.a, self.inst.b[self.x][:, self.x]) 80 | return obj 81 | 82 | def construct(self, par: Any, _result: Result): 83 | """Scheduler method that constructs a new solution. 84 | 85 | Here we just call initialize. 86 | """ 87 | self.initialize(par) 88 | 89 | def local_improve(self, _par: Any, _result: Result): 90 | """Perform one major iteration of local search in the 2-exchange neighborhood.""" 91 | self.two_exchange_neighborhood_search(False) 92 | 93 | def shaking(self, par: Any, _result: Result): 94 | """Scheduler method that performs shaking by par random 2-exchange moves.""" 95 | for _ in range(par): 96 | p1 = random.randrange(0, self.inst.n) 97 | p2 = random.randrange(0, self.inst.n) 98 | self.x[p1], self.x[p2] = self.x[p2], self.x[p1] 99 | self.invalidate() 100 | 101 | def two_exchange_move_delta_eval(self, p1: int, p2: int) -> TObj: 102 | """Return delta value in objective when exchanging positions p1 and p2 in self.x. 103 | 104 | The solution is not changed. 
105 | """ 106 | x = self.x 107 | a = self.inst.a 108 | b = self.inst.b 109 | d = np.inner(a[:, p1] - a[:, p2], b[x, x[p2]] - b[x, x[p1]]) - \ 110 | (a[p1, p1] - a[p1, p2]) * (b[x[p1], x[p2]] - b[x[p1], x[p1]]) - \ 111 | (a[p2, p1] - a[p2, p2]) * (b[x[p2], x[p2]] - b[x[p2], x[p1]]) 112 | d += np.inner(a[p1, :] - a[p2, :], b[x[p2], x] - b[x[p1], x]) - \ 113 | (a[p1, p1] - a[p2, p1]) * (b[x[p2], x[p1]] - b[x[p1], x[p1]]) - \ 114 | (a[p1, p2] - a[p2, p2]) * (b[x[p2], x[p2]] - b[x[p1], x[p2]]) 115 | d += (a[p1, p1] - a[p2, p2]) * (b[x[p2], x[p2]] - b[x[p1], x[p1]]) + \ 116 | (a[p1, p2] - a[p2, p1]) * (b[x[p2], x[p1]] - b[x[p1], x[p2]]) 117 | return d 118 | 119 | def random_move_delta_eval(self) -> Tuple[Any, TObj]: 120 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 121 | return self.random_two_exchange_move_delta_eval() 122 | 123 | def apply_neighborhood_move(self, move): 124 | """This method applies a given neighborhood move accepted by SA, 125 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 126 | p1, p2 = move 127 | x = self.x 128 | x[p1], x[p2] = x[p2], x[p1] 129 | 130 | def crossover(self, other: 'QAPSolution') -> 'QAPSolution': 131 | """Perform cycle crossover.""" 132 | # return self.partially_mapped_crossover(other) 133 | return self.cycle_crossover(other) 134 | 135 | 136 | if __name__ == '__main__': 137 | from pymhlib.demos.common import run_optimization, data_dir 138 | from pymhlib.settings import get_settings_parser 139 | parser = get_settings_parser() 140 | run_optimization('QAP', QAPInstance, QAPSolution, data_dir+'bur26a.dat') 141 | -------------------------------------------------------------------------------- /pymhlib/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Provides configuration file and command line argument parsing functionality to all modules. 3 | 4 | Parameters can be decentrally defined in any module by getting the global parser via get_settings_parser 5 | and registering them by add_argument(). parse_settings() needs to be called one in the main program, then 6 | all parameters are available under the global Namespace settings. 7 | If sys.argv shall not be used, e.g., because pymhlib is embedded in some framework like Django or 8 | a Jupyter notebook, pass "" as args (or some meaningful initialization parameters). 9 | 10 | For the usage of config files see the documentation of configargparse or call the program with -h. 11 | """ 12 | 13 | import pickle 14 | import random 15 | import numpy as np 16 | from configargparse import ArgParser, Namespace, ArgumentDefaultsRawHelpFormatter 17 | 18 | 19 | settings = Namespace() # global Namespace with all settings 20 | unknown_args = [] # global list with all unknown parameters 21 | _parser = None # single global settings parser 22 | 23 | 24 | def get_settings_parser() -> ArgParser: 25 | """Returns the single global argument parser for adding parameters. 26 | 27 | Parameters can be added in all modules by add_argument. 28 | After calling parse() once in the main program, all settings 29 | are available in the global settings dictionary. 
30 | """ 31 | global _parser # pylint: disable=global-statement 32 | if not _parser: 33 | _parser = ArgParser( # default_config_files=["default.cfg"], 34 | formatter_class=ArgumentDefaultsRawHelpFormatter) 35 | _parser.set_defaults(seed=0) 36 | return _parser 37 | 38 | 39 | def boolArg(v): 40 | """Own boolean type for arguments, which converts a string into a bool. 41 | 42 | Provide it as type in add_argument. 43 | """ 44 | if isinstance(v, bool): 45 | return v 46 | if v.lower() in ('yes', 'true', 't', 'y', '1'): 47 | return True 48 | if v.lower() in ('no', 'false', 'f', 'n', '0'): 49 | return False 50 | raise ValueError('Boolean value expected.') 51 | 52 | 53 | def parse_settings(args=None, return_unknown=False, default_config_files=None, seed=0): 54 | """Parses the config files and command line arguments and initializes settings and unknown_parameters. 55 | 56 | Needs to be called once in the main program, or more generally after all arguments have been added to the parser 57 | and before they are used. 58 | Also seeds the random number generators based on parameter seed. 59 | If sys.argv shall not be used, e.g., because pymhlib is embedded in some framework like Django or 60 | a Jupyter notebook, pass "" as args (or some meaningful initialization parameters). 61 | 62 | :param args: optional sequence of string arguments; if None sys.argv is used 63 | :param return_unknown: return unknown parameters as list in global variable unknown_args; otherwise raise exception 64 | :param default_config_files: list of default config files to read 65 | :param seed: Seed value for initializing random number generators, 0: random 66 | """ 67 | global settings, unknown_args # pylint: disable=global-statement 68 | p = get_settings_parser() 69 | if not settings.__dict__: 70 | p.add_argument('--seed', type=int, help='seed for the random number generators (0: random init)', default=seed) 71 | p.add_argument('-c', '--config', is_config_file=True, help='config file to be read') 72 | else: 73 | # parse_settings has already been called once, reset 74 | settings.__dict__.clear() 75 | setattr(settings, "seed", seed) 76 | 77 | p._default_config_files = default_config_files if default_config_files else [] # pylint: disable=protected-access 78 | if return_unknown: 79 | _, unknown_args[:] = p.parse_known_args(args=args, namespace=settings) 80 | else: 81 | p.parse_args(args=args, namespace=settings) 82 | 83 | seed_random_generators() 84 | 85 | 86 | def set_settings(s: Namespace): 87 | """Adopt given settings. 88 | 89 | Used, for example in child processes to adopt settings from parent process. 90 | """ 91 | settings.__dict__ = s.__dict__ 92 | seed_random_generators() 93 | 94 | 95 | def seed_random_generators(seed=None): 96 | """Initialize random number generators with settings.seed or the given value; if zero, a random seed is generated. 
97 | """ 98 | if seed is not None: 99 | settings.seed = seed 100 | if settings.seed == 0: 101 | np.random.seed(None) 102 | settings.seed = np.random.randint(np.iinfo(np.int32).max) 103 | np.random.seed(settings.seed) 104 | random.seed(settings.seed) 105 | 106 | 107 | def save_settings(filename): 108 | """Save settings to given binary file.""" 109 | with open(filename, 'wb') as f: 110 | pickle.dump(settings, f, pickle.HIGHEST_PROTOCOL) 111 | 112 | 113 | def load_settings(filename): 114 | """Load settings from given binary file.""" 115 | with open(filename, 'rb') as f: 116 | global settings # pylint: disable=global-statement 117 | settings.__dict__ = vars(pickle.load(f)) 118 | seed_random_generators() 119 | 120 | 121 | def get_settings_as_str(): 122 | """Get all parameters and their values as descriptive multi-line string.""" 123 | s = "\nsettings:\n" 124 | for key, value in sorted(vars(settings).items()): 125 | s += f"{key}={value}\n" 126 | return s 127 | 128 | 129 | class OwnSettings: 130 | """An individualized settings storage, which falls back to the default setting for not provided parameters.""" 131 | 132 | def __init__(self, own_settings: dict = None): 133 | self.__dict__ = own_settings if own_settings else dict() 134 | 135 | def __getattr__(self, item): 136 | try: 137 | return self.__dict__[item] 138 | except KeyError: 139 | val = settings.__getattribute__(item) 140 | self.__setattr__(item, val) 141 | return val 142 | -------------------------------------------------------------------------------- /pymhlib/demos/graph_coloring.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the graph coloring problem. 2 | 3 | Given a graph and an number of colors, color each node with one color so that 4 | the number of adjacent nodes having the same color is minimized. 5 | """ 6 | 7 | from typing import Any, Tuple 8 | import numpy as np 9 | 10 | from pymhlib.solution import VectorSolution, TObj 11 | from pymhlib.scheduler import Result 12 | from pymhlib.settings import get_settings_parser, settings 13 | from pymhlib.demos.graphs import create_or_read_simple_graph 14 | 15 | parser = get_settings_parser() 16 | parser.add_argument("--mh_gcp_colors", type=int, default=3, help='number of colors available') 17 | 18 | 19 | class GCInstance: 20 | """Graph coloring problem instance. 21 | 22 | Given a graph and an number of colors, color each node with one color so that 23 | the number of adjacent nodes having the same color is minimized. 24 | 25 | Attributes 26 | - graph: the graph we want to color 27 | - n: number of nodes 28 | - m number of edges 29 | - colors: number of colors 30 | """ 31 | 32 | def __init__(self, name: str): 33 | """Create or read graph with given name.""" 34 | self.graph = create_or_read_simple_graph(name) 35 | self.n = self.graph.number_of_nodes() 36 | self.m = self.graph.number_of_edges() 37 | self.colors = settings.mh_gcp_colors 38 | 39 | def __repr__(self): 40 | """Write out the instance data.""" 41 | return f"n={self.n} m={self.m} c={self.colors}\n" 42 | 43 | 44 | class GCSolution(VectorSolution): 45 | """Solution to a graph coloring problem instance. 
46 | 47 | Attributes 48 | - x: for each node the color that is assigned to it 49 | """ 50 | 51 | to_maximize = False 52 | 53 | def __init__(self, inst: GCInstance): 54 | super().__init__(inst.n, inst=inst) 55 | 56 | def copy(self): 57 | sol = GCSolution(self.inst) 58 | sol.copy_from(self) 59 | return sol 60 | 61 | def copy_from(self, other: 'GCSolution'): 62 | super().copy_from(other) 63 | 64 | def calc_objective(self): 65 | violations = 0 66 | for u, v in self.inst.graph.edges: 67 | if self.x[u] == self.x[v]: 68 | violations += 1 69 | return violations 70 | 71 | def check(self): 72 | """Check if valid solution. 73 | 74 | :raises ValueError: if problem detected. 75 | """ 76 | if len(self.x) != self.inst.n: 77 | raise ValueError("Invalid length of solution") 78 | super().check() 79 | 80 | def construct(self, par, _result): 81 | """Scheduler method that constructs a new solution. 82 | 83 | Here we just call initialize. 84 | """ 85 | self.initialize(par) 86 | 87 | def local_improve(self, _par: Any, result: Result): 88 | """Scheduler method that performs one iteration of a local search following a first improvement strategy. 89 | The neighborhood used is defined by all solutions that can be created by changing the color 90 | of a vertex involved in a conflict. 91 | """ 92 | n = len(self.x) 93 | order = np.arange(n) 94 | np.random.shuffle(order) 95 | for p in order: 96 | nbh_col = {} 97 | for col in range(self.inst.colors): 98 | nbh_col[col] = 0 99 | for adj in self.inst.graph.adj[p]: 100 | nbh_col[self.x[adj]] += 1 101 | old_col = self.x[p] 102 | if nbh_col[old_col] > 0: 103 | # violation found 104 | for new_col in range(self.inst.colors): 105 | if nbh_col[new_col] < nbh_col[old_col]: 106 | # Possible improvement found 107 | self.x[p] = new_col 108 | self.obj_val -= nbh_col[old_col] 109 | self.obj_val += nbh_col[new_col] 110 | result.changed = True 111 | return 112 | result.changed = False 113 | 114 | def shaking(self, par: Any, result: Result): 115 | """Scheduler method that performs shaking by randomly assigning a different color 116 | to 'par' many random vertices that are involved in conflicts. 
117 | """ 118 | 119 | under_conflict = [] 120 | result.changed = False 121 | 122 | for u in range(len(self.x)): 123 | for v in self.inst.graph.adj[u]: 124 | if self.x[u] == self.x[v]: 125 | # Conflict found 126 | under_conflict.append(u) 127 | break 128 | 129 | for _ in range(par): 130 | if len(under_conflict) == 0: 131 | return 132 | 133 | u = np.random.choice(under_conflict) 134 | # Pick random color (different from current) 135 | rand_col = np.random.randint(0, self.inst.colors - 1) 136 | 137 | if rand_col >= self.x[u]: 138 | rand_col += 1 139 | 140 | self.x[u] = rand_col 141 | self.invalidate() 142 | result.changed = True 143 | 144 | # Prevent this vertex from getting changed again 145 | under_conflict.remove(u) 146 | 147 | def initialize(self, _k): 148 | """Initialize solution vector with random colors.""" 149 | self.x = np.random.randint(self.inst.colors, size=len(self.x)) 150 | self.invalidate() 151 | 152 | def random_move_delta_eval(self) -> Tuple[int, int, TObj]: 153 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 154 | raise NotImplementedError 155 | 156 | def apply_neighborhood_move(self, pos, color: int): 157 | """This method applies a given neighborhood move accepted by SA, 158 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 159 | self.x[pos] = color 160 | 161 | def crossover(self, other: 'GCSolution') -> 'GCSolution': 162 | """ Preform uniform crossover.""" 163 | return self.uniform_crossover(other) 164 | 165 | 166 | if __name__ == '__main__': 167 | from pymhlib.demos.common import run_optimization, data_dir 168 | # from pymhlib.settings import settings, get_settings_parser 169 | 170 | run_optimization('Graph Coloring', GCInstance, GCSolution, data_dir + "fpsol2.i.1.col") 171 | -------------------------------------------------------------------------------- /pymhlib/demos/mkp.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the multi-dimensional knapsack problem (MKP). 2 | 3 | Given are a set of n items, m resources, and a capacity for each resource. 4 | Each item has a price and requires from each resource a certain amount. 5 | Find a subset of the items with maximum total price that does not exceed the resources' capacities. 6 | """ 7 | 8 | from typing import Any, Tuple 9 | import numpy as np 10 | 11 | from pymhlib.solution import TObj 12 | from pymhlib.subsetvec_solution import SubsetVectorSolution 13 | from pymhlib.scheduler import Result 14 | 15 | 16 | class MKPInstance: 17 | """Multi-dimensional knapsack problem (MKP) instance. 18 | 19 | Given are a set of n items, m resources, and a capacity for each resource. 20 | Each item has a price and requires from each resource a certain amount. 21 | Find a subset of the items with maximum total price that does not exceed the resources' capacities. 
22 | 23 | Attributes 24 | - n: number of items 25 | - m: number of resources, i.e., constraints 26 | - p: prices of items 27 | - r: resource consumption values 28 | - b: resource capacities 29 | - obj_opt: optimal objective value or 0 if not known 30 | - r_min: minimal resource consumption value over all elements for each resource 31 | """ 32 | 33 | def __init__(self, file_name: str): 34 | """Read an instance from the specified file in Chu and Beasley's format.""" 35 | self.n = 0 36 | self.m = 0 37 | self.p = None 38 | self.r = None 39 | self.b = None 40 | self.obj_opt = 0 41 | 42 | all_values = [] 43 | with open(file_name, "r") as file: 44 | for line in file: 45 | for word in line.split(): 46 | all_values.append(int(word)) 47 | self.n = all_values[0] 48 | self.m = all_values[1] 49 | if len(all_values) != 3+self.n+self.m*self.n+self.m: 50 | raise ValueError(f"Invalid number of values in MKP instance file {file_name}") 51 | self.obj_opt = all_values[2] 52 | self.p = np.array(all_values[3:3+self.n]) 53 | self.r = np.array(all_values[3+self.n:3+self.n+self.m*self.n]).reshape([self.m, self.n]) 54 | self.b = np.array(all_values[3+self.n+self.m*self.n:3+self.n+self.m*self.n+self.m]) 55 | self.r_min = np.min(self.r, 1) 56 | 57 | def __repr__(self): 58 | return f"n={self.n} m={self.m},\np={self.p},\nr={self.r},\nb={self.b}\n" 59 | 60 | 61 | class MKPSolution(SubsetVectorSolution): 62 | """Solution to an MKP instance. 63 | 64 | Additional attributes 65 | - y: amount of each resource used 66 | """ 67 | 68 | to_maximize = True 69 | 70 | def __init__(self, inst: MKPInstance): 71 | super().__init__(range(inst.n), inst=inst) 72 | self.y = np.zeros([self.inst.m], dtype=int) 73 | 74 | def copy(self): 75 | sol = MKPSolution(self.inst) 76 | sol.copy_from(self) 77 | return sol 78 | 79 | def copy_from(self, other: 'MKPSolution'): 80 | super().copy_from(other) 81 | self.y[:] = other.y 82 | 83 | def calc_objective(self): 84 | return np.sum(self.inst.p[self.x[:self.sel]]) 85 | 86 | def calc_y(self): 87 | """Calculates z from scratch.""" 88 | self.y = np.sum(self.inst.r[:, self.x[:self.sel]], axis=1) 89 | 90 | def check(self, unsorted=False): 91 | super().check(unsorted) 92 | y_old = self.y 93 | self.calc_y() 94 | if np.any(y_old != self.y): 95 | raise ValueError(f"Solution had invalid y values: {self.y!s} {y_old!s}") 96 | if np.any(self.y > self.inst.b): 97 | raise ValueError(f"Solution exceeds capacity limits: {self.y}, {self.inst.b}") 98 | 99 | def clear(self): 100 | self.y.fill(0) 101 | super().clear() 102 | 103 | def construct(self, par: Any, _result: Result): 104 | """Scheduler method that constructs a new solution. 105 | 106 | Here we just call initialize. 
107 | """ 108 | self.initialize(par) 109 | 110 | def local_improve(self, _par: Any, result: Result): 111 | """Scheduler method that performs one iteration of the exchange neighborhood.""" 112 | if not self.two_exchange_random_fill_neighborhood_search(False): 113 | result.changed = False 114 | 115 | def shaking(self, par: Any, _result: Result): 116 | """Scheduler method that performs shaking by remove_some(par) and random_fill().""" 117 | self.remove_some(par) 118 | self.fill() 119 | 120 | def may_be_extendible(self) -> bool: 121 | return np.all(self.y + self.inst.r_min <= self.inst.b) and self.sel < len(self.x) 122 | 123 | def element_removed_delta_eval(self, update_obj_val=True, allow_infeasible=False) -> bool: 124 | elem = self.x[self.sel] 125 | self.y -= self.inst.r[:, elem] 126 | if update_obj_val: 127 | self.obj_val -= self.inst.p[elem] 128 | return True 129 | 130 | def element_added_delta_eval(self, update_obj_val=True, allow_infeasible=False) -> bool: 131 | elem = self.x[self.sel-1] 132 | y_new = self.y + self.inst.r[:, elem] 133 | feasible = np.all(y_new <= self.inst.b) 134 | if allow_infeasible or feasible: 135 | # accept 136 | self.y = y_new 137 | if update_obj_val: 138 | self.obj_val += self.inst.p[elem] 139 | return feasible 140 | # revert 141 | self.sel -= 1 142 | return False 143 | 144 | def random_move_delta_eval(self) -> Tuple[int, TObj]: 145 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 146 | raise NotImplementedError 147 | 148 | def apply_neighborhood_move(self, pos: int): 149 | """This method applies a given neighborhood move accepted by SA, 150 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 151 | raise NotImplementedError 152 | 153 | def crossover(self, other: 'MKPSolution') -> 'MKPSolution': 154 | """Apply subset_crossover.""" 155 | return self.subset_crossover(other) 156 | 157 | 158 | if __name__ == '__main__': 159 | from pymhlib.demos.common import run_optimization, data_dir 160 | from pymhlib.settings import get_settings_parser 161 | parser = get_settings_parser() 162 | run_optimization('MKP', MKPInstance, MKPSolution, data_dir + "mknapcb5-01.txt") 163 | -------------------------------------------------------------------------------- /pymhlib/demos/tsp.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the symmetric traveling salesman problem. 2 | 3 | Given n cities and a symmetric distance matrix for all city pairs, find a shortest round trip through all cities. 4 | """ 5 | 6 | import random 7 | import math 8 | from typing import Tuple, Any 9 | import numpy as np 10 | 11 | from pymhlib.permutation_solution import PermutationSolution 12 | from pymhlib.solution import TObj 13 | 14 | 15 | class TSPInstance: 16 | """An instance of the traveling salesman problem. 17 | 18 | This instance contains the distances between all city pairs. 19 | Starting from a solution in which the cities are visited in the order they are defined in the instance file, 20 | a local search in a 2-opt neighborhood using edge exchange is performed. 
21 | 22 | Attributes 23 | - n: number of cities, i.e., size of incidence vector 24 | - distances: square matrix of integers representing the distances between two cities; 25 | zero means there is not connection between the two cities 26 | """ 27 | 28 | def __init__(self, file_name: str): 29 | """Read an instance from the specified file.""" 30 | coordinates = {} 31 | dimension = None 32 | 33 | with open(file_name, "r") as f: 34 | for line in f: 35 | if line.startswith("NAME") or line.startswith("COMMENT") or line.startswith("NODE_COORD_SECTION"): 36 | pass 37 | elif line.startswith("EOF"): 38 | break 39 | elif line.startswith("TYPE"): 40 | assert line.split()[-1] == "TSP" 41 | elif line.startswith("EDGE_WEIGHT_TYPE"): 42 | assert line.split()[-1] == "EUC_2D" 43 | elif line.startswith("DIMENSION"): 44 | dimension = int(line.split()[-1]) 45 | else: 46 | split_line = line.split() 47 | num = int(split_line[0]) - 1 # starts at 1 48 | x = int(split_line[1]) 49 | y = int(split_line[2]) 50 | 51 | coordinates[num] = (x, y) 52 | 53 | assert len(coordinates) == dimension 54 | 55 | # building adjacency matrix 56 | distances = np.zeros((dimension, dimension)) 57 | 58 | for i in range(0, dimension): 59 | for j in range(i + 1, dimension): 60 | x1, y1 = coordinates[i] 61 | x2, y2 = coordinates[j] 62 | dist = math.sqrt(math.pow(x2 - x1, 2) + math.pow(y2 - y1, 2)) 63 | distances[i][j] = distances[j][i] = int(dist) 64 | 65 | self.distances = distances 66 | self.n = dimension 67 | 68 | # make basic check if instance is meaningful 69 | if not 1 <= self.n <= 1000000: 70 | raise ValueError(f"Invalid n: {self.n}") 71 | 72 | def __repr__(self): 73 | """Write out the instance data.""" 74 | return f"n={self.n},\ndistances={self.distances!r}\n" 75 | 76 | 77 | class TSPSolution(PermutationSolution): 78 | """Solution to a TSP instance. 79 | 80 | Attributes 81 | - inst: associated TSPInstance 82 | - x: order in which cities are visited, i.e., a permutation of 0,...,n-1 83 | """ 84 | 85 | to_maximize = False 86 | 87 | def __init__(self, inst: TSPInstance): 88 | super().__init__(inst.n, inst=inst) 89 | self.obj_val_valid = False 90 | 91 | def copy(self): 92 | sol = TSPSolution(self.inst) 93 | sol.copy_from(self) 94 | return sol 95 | 96 | def calc_objective(self): 97 | distance = 0 98 | for i in range(self.inst.n - 1): 99 | distance += self.inst.distances[self.x[i]][self.x[i + 1]] 100 | distance += self.inst.distances[self.x[-1]][self.x[0]] 101 | return distance 102 | 103 | def check(self): 104 | """Check if valid solution. 105 | 106 | :raises ValueError: if problem detected. 107 | """ 108 | if len(self.x) != self.inst.n: 109 | raise ValueError("Invalid length of solution") 110 | super().check() 111 | 112 | def construct(self, par, _result): 113 | """Scheduler method that constructs a new solution. 114 | 115 | Here we just call initialize. 
116 | """ 117 | self.initialize(par) 118 | 119 | def shaking(self, par, result): 120 | """Scheduler method that performs shaking by 'par'-times swapping a pair of randomly chosen cities.""" 121 | for _ in range(par): 122 | a = random.randint(0, self.inst.n - 1) 123 | b = random.randint(0, self.inst.n - 1) 124 | self.x[a], self.x[b] = self.x[b], self.x[a] 125 | self.invalidate() 126 | result.changed = True 127 | 128 | def local_improve(self, _par, _result): 129 | """2-opt local search.""" 130 | self.two_opt_neighborhood_search(True) 131 | 132 | def two_opt_move_delta_eval(self, p1: int, p2: int) -> int: 133 | """ This method performs the delta evaluation for inverting self.x from position p1 to position p2. 134 | 135 | The function returns the difference in the objective function if the move would be performed, 136 | the solution, however, is not changed. 137 | """ 138 | assert p1 < p2 139 | n = len(self.x) 140 | if p1 == 0 and p2 == n - 1: 141 | # reversing the whole solution has no effect 142 | return 0 143 | prev = (p1 - 1) % n 144 | nxt = (p2 + 1) % n 145 | x_p1 = self.x[p1] 146 | x_p2 = self.x[p2] 147 | x_prev = self.x[prev] 148 | x_next = self.x[nxt] 149 | d = self.inst.distances 150 | delta = d[x_prev][x_p2] + d[x_p1][x_next] - d[x_prev][x_p1] - d[x_p2][x_next] 151 | return delta 152 | 153 | def random_move_delta_eval(self) -> Tuple[Any, TObj]: 154 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 155 | return self.random_two_opt_move_delta_eval() 156 | 157 | def apply_neighborhood_move(self, move): 158 | """This method applies a given neighborhood move accepted by SA, 159 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 160 | self.apply_two_opt_move(*move) 161 | 162 | def crossover(self, other: 'TSPSolution') -> 'TSPSolution': 163 | """Perform edge recombination.""" 164 | return self.edge_recombination(other) 165 | 166 | 167 | if __name__ == '__main__': 168 | from pymhlib.demos.common import run_optimization, data_dir 169 | from pymhlib.settings import get_settings_parser 170 | parser = get_settings_parser() 171 | run_optimization('TSP', TSPInstance, TSPSolution, data_dir + "xqf131.tsp") 172 | -------------------------------------------------------------------------------- /pymhlib/demos/common.py: -------------------------------------------------------------------------------- 1 | """Some functions common for all demo applications.""" 2 | 3 | import logging 4 | from typing import Callable 5 | 6 | from pkg_resources import resource_filename 7 | 8 | from pymhlib.alns import ALNS 9 | from pymhlib.gvns import GVNS 10 | from pymhlib.log import init_logger 11 | from pymhlib.par_alns import ParallelALNS 12 | from pymhlib.pbig import PBIG 13 | from pymhlib.sa import SA 14 | from pymhlib.scheduler import Method 15 | from pymhlib.settings import parse_settings, settings, get_settings_parser, get_settings_as_str 16 | from pymhlib.solution import Solution 17 | from pymhlib.ssga import SteadyStateGeneticAlgorithm 18 | 19 | data_dir = resource_filename("pymhlib", "demos/data/") 20 | 21 | 22 | def add_general_arguments_and_parse_settings(default_inst_file: str = 'inst.dat', args=None, seed: int = 0): 23 | """Some general parameters are registered and the settings are parsed. 
24 | 25 | :param seed: optional seed value for the random number generators; 0: random initialization 26 | :param default_inst_file: default instance file to be loaded and solved 27 | """ 28 | parser = get_settings_parser() 29 | parser.add_argument("--alg", type=str, default='gvns', help='optimization algorithm to be used ' 30 | '(gvns, alns, pbig, par_alns, ssga, sa)') 31 | parser.add_argument("--inst_file", type=str, default=default_inst_file, 32 | help='problem instance file') 33 | parser.add_argument("--meths_ch", type=int, default=1, 34 | help='number of construction heuristics to be used') 35 | parser.add_argument("--meths_li", type=int, default=1, 36 | help='number of local improvement methods to be used') 37 | parser.add_argument("--meths_sh", type=int, default=5, 38 | help='number of shaking methods to be used') 39 | parser.add_argument("--meths_de", type=int, default=3, 40 | help='number of destroy methods to be used') 41 | parser.add_argument("--meths_re", type=int, default=3, 42 | help='number of repair methods to be used') 43 | parse_settings(args=args, seed=seed) 44 | 45 | 46 | def run_optimization(problem_name: str, instance_class, solution_class, default_inst_file: str = "inst.dat", 47 | own_settings: dict = None, embedded: bool = False, iter_cb: Callable = None, 48 | seed: int = 0) -> Solution: 49 | """Initialize and run optimization algorithm given by parameter alg on given problem instance. 50 | 51 | First, some general parameters for the algorithm to be applied, the instance file, and the methods to 52 | be applied are registered and the settings are parsed. 53 | Then the loggers are initialized, instance and solution objects are created and the chosen algorithm is 54 | performed. The resulting solution is finally returned. 55 | 56 | :param problem_name: name of the problem to be printed 57 | :param instance_class: class of the instance to be solved 58 | :param solution_class: concrete solution class to be used 59 | :param default_inst_file: default instance file to be loaded and solved 60 | :param own_settings: optional run-specific settings dictionary 61 | :param embedded: if set it is assumed that the call is embedded in a Notebook or other larger framework, 62 | and therefore, the parameters are assumed to be already registered and parsed 63 | :param iter_cb: optional callback function that is called each iteration by some of the algorithms 64 | :param seed: optional seed value for the random number generators; 0: random initialization 65 | """ 66 | if not embedded: 67 | add_general_arguments_and_parse_settings(default_inst_file, seed=seed) 68 | 69 | init_logger() 70 | logger = logging.getLogger("pymhlib") 71 | logger.info("pymhlib demo for solving %s", problem_name) 72 | logger.info(get_settings_as_str()) 73 | instance = instance_class(settings.inst_file) 74 | logger.info("%s instance read:\n%s", problem_name, str(instance)) 75 | solution = solution_class(instance) 76 | # solution.initialize(0) 77 | 78 | logger.info("Solution: %s, obj=%f\n", solution, solution.obj()) 79 | 80 | if settings.alg == 'gvns': 81 | alg = GVNS(solution, 82 | [Method(f"ch{i}", solution_class.construct, i) for i in range(settings.meths_ch)], 83 | [Method(f"li{i}", solution_class.local_improve, i) for i in range(1, settings.meths_li + 1)], 84 | [Method(f"sh{i}", solution_class.shaking, i) for i in range(1, settings.meths_sh + 1)], 85 | own_settings) 86 | elif settings.alg == 'alns': 87 | alg = ALNS(solution, 88 | [Method(f"ch{i}", solution_class.construct, i) for i in 
range(settings.meths_ch)], 89 | [Method(f"de{i}", solution_class.destroy, i) for i in range(1, settings.meths_de + 1)], 90 | [Method(f"re{i}", solution_class.repair, i) for i in range(1, settings.meths_re + 1)], 91 | own_settings) 92 | elif settings.alg == 'pbig': 93 | alg = PBIG(solution, 94 | [Method(f"ch{i}", solution_class.construct, i) for i in range(settings.meths_ch)], 95 | [Method(f"li{i}", solution_class.local_improve, i) for i in range(1, settings.meths_li + 1)] + 96 | [Method(f"sh{i}", solution_class.shaking, i) for i in range(1, settings.meths_sh + 1)], 97 | own_settings) 98 | elif settings.alg == 'par_alns': 99 | alg = ParallelALNS(solution, 100 | [Method(f"ch{i}", solution_class.construct, i) for i in range(settings.meths_ch)], 101 | [Method(f"de{i}", solution_class.destroy, i) for i in range(1, settings.meths_de + 1)], 102 | [Method(f"re{i}", solution_class.repair, i) for i in range(1, settings.meths_re + 1)], 103 | own_settings) 104 | elif settings.alg == 'ssga': 105 | alg = SteadyStateGeneticAlgorithm(solution, 106 | [Method("ch{i}", solution_class.construct, i) for i in 107 | range(settings.meths_ch)], 108 | solution_class.crossover, 109 | Method("mu", solution_class.shaking, 1), 110 | Method("ls", solution_class.local_improve, 1), 111 | own_settings) 112 | elif settings.alg == 'sa': 113 | alg = SA(solution, 114 | [Method(f"ch{i}", solution_class.construct, i) for i in range(settings.meths_ch)], 115 | solution_class.random_move_delta_eval, solution_class.apply_neighborhood_move, iter_cb, own_settings) 116 | else: 117 | raise ValueError('Invalid optimization algorithm selected (settings.alg): ', settings.alg) 118 | 119 | alg.run() 120 | logger.info("") 121 | alg.method_statistics() 122 | alg.main_results() 123 | return solution 124 | -------------------------------------------------------------------------------- /pymhlib/demos/maxsat.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the MAXSAT problem. 2 | 3 | The goal is to maximize the number of clauses satisfied in a boolean function given in conjunctive normal form. 4 | """ 5 | 6 | import random 7 | from typing import Any, Tuple 8 | import numpy as np 9 | 10 | from pymhlib.solution import TObj 11 | from pymhlib.binvec_solution import BinaryVectorSolution 12 | from pymhlib.alns import ALNS 13 | from pymhlib.scheduler import Result 14 | 15 | 16 | class MAXSATInstance: 17 | """MAXSAT problem instance. 18 | 19 | The goal is to maximize the number of clauses satisfied in a boolean function given in conjunctive normal form. 
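    Instances are read from text files in the standard DIMACS CNF format.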
20 | 21 | Attributes 22 | - n: number of variables, i.e., size of incidence vector 23 | - m: number of clauses 24 | - clauses: list of clauses, where each clause is represented by an array of integers; 25 | a positive integer v refers to the v-th variable, while a negative integer v refers 26 | to the negated form of the v-th variable; note that variable indices start with 1 (-1) 27 | - variable_usage: array containing for each variable a list with the indices of the clauses in 28 | which the variable appears; needed for efficient incremental evaluation 29 | """ 30 | 31 | def __init__(self, file_name: str): 32 | """Read an instance from the specified file.""" 33 | self.n = 0 34 | self.m = 0 35 | self.clauses = list() 36 | self.variable_usage: np.ndarray 37 | 38 | with open(file_name, "r") as file: 39 | for line in file: 40 | if line.startswith("c"): 41 | # ignore comments 42 | continue 43 | fields = line.split() 44 | if len(fields) == 4 and fields[0] == "p" and fields[1] == "cnf": 45 | try: 46 | self.n = int(fields[2]) 47 | self.m = int(fields[3]) 48 | except ValueError as val_error: 49 | raise ValueError(f"Invalid values in line 'p cnf': {line}") from val_error 50 | self.variable_usage = [list() for _ in range(self.n)] 51 | elif len(fields) >= 1: 52 | # read clause 53 | if not fields[-1].startswith("0"): 54 | raise ValueError(f"Last field in clause line must be 0, but is not: {line}, {fields[-1]!r}") 55 | try: 56 | clause = [int(s) for s in fields[:-1]] 57 | for v in clause: 58 | self.variable_usage[abs(v)-1].append(len(self.clauses)) 59 | self.clauses.append(np.array(clause)) 60 | except ValueError as val_error: 61 | raise ValueError(f"Invalid clause: {line}") from val_error 62 | 63 | for v, usage in enumerate(self.variable_usage): 64 | self.variable_usage[v] = np.array(usage) 65 | 66 | # make basic check if instance is meaningful 67 | if not 1 <= self.n <= 1000000: 68 | raise ValueError(f"Invalid n: {self.n}") 69 | if not 1 <= self.m <= 1000000: 70 | raise ValueError(f"Invalid m: {self.m}") 71 | if len(self.clauses) != self.m: 72 | raise ValueError(f"Number of clauses should be {self.m}, but {len(self.clauses)} read") 73 | 74 | def __repr__(self): 75 | return f"m={self.m}, n={self.n}\n" # , clauses={self.clauses!r}\n" 76 | 77 | 78 | class MAXSATSolution(BinaryVectorSolution): 79 | """Solution to a MAXSAT instance. 80 | 81 | Attributes 82 | - inst: associated MAXSATInstance 83 | - x: binary incidence vector 84 | - destroyed: list of indices of variables that have been destroyed by the ALNS's destroy operator 85 | """ 86 | 87 | to_maximize = True 88 | 89 | def __init__(self, inst: MAXSATInstance): 90 | super().__init__(inst.n, inst=inst) 91 | self.destroyed = None 92 | 93 | def copy(self): 94 | sol = MAXSATSolution(self.inst) 95 | sol.copy_from(self) 96 | return sol 97 | 98 | def calc_objective(self): 99 | fulfilled_clauses = 0 100 | for clause in self.inst.clauses: 101 | for v in clause: 102 | if self.x[abs(v)-1] == (1 if v > 0 else 0): 103 | fulfilled_clauses += 1 104 | break 105 | return fulfilled_clauses 106 | 107 | def check(self): 108 | """Check if valid solution. 109 | 110 | :raises ValueError: if problem detected. 111 | """ 112 | if len(self.x) != self.inst.n: 113 | raise ValueError("Invalid length of solution") 114 | super().check() 115 | 116 | def construct(self, par: Any, _result: Result): 117 | """Scheduler method that constructs a new random solution. 118 | 119 | Here we just call initialize. 
120 | """ 121 | self.initialize(par) 122 | 123 | def local_improve(self, par: Any, _result: Result): 124 | """Perform one k_flip_neighborhood_search.""" 125 | self.k_flip_neighborhood_search(par, False) 126 | 127 | def shaking(self, par, _result): 128 | """Scheduler method that performs shaking by flipping par random positions.""" 129 | self.k_random_flips(par) 130 | 131 | def destroy(self, par: Any, _result: Result): 132 | """Destroy operator for ALNS selects par*ALNS.get_number_to_destroy positions uniformly at random for removal. 133 | 134 | Selected positions are stored with the solution in list self.destroyed. 135 | """ 136 | num = min(ALNS.get_number_to_destroy(len(self.x)) * par, len(self.x)) 137 | self.destroyed = np.random.choice(range(len(self.x)), num, replace=False) 138 | self.invalidate() 139 | 140 | def repair(self, _par: Any, _result: Result): 141 | """Repair operator for ALNS assigns new random values to all positions in self.destroyed.""" 142 | assert self.destroyed is not None 143 | for p in self.destroyed: 144 | self.x[p] = random.randrange(2) 145 | self.destroyed = None 146 | self.invalidate() 147 | 148 | def flip_variable(self, pos: int): 149 | delta_obj = self.flip_move_delta_eval(pos) 150 | self.obj_val += delta_obj 151 | self.x[pos] = not self.x[pos] 152 | 153 | def flip_move_delta_eval(self, pos: int) -> TObj: 154 | """Determine delta in objective value when flipping position pos.""" 155 | assert self.obj_val_valid 156 | val = not self.x[pos] 157 | delta = 0 158 | for clause in self.inst.variable_usage[pos]: 159 | val_fulfills_now = False 160 | for v in self.inst.clauses[clause]: 161 | if abs(v)-1 == pos: 162 | val_fulfills_now = (val if v > 0 else not val) 163 | elif self.x[abs(v) - 1] == (1 if v > 0 else 0): 164 | break # clause fulfilled by other variable, no change 165 | else: 166 | delta += 1 if val_fulfills_now else -1 167 | return delta 168 | 169 | def random_move_delta_eval(self) -> Tuple[int, TObj]: 170 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 171 | return self.random_flip_move_delta_eval() 172 | 173 | def apply_neighborhood_move(self, pos): 174 | """This method applies a given neighborhood move accepted by SA, 175 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 176 | self.x[pos] = not self.x[pos] 177 | 178 | def crossover(self, other: 'MAXSATSolution'): 179 | """ Perform uniform crossover as crossover.""" 180 | return self.uniform_crossover(other) 181 | 182 | 183 | if __name__ == '__main__': 184 | from pymhlib.demos.common import run_optimization, data_dir 185 | from pymhlib.settings import get_settings_parser 186 | parser = get_settings_parser() 187 | parser.set_defaults(mh_titer=1000) 188 | run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution, data_dir+"maxsat-adv1.cnf") 189 | -------------------------------------------------------------------------------- /pymhlib/solution.py: -------------------------------------------------------------------------------- 1 | """ 2 | Abstract base class representing a candidate solution to an optimization problem and some derived still generic classes. 3 | 4 | The abstract base class Solution represents a candidate solution to an optimization problem. 5 | Derived classes VectorSolution, BinaryVectorSolution, and SetSolution are for solutions which are 6 | represented bei general fixed-length vectors, boolean fixed-length vectors and sets of arbitrary elements. 
7 | 8 | For a concrete optimization problem to solve you have to derive from one of these classes. 9 | """ 10 | 11 | from typing import TypeVar 12 | from abc import ABC, abstractmethod 13 | import random 14 | import numpy as np 15 | 16 | from pymhlib.settings import settings, get_settings_parser 17 | 18 | parser = get_settings_parser() 19 | parser.add_argument("--mh_xover_pts", type=int, default=1, help='number of crossover points in multi-point crossover') 20 | 21 | TObj = TypeVar('TObj', int, float) # Type of objective value 22 | 23 | 24 | class Solution(ABC): 25 | """Abstract base class for a candidate solution. 26 | 27 | Class variables 28 | - to maximize: default is True, i.e., to maximize; override with False if the goal is to minimize 29 | 30 | Attributes 31 | - obj_val: objective value; valid if obj_val_valid is set 32 | - obj_val_valid: indicates if obj_val has been calculated and is valid 33 | - inst: optional reference to a problem instance object 34 | - alg: optional reference to an algorithm object using this solution 35 | """ 36 | 37 | to_maximize = True 38 | 39 | def __init__(self, inst=None, alg=None): 40 | self.obj_val: TObj = -1 41 | self.obj_val_valid: bool = False 42 | self.inst = inst 43 | self.alg = alg 44 | 45 | @abstractmethod 46 | def copy(self): 47 | """Return a (deep) clone of the current solution.""" 48 | 49 | @abstractmethod 50 | def copy_from(self, other: 'Solution'): 51 | """Make the current solution a (deep) copy of the other.""" 52 | # self.inst = other.inst 53 | # self.alg = other.alg 54 | self.obj_val = other.obj_val 55 | self.obj_val_valid = other.obj_val_valid 56 | 57 | @abstractmethod 58 | def __repr__(self): 59 | return str(self.obj()) 60 | 61 | @abstractmethod 62 | def calc_objective(self) -> TObj: 63 | """Determine the objective value and return it.""" 64 | raise NotImplementedError 65 | 66 | def obj(self) -> TObj: 67 | """Return objective value. 68 | 69 | Returns stored value if already known or calls calc_objective() otherwise. 70 | """ 71 | if not self.obj_val_valid: 72 | self.obj_val = self.calc_objective() 73 | self.obj_val_valid = True 74 | return self.obj_val 75 | 76 | def invalidate(self): 77 | """Mark the stored objective value obj_val as not valid anymore. 78 | 79 | Needs to be called whenever the solution is changed and obj_val not updated accordingly. 80 | """ 81 | self.obj_val_valid = False 82 | 83 | @abstractmethod 84 | def initialize(self, k): 85 | """Construct an initial solution in a fast non-sophisticated way. 86 | 87 | :param k: is increased from 0 onwards for each call of this method 88 | """ 89 | raise NotImplementedError 90 | 91 | def __eq__(self, other: "Solution") -> bool: 92 | """Return true if the other solution is equal to the current one. 93 | 94 | The default implementation returns True if the objective values are the same. 
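        Derived classes with an explicit representation, such as VectorSolution and
        SetSolution, override this to additionally compare the representation itself.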
95 | """ 96 | return self.obj() == other.obj() 97 | 98 | def is_better(self, other: "Solution") -> bool: 99 | """Return True if the current solution is better in terms of the objective function than the other.""" 100 | return self.obj() > other.obj() if self.to_maximize else self.obj() < other.obj() 101 | 102 | def is_worse(self, other: "Solution") -> bool: 103 | """Return True if the current solution is worse in terms of the objective function than the other.""" 104 | return self.obj() < other.obj() if self.to_maximize else self.obj() > other.obj() 105 | 106 | @classmethod 107 | def is_better_obj(cls, obj1: TObj, obj2: TObj) -> bool: 108 | """Return True if the obj1 is a better objective value than obj2.""" 109 | return obj1 > obj2 if cls.to_maximize else obj1 < obj2 110 | 111 | @classmethod 112 | def is_worse_obj(cls, obj1: TObj, obj2: TObj) -> bool: 113 | """Return True if obj1 is a worse objective value than obj2.""" 114 | return obj1 < obj2 if cls.to_maximize else obj1 > obj2 115 | 116 | def dist(self, other): 117 | """Return distance of current solution to other solution. 118 | 119 | The default implementation just returns 0 if the solutions have the same objective value. 120 | """ 121 | return self.obj() != other.obj() 122 | 123 | def __hash__(self): 124 | """Return hash value for solution. 125 | 126 | The default implementation returns the hash value of the objective value. 127 | """ 128 | return hash(self.obj()) 129 | 130 | @abstractmethod 131 | def check(self): 132 | """Check validity of solution. 133 | 134 | If a problem is encountered, raise an exception. 135 | The default implementation just re-calculates the objective value. 136 | """ 137 | if self.obj_val_valid: 138 | old_obj = self.obj_val 139 | self.invalidate() 140 | if old_obj != self.obj(): 141 | raise ValueError(f'Solution has wrong objective value: {old_obj}, should be {self.obj()}') 142 | 143 | 144 | class VectorSolution(Solution, ABC): 145 | """Abstract solution class with fixed-length integer vector as solution representation. 146 | 147 | Attributes 148 | - x: vector representing a solution, realized ba a numpy.ndarray 149 | """ 150 | 151 | def __init__(self, length, init=True, dtype=int, init_value=0, **kwargs): 152 | """Initializes the solution vector with zeros.""" 153 | super().__init__(**kwargs) 154 | self.x = np.full([length], init_value, dtype=dtype) if init else np.empty([length], dtype=dtype) 155 | 156 | def copy_from(self, other: 'VectorSolution'): 157 | super().copy_from(other) 158 | self.x[:] = other.x 159 | 160 | def __repr__(self): 161 | return str(self.x) 162 | 163 | def __eq__(self, other: 'VectorSolution') -> bool: 164 | return self.obj() == other.obj() and np.array_equal(self.x, other.x) 165 | 166 | def uniform_crossover(self, other: 'VectorSolution') -> 'VectorSolution': 167 | """Uniform crossover of the current solution with the given other solution.""" 168 | child = self.copy() 169 | # randomly replace elements with those from other solution 170 | for i in range(len(self.x)): 171 | if random.getrandbits(1): 172 | child.x[i] = other.x[i] 173 | child.invalidate() 174 | return child 175 | 176 | def multi_point_crossover(self, other: 'VectorSolution') -> 'VectorSolution': 177 | """Multi-point crossover of current and other given solution. 178 | 179 | The number of crossover points is passed in settings.mh_xover_pts. 
180 | """ 181 | child = self.copy() 182 | size = len(self.x) 183 | points = np.random.choice(size, settings.mh_xover_pts, replace=False) 184 | points.sort() 185 | if len(points) % 2: 186 | points.append(size) 187 | points = points.reshape(len(points)/2, 2) 188 | for a, b in points: 189 | child.x[a:b] = other.x[a:b] 190 | child.invalidate() 191 | return child 192 | 193 | 194 | class SetSolution(Solution, ABC): 195 | """Abstract solution class with a set as solution representation. 196 | 197 | Attributes 198 | - s: set representing a solution 199 | """ 200 | 201 | def __init__(self, **kwargs): 202 | """Initializes the solution with the empty set.""" 203 | super().__init__(**kwargs) 204 | self.s = set() 205 | 206 | def copy_from(self, other: 'SetSolution'): 207 | super().copy_from(other) 208 | self.s = other.s.copy() 209 | 210 | def __repr__(self): 211 | return str(self.s) 212 | 213 | def __eq__(self, other: 'SetSolution') -> bool: 214 | return self.obj() == other.obj() and self.s == other.s 215 | 216 | def initialize(self, k): 217 | """Set the solution to the empty set.""" 218 | self.s.clear() 219 | -------------------------------------------------------------------------------- /pymhlib/demos/vertex_cover.py: -------------------------------------------------------------------------------- 1 | """Demo application solving the minimum vertex cover problem. 2 | 3 | Given an undirected simple graph, find a minimum subset of the vertices so that from each edge in the graph at 4 | least one of its end points is in this subset. 5 | """ 6 | 7 | import random 8 | from typing import Any, Tuple 9 | from itertools import combinations 10 | import heapq 11 | import networkx as nx 12 | 13 | from pymhlib.solution import SetSolution, TObj 14 | from pymhlib.settings import get_settings_parser 15 | from pymhlib.scheduler import Result 16 | from pymhlib.demos.graphs import create_or_read_simple_graph 17 | 18 | parser = get_settings_parser() 19 | 20 | 21 | class VertexCoverInstance: 22 | """Minimum vertex cover problem instance. 23 | 24 | Given an undirected simple graph, find a minimum subset of the vertices so that from each edge in the graph at 25 | least one of its end points is in this subset. 26 | 27 | Attributes 28 | - graph: the graph for which we want to find a minimum vertex cover 29 | - n: number of nodes 30 | - m: number of edges 31 | """ 32 | 33 | def __init__(self, name: str): 34 | """Create or read graph with given name.""" 35 | self.graph = create_or_read_simple_graph(name) 36 | self.n = self.graph.number_of_nodes() 37 | self.m = self.graph.number_of_edges() 38 | 39 | def __repr__(self): 40 | """Write out the instance data.""" 41 | return f"n={self.n} m={self.m}\n" 42 | 43 | 44 | class VertexCoverSolution(SetSolution): 45 | """Solution to a minimum vertex cover instance. 46 | 47 | Attributes 48 | - s: set of selected elements 49 | """ 50 | 51 | to_maximize = False 52 | 53 | def __init__(self, inst: VertexCoverInstance): 54 | super().__init__(inst=inst) 55 | 56 | def copy(self): 57 | sol = VertexCoverSolution(self.inst) 58 | sol.copy_from(self) 59 | return sol 60 | 61 | def copy_from(self, other: 'VertexCoverSolution'): 62 | super().copy_from(other) 63 | 64 | def calc_objective(self): 65 | return len(self.s) 66 | 67 | def check(self): 68 | """Check if valid solution. 69 | 70 | :raises ValueError: if problem detected. 
71 | """ 72 | super().check() 73 | if not self.s.issubset(set(range(self.inst.n))): 74 | raise ValueError(f'Invalid value in solution set: {self.s}') 75 | for u, v in self.inst.graph.edges: 76 | if u not in self.s and v not in self.s: 77 | raise ValueError(f'Edge ({u},{v}) not covered') 78 | 79 | def remove_redundant(self) -> bool: 80 | """Scheduler method that checks for each node in the current vertex cover if it can be removed. 81 | 82 | The nodes are processed in random order. 83 | 84 | :return: True if solution could be improved 85 | """ 86 | s = self.s 87 | x = list(s) 88 | random.shuffle(x) 89 | for u in x: 90 | for v in self.inst.graph.neighbors(u): 91 | if v not in s: 92 | break 93 | else: 94 | s.remove(u) 95 | if len(s) < len(x): 96 | self.invalidate() 97 | return True 98 | return False 99 | 100 | def two_approximation_construction(self): 101 | """Perform a randomized 2-approximation construction algorithm. 102 | 103 | Randomly select an uncovered edge and include both end nodes in vertex cover until all edges are covered. 104 | """ 105 | g: nx.Graph = self.inst.graph.copy() 106 | s = self.s 107 | s.clear() 108 | edge_list = list(g.edges) 109 | random.shuffle(edge_list) 110 | for u, v in edge_list: 111 | if not g.has_edge(u, v): 112 | continue 113 | s.add(u) 114 | s.add(v) 115 | g.remove_nodes_from((u, v)) 116 | self.invalidate() 117 | 118 | # noinspection PyCallingNonCallable 119 | def greedy_construction(self, use_degree=True): 120 | """Degree-based greedy or pure random construction heuristic.""" 121 | g: nx.Graph = self.inst.graph.copy() 122 | s = self.s 123 | s.clear() 124 | nodes = list(g.nodes) 125 | random.shuffle(nodes) 126 | 127 | heap = None 128 | if use_degree: 129 | heap = [(-g.degree(u), u) for u in g.nodes] 130 | heapq.heapify(heap) 131 | 132 | # noinspection PyCallingNonCallable 133 | def node_yielder(): 134 | if use_degree: 135 | while heap: 136 | d, node = heapq.heappop(heap) 137 | if d == 0: 138 | return 139 | if not g.has_node(node) or g.degree(node) != -d: 140 | continue 141 | yield node 142 | else: 143 | for node in nodes: 144 | if g.degree(node): 145 | yield node 146 | 147 | for u in node_yielder(): 148 | for v in g.neighbors(u): 149 | if v not in s: 150 | s.add(u) 151 | if use_degree: 152 | for v2 in g.neighbors(u): 153 | v2_d = g.degree(v2) - 1 154 | if v2_d: 155 | heapq.heappush(heap, (-v2_d, v2)) 156 | g.remove_node(u) 157 | break 158 | self.invalidate() 159 | 160 | def construct(self, par, _result): 161 | """Scheduler method that constructs a new solution. 162 | 163 | Here we just call initialize. 164 | """ 165 | self.initialize(par) 166 | 167 | def local_improve(self, _par, result): 168 | """Search add-one-remove-at-least-two-nodes neighborhood in first-improvement manner.""" 169 | g: nx.Graph = self.inst.graph 170 | s = self.s 171 | x = list(set(range(self.inst.n)).difference(s)) 172 | random.shuffle(x) 173 | for u in x: 174 | # when adding u, can we remove >= 2 neighboring nodes? 
175 | s.add(u) 176 | removable = [] 177 | for v in g.neighbors(u): 178 | if v not in s: 179 | continue 180 | for v2 in g.neighbors(v): 181 | if v2 not in s: 182 | break 183 | else: 184 | removable.append(v) 185 | if len(removable) >= 2: 186 | # find two non-adjacent 187 | for va, vb in combinations(removable, 2): 188 | if not g.has_edge(va, vb): 189 | s.remove(va) 190 | s.remove(vb) 191 | removable.remove(va) 192 | removable.remove(vb) 193 | removed = {va, vb} 194 | for vc in removable: 195 | for vr in removed: 196 | if g.has_edge(vc, vr): 197 | break 198 | else: 199 | s.remove(vc) 200 | removed.add(vc) 201 | self.invalidate() 202 | return 203 | s.remove(u) 204 | result.changed = False 205 | 206 | def shaking(self, par: Any, _result: Result): 207 | """Add par so far unselected nodes and apply remove_redundant.""" 208 | s = self.s 209 | x = list(set(range(self.inst.n)).difference(s)) 210 | to_add = random.sample(x, max(len(x), par)) 211 | for u in to_add: 212 | s.add(u) 213 | self.remove_redundant() 214 | 215 | def initialize(self, k): 216 | """Initialize solution by taking all nodes and applying local_improve.""" 217 | super().initialize(0) 218 | self.greedy_construction(k == 0) 219 | # self.two_approximation_construction() 220 | # self.remove_redundant() 221 | self.check() 222 | 223 | def random_move_delta_eval(self) -> Tuple[int, TObj]: 224 | """Choose a random move and perform delta evaluation for it, return (move, delta_obj).""" 225 | raise NotImplementedError 226 | 227 | def apply_neighborhood_move(self, pos: int): 228 | """This method applies a given neighborhood move accepted by SA, 229 | without updating the obj_val or invalidating, since obj_val is updated incrementally by the SA scheduler.""" 230 | raise NotImplementedError 231 | 232 | def crossover(self, other: 'VertexCoverSolution') -> 'VertexCoverSolution': 233 | """Abstract crossover function.""" 234 | raise NotImplementedError 235 | 236 | 237 | if __name__ == '__main__': 238 | from pymhlib.demos.common import run_optimization, data_dir 239 | parser = get_settings_parser() 240 | run_optimization('Minimum Vertex Cover', VertexCoverInstance, VertexCoverSolution, data_dir + "frb40-19-1.mis") 241 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## `pymhlib` - A Toolbox for Metaheuristics and Hybrid Optimization Methods 2 | 3 | [![BuildStatus](https://github.com/ac-tuwien/pymhlib/actions/workflows/CI.yml/badge.svg?branch=master)](https://github.com/ac-tuwien/pymhlib?branch=master) 4 | [![codecov.io](http://codecov.io/github/ac-tuwien/pymhlib/coverage.svg?branch=master)](http://codecov.io/github/ac-tuwien/pymhlib?branch=master) 5 | 6 | _This project is still in early development, any feedback is much appreciated!_ 7 | 8 | `pymhlib` is a collection of modules supporting the efficient implementation of metaheuristics 9 | and certain hybrid optimization approaches for solving primarily combinatorial optimization 10 | problems in Python 3.7+. 11 | 12 | ![ ](mh.png) 13 | 14 | This Python `mhlib` version emerged from the 15 | [C++ `mhlib`](https://bitbucket.org/ads-tuwien/mhlib) to which it has certain similarities 16 | but also many differences. 
17 | 18 | Note that there also exists a more recent efficient **Julia-implementation** of this libraries, 19 | following a similar design concept: [Julia MHLib.jl](https://github.com/ac-tuwien/MHLib.jl) 20 | 21 | The main purpose of the library is to support rapid prototyping and teaching. 22 | While ultimately efficient implementations of such algorithms in compiled 23 | languages like Julia or C++ will in general be much faster, an advantage of 24 | the Python implementation lies in the possibly quicker development cycle. 25 | 26 | 27 | `pymhlib` is developed primarily by the 28 | [Algorithms and Complexity Group of TU Wien](https://www.ac.tuwien.ac.at), 29 | Vienna, Austria, since 2019. 30 | 31 | #### Contributors: 32 | - [Günther Raidl](https://www.ac.tuwien.ac.at/raidl) (primarily responsible) 33 | - [Nikolaus Frohner](https://www.ac.tuwien.ac.at/nfrohner) 34 | - Thomas Jatschka 35 | - Daniel Obszelka 36 | - Andreas Windbichler 37 | 38 | ### Installation 39 | 40 | Major versions of `pymhlib` can be installed from `PyPI` via 41 | 42 | python3 -m pip install -U pymhlib 43 | 44 | and development versions are available at https://github.com/ac-tuwien/pymhlib. 45 | 46 | ### Major Components 47 | 48 | - **solution.py**: 49 | An abstract base class `Solution`that represents a candidate solution to an optimization problem and 50 | derived classes `VectorSolution`, `BinaryVectorSolution`, and `SetSolution` for solutions which are 51 | represented bei general fixed-length vectors, boolean vectors or sets of arbitrary elements. 52 | - **binvec_solution.py**: 53 | A more specific solution class `BinaryVectorSolution` for problems in which solutions are represented by 54 | fixed-length binary vectors. 55 | - **subsetvec_solution.py**: 56 | A more specific solution class `SubsetVectorSolution` for problems in which solutions are subsets of a 57 | larger set. The set is realized by an efficient numpy array which is split into two parts, 58 | the one with the included elements in sorted order and the one with the remaining elements. 59 | - **permutation_solution.py**: 60 | A more specific solution class `PermutationSolution` for problems in which solutions are permutations of a 61 | set of elements. 62 | - **scheduler.py**: 63 | A an abstract framework for single metaheuristics that rely on iteratively applying certain 64 | methods to a current solution. Modules like gvns.py and alns.py extend this abstract class towards 65 | more specific metaheuristics. 66 | - **gvns.py**: 67 | A framework for local search, iterated local search, (general) variable neighborhood 68 | search, GRASP, etc. 69 | - **alns.py**: 70 | A framework for adaptive large neighborhood search (ALNS). 71 | - **par_alns.py**: 72 | A multi-process implementation of the ALNS where destroy+repair operations are parallelized. 73 | - **population.py** 74 | A population class for population-based metaheuristics. 75 | - **pbig.py**: 76 | A population based iterated greedy (PBIG) algorithm. 77 | - **ssga.py**: 78 | A steady-state genetic algorithm (SSGA). 79 | - **sa.py**: 80 | A simulated annealing (SA) algorithm with geometric cooling. 81 | - **decision_diag.py**: 82 | A generic class for (relaxed) decision diagrams for combinatorial optimization. 83 | - **log.py**: 84 | Provides two logger objects, one for writing out general log information, which is typically 85 | written into a `*.out` file, and one for iteration-wise information, which is typically 86 | written into a `*.log` file. 
The latter is buffered in order to work also efficiently, e.g., 87 | on network drives and massive detailed log information. 88 | A class `LogLevel` is provided for indented writing of log information according to a current level, 89 | which might be used for hierarchically embedded components of a larger optimization framework, 90 | such as a local search that is embedded in a population-based approach. 91 | - **settings.py**: 92 | Allows for defining module-specific parameters directly in each module in an independent distributed 93 | way, while values for these parameters can be provided as program arguments or in 94 | configuration files. Most `pymhlib` modules rely on this mechanism for their external parameters. 95 | 96 | Modules/scripts for analyzing results of many runs: 97 | 98 | - **multi_run_summary.py**: 99 | Collects essential information from multiple `pymhlib` algorithm runs found in the respective out and log files 100 | and returns a corresponding pandas `DataFrame` if used as a module or as a plain ASCII table when used as 101 | independent script. The module can be easily configured to extract also arbitrary application-specific data. 102 | 103 | - **aggregate_results.py**: 104 | Calculate grouped basic statistics for one or two dataframes/TSV files obtained e.g. from `multi-run-summary.py`. 105 | In particular, two test series with different algorithms or different settings can be statistically 106 | compared, including Wilcoxon signed rank tests. The module can be used as standalone script as well 107 | as module called, e.g., from a jupyter notebook. 108 | 109 | 110 | #### Demos 111 | 112 | For demonstration purposes, simple metaheuristic approaches are provided in the `demo` subdirectory for the following 113 | well-known combinatorial optimization problems. They can be started by 114 | 115 | python3 -m pymhlib.demos. ... 116 | 117 | where `` is one of the following and `...` represents further parameters that can be seen by providing 118 | the option `-h`. 119 | It is recommended to take such a demo as template 120 | for solving your own problem. 121 | 122 | - **`maxsat`**: maximum satisfiability problem based on `BinaryVectorSolution` 123 | - **`tsp`**: traveling salesperson problem based on `PermutationSolution` 124 | - **`qap`**: quadratic assignment problem based on `PermutationSolution` 125 | - **`vertex_cover`**: minimum vertex cover problem based on `SetSolution` 126 | - **`graph_coloring`**: graph coloring problem based on `VectorSolution` 127 | - **`misp`**: maximum (weighted) independent set problem based on `SubsetVectorSolution` 128 | - **`mkp`**: multidimensional 0-1 knapsack problem based on `SubsetVectorSolution` 129 | 130 | Shared code of these demos is found in the submodules `pymhlib.demos.common` and `pymhlib.demos.graphs`, 131 | test instance data in `pymhlib.demos.data`. 132 | 133 | Moreover, `julia-maxsat.py` and `julia-maxsat.jl` demonstrate the integration with the Julia programming language. 134 | Implementing time-critical parts of an application in Julia may accelerate the code substantially. 135 | To run this demo, Julia must be set up correctly and Python's `julia` package must be installed. 136 | While this demo derives a whole solution class in Julia, `julia-maxsat2.py` is a variant where only two functions 137 | are realized in Julia. 
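
As mentioned above, the demos are intended as templates: a run can also be set up in a
small Python script or notebook instead of the command line. The following is only a
minimal sketch using the bundled `maxsat-simple.cnf` instance; the chosen iteration
limit of 500 is an arbitrary placeholder, and further parameters can still be supplied
as program arguments.

    from pymhlib.settings import get_settings_parser
    from pymhlib.demos.common import run_optimization, data_dir
    from pymhlib.demos.maxsat import MAXSATInstance, MAXSATSolution

    parser = get_settings_parser()
    parser.set_defaults(mh_titer=500)  # iteration limit; a regular pymhlib settings parameter

    # solve the small bundled MAXSAT instance with the default algorithm (GVNS)
    solution = run_optimization('MAXSAT', MAXSATInstance, MAXSATSolution,
                                data_dir + "maxsat-simple.cnf")
    print(solution, solution.obj())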
138 | 139 | 140 | ### Changelog 141 | 142 | Major changes in releases: 143 | 144 | #### Version 0.1.5 145 | - bug fix in `settings: repeated `parse_settings` now resets former settings correctly 146 | - bug fix in `common.py/run_optimization`: seed now correctly passed to `add_general_arguments_and_parse_settings` 147 | - automatic installation of required other packages fixed 148 | 149 | #### Version 0.1.4 150 | - cleaning according to pylint warnings 151 | - settings.parse_args may now be called multiple times 152 | 153 | #### Version 0.1.3 154 | - bug fix in 2-opt neighborhood search of permutation representation 155 | 156 | #### Version 0.1.2 157 | - directory renamed to pymhlib to correspond to module name 158 | - bug fix in Metropolis criterion of ALNS 159 | - boolean arguments must now be specified in the command line as any other parameter 160 | 161 | #### Version 0.1.1 162 | - basic functionality test `tests/test_all.py` for all problems and 163 | algorithms added 164 | - polishing, minor fixes 165 | 166 | #### Version 0.1.0 167 | - ALNS and parallel ALNS added 168 | - graph coloring, TSP, and minimum vertex cover demos added 169 | - population based iterated greedy and steady state genetic algorithms added 170 | - SA with geometric cooling added 171 | - demos.graphs introduced 172 | - mhlib renamed to pymhlib 173 | - demo for interfacing with Julia added 174 | - many smaller improvements, bug fixes, improvements in documentation 175 | 176 | #### Version 0.0.1 177 | - Initial version 178 | -------------------------------------------------------------------------------- /mhlib.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: mhlib 3 | Version: 0.1.0a0 4 | Summary: Python mhlib - a toolbox for metaheuristics and hybrid optimization methods 5 | Home-page: https://github.com/ac-tuwien/pymhlib 6 | Author: Günther Raidl 7 | Author-email: raidl@ac.tuwien.ac.at 8 | License: GPL3 9 | Description: ## Python `mhlib` - A Toolbox for Metaheuristics and Hybrid Optimization Methods 10 | 11 | _This project is still in early development, any feedback is much appreciated!_ 12 | 13 | Python `mhlib` is a collection of modules supporting the efficient implementation of metaheuristics 14 | and certain hybrid optimization approaches for solving primarily combinatorial optimization 15 | problems in Python 3.7+. 16 | 17 | ![ ](mh.png) 18 | 19 | This Python `mhlib` version emerged from the 20 | [C++ `mhlib`](https://bitbucket.org/ads-tuwien/mhlib) to which it has certain similarities 21 | but also many differences. 22 | 23 | The main purpose of the library is to support rapid prototyping and teaching. 24 | While ultimately efficient implementations of such algorithms in compiled 25 | languages like C++ will likely be faster, the expected advantage of the Python 26 | implementation lies in the expected faster implementation. 27 | 28 | `mhlib` is developed primarily by the 29 | [Algorithms and Complexity Group of TU Wien](https://www.ac.tuwien.ac.at), 30 | Vienna, Austria, since 2019. 
31 | 32 | #### Contributors: 33 | - [Günther Raidl](https://www.ac.tuwien.ac.at/raidl) (primarily responsible) 34 | - [Nikolaus Frohner](https://www.ac.tuwien.ac.at/nfrohner) 35 | - Thomas Jatschka 36 | - Daniel Obszelka 37 | - Andreas Windbichler 38 | 39 | ### Installation 40 | 41 | Major versions of `mhlib` can be installed from `PyPI` via 42 | 43 | python3 -m pip install -U mhlib 44 | 45 | and development versions are available at https://github.com/ac-tuwien/pymhlib. 46 | 47 | ### Major Components 48 | 49 | - **solution.py**: 50 | An abstract base class `Solution`that represents a candidate solution to an optimization problem and 51 | derived classes `VectorSolution`, `BinaryVectorSolution`, and `SetSolution` for solutions which are 52 | represented bei general fixed-length vectors, boolean vectors or sets of arbitrary elements. 53 | - **binvec_solution.py**: 54 | A more specific solution class `BinaryVectorSolution` for problems in which solutions are represented by 55 | fixed-length binary vectors. 56 | - **subsetvec_solution.py**: 57 | A more specific solution class `SubsetVectorSolution` for problems in which solutions are subsets of a 58 | larger set. The set is realized by an efficient numpy array which is split into two parts, 59 | the one with the included elements in sorted order and the one with the remaining elements. 60 | - **permutation_solution.py**: 61 | A more specific solution class `PermutationSolution` for problems in which solutions are permutations of a 62 | set of elements. 63 | - **scheduler.py**: 64 | A an abstract framework for single metaheuristics that rely on iteratively applying certain 65 | methods to a current solution. Modules like gvns.py and alns.py extend this abstract class towards 66 | more specific metaheuristics. 67 | - **gvns.py**: 68 | A framework for local search, iterated local search, (general) variable neighborhood 69 | search, GRASP, etc. 70 | - **alns.py**: 71 | A framework for adaptive large neighborhood search (ALNS). 72 | - **par_alns.py**: 73 | A multi-process implementation of the ALNS where destroy+repair operations are parallelized. 74 | - **population.py** 75 | A population class for population-based metaheuristics. 76 | - **pbig.py**: 77 | A population based iterated greedy (PBIG) algorithm. 78 | - **ssga.py**: 79 | A steady-state genetic algorithm (SSGA). 80 | - **sa.py**: 81 | A simulated annealing (SA) algorithm with geometric cooling. 82 | - **decision_diag.py**: 83 | A generic class for (relaxed) decision diagrams for combinatorial optimization. 84 | - **log.py**: 85 | Provides two logger objects, one for writing out general log information, which is typically 86 | written into a `*.out` file, and one for iteration-wise information, which is typically 87 | written into a `*.log` file. The latter is buffered in order to work also efficiently, e.g., 88 | on network drives and massive detailed log information. 89 | A class `LogLevel` is provided for indented writing of log information according to a current level, 90 | which might be used for hierarchically embedded components of a larger optimization framework, 91 | such as a local search that is embedded in a population-based approach. 92 | - **settings.py**: 93 | Allows for defining module-specific parameters directly in each module in an independent distributed 94 | way, while values for these parameters can be provided as program arguments or in 95 | configuration files. Most `pyhmlib` modules rely on this mechanism for their external parameters. 
96 | 97 | Modules/scripts for analyzing results of many runs: 98 | 99 | - **multi_run_summary.py**: 100 | Collects essential information from multiple mhlib algorithm runs found in the respective out and log files 101 | and returns a corresponding pandas dataframe if used as a module or as a plain ASCII table when used as 102 | independent script. The module can be easily configured to extract also arbitrary application-specific data. 103 | 104 | - **aggregate_results.py**: 105 | Calculate grouped basic statistics for one or two dataframes/TSV files obtained e.g. from `multi-run-summary.py`. 106 | In particular, two test series with different algorithms or different settings can be statistically 107 | compared, including Wilcoxon signed rank tests. The module can be used as standalone script as well 108 | as module called, e.g., from a jupyter notebook. 109 | 110 | 111 | #### Demos 112 | 113 | For demonstration purposes, simple metaheuristic approaches are provided in the `demo` subdirectory for the following 114 | well-known combinatorial optimization problems. They can be startet by 115 | 116 | python3 -m mhlib.demos. ... 117 | 118 | where `` is one of the following and `...` represents further parameters that can be seen by providing 119 | the option `-h`. 120 | It is recommended to take such a demo as template 121 | for solving your own problem. 122 | 123 | - **`maxsat`**: maximum satisfiability problem based on `BinaryVectorSolution` 124 | - **`tsp`**: traveling salesperson problem based on `PermutationSolution` 125 | - **`qap`**: quadratic assignment problem based on `PermutationSolution` 126 | - **`vertex_cover`**: minimum vertex cover problem based on `SetSolution` 127 | - **`graph_coloring`**: graph coloring problem based on `VectorSolution` 128 | - **`misp`**: maximum (weighted) independent set problem based on `SubsetVectorSolution` 129 | - **`mkp`**: multidimensional 0-1 knapsack problem based on `SubsetVectorSolution` 130 | 131 | Shared code of these demos is found in the submodules `mhlib.demos.common` and `mhlib.demos.graphs`, 132 | test instance data in `mhlib.demos.data`. 133 | 134 | Moreover, `julia-maxsat.py` and `julia-maxsat.jl` demonstrate the integration with the Julia programming language. 135 | Implementing time-critical parts of an application in Julia may accelerate the code substantially. 136 | To run this demo, Julia must be set up correctly and Python's `julia` package must be installed. 137 | While this demo derives a whole solution class in Julia, `julia-maxsat2.py` is a variant where only two functions 138 | are realized in Julia. 
139 | 140 | 141 | ### Changelog 142 | 143 | Major changes over major releases: 144 | 145 | #### Version 0.1 146 | - ALNS and parallel ALNS added 147 | - graph coloring, TSP, and minimum vertex cover demos added 148 | - population based iterated greedy and steady state genetic algorithms added 149 | - SA with geometric cooling added 150 | - demos.graphs introduced 151 | - demo for interfacing with Julia added 152 | - many smaller improvements, bug fixes, improvements in documentation 153 | 154 | #### Version 0.0.1 155 | - Initial version 156 | 157 | Platform: UNKNOWN 158 | Classifier: Programming Language :: Python :: 3 159 | Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) 160 | Classifier: Operating System :: OS Independent 161 | Requires-Python: >=3.7 162 | Description-Content-Type: text/markdown 163 | -------------------------------------------------------------------------------- /pymhlib/demos/julia-maxsat.jl: -------------------------------------------------------------------------------- 1 | """Demo application for showing the integration with the Julia language, solving the MAXSAT problem. 2 | 3 | The Julia module julia-maxsat.jl is used via Python's julia interface package. 4 | Julia and Python's julia package must be installed properly. 5 | This Julia module provides a concrete Solution class for solving the MAXSAT problem in essentially 6 | the same way as maxsat.py. The goal is to maximize the number of clauses satisfied in a 7 | boolean function given in conjunctive normal form. 8 | """ 9 | 10 | module JuliaMAXSAT 11 | 12 | using PyCall 13 | using Random 14 | # using StatsBase 15 | 16 | # math = pyimport("math") 17 | # println("JuliaMAXSAT initialized", math.sin(3)) 18 | 19 | py_maxsat = pyimport("pymhlib.demos.maxsat") 20 | py_solution = pyimport("pymhlib.binvec_solution") 21 | 22 | 23 | """ 24 | JuliaMAXSATInstance 25 | 26 | Python instance class that augments MAXSATInstance by a reference julia_inst to Julia-specific 27 | instance data for a better performance. 28 | """ 29 | @pydef mutable struct JuliaMAXSATInstance <: py_maxsat.MAXSATInstance 30 | function __init__(self, file_name) 31 | pybuiltin(:super)(JuliaMAXSATInstance,self).__init__(file_name) 32 | self.julia_inst = JMAXSATInstance(self) 33 | end 34 | end 35 | 36 | 37 | """ 38 | JMAXSATInstance 39 | 40 | Juliy-specific instance data, which are derived from Python's MAXSATInstance. 41 | This separate structure enables a better performance of the Julia code since 42 | no data conversion has to take place at each lookup of some instance data. 43 | """ 44 | struct JMAXSATInstance 45 | clauses::Array{Int,2} 46 | variable_usage::Array{Int,2} 47 | 48 | function JMAXSATInstance(inst::PyObject) 49 | cl::Vector{Vector{Int}} = inst."clauses" 50 | vu::Vector{Vector{Int}} = inst."variable_usage" 51 | for c in vu c .+= 1 end 52 | return new(make_2d_array(cl), make_2d_array(vu)) 53 | end 54 | end 55 | 56 | """ 57 | make_2d_array(a, fill) 58 | 59 | Turns an Array of Arrays of Ints into a 2D array, filling up with zeros. 60 | """ 61 | function make_2d_array(a::Vector{Vector{Int}})::Array{Int,2} 62 | size1 = length(a) 63 | size2 = maximum(length(a[i]) for i in 1:size1) 64 | a2 = zeros(Int, size2, size1) 65 | for i in 1:size1 66 | a2[1:length(a[i]),i] = a[i] 67 | end 68 | return a2 69 | end 70 | 71 | 72 | """Solution to a MAXSAT instance. 
73 | 74 | Attributes 75 | - inst: associated MAXSATInstance 76 | - x: binary incidence vector 77 | - destroyed: list of indices of variables that have been destroyed by the ALNS's destroy op. 78 | """ 79 | @pydef mutable struct JuliaMAXSATSolution <: py_solution.BinaryVectorSolution 80 | 81 | to_maximize = true 82 | 83 | function __init__(self, inst) 84 | pybuiltin(:super)(JuliaMAXSATSolution,self).__init__(inst.n, inst=inst) 85 | self.destroyed = nothing 86 | end 87 | 88 | function calc_objective(self) 89 | """Count the number of satisfied clauses.""" 90 | return obj(self.x, self.inst.julia_inst) 91 | end 92 | 93 | function copy(self) 94 | sol = JuliaMAXSATSolution(self.inst) 95 | sol.copy_from(self) 96 | return sol 97 | end 98 | 99 | function check(self) 100 | x = PyArray(self."x") 101 | if length(x) != self.inst.n 102 | throw(DomainError("Invalid length of solution")) 103 | end 104 | pybuiltin(:super)(JuliaMAXSATSolution,self).check() 105 | end 106 | 107 | function construct(self, par, _result) 108 | """ 109 | Scheduler method that constructs a new solution. 110 | 111 | Here we just call initialize. 112 | """ 113 | self.initialize(par) 114 | end 115 | 116 | function local_improve(self, par, _result) 117 | """Perform one k_flip_neighborhood_search.""" 118 | x = self.x 119 | obj_val = self.obj() 120 | new_obj_val = k_flip_neighborhood_search!(x, obj_val, self.inst.julia_inst, par, false) 121 | if new_obj_val > obj_val 122 | PyArray(self."x")[:] = x 123 | self.obj_val = new_obj_val 124 | return true 125 | end 126 | return false 127 | end 128 | 129 | function shaking(self, par, _result) 130 | """Scheduler method that performs shaking by flipping par random positions.""" 131 | x = PyArray(self."x") 132 | for i in 1:par 133 | p = rand(1:length(x)) 134 | x[p] = !x[p] 135 | end 136 | self.invalidate() 137 | end 138 | 139 | function destroy(self, par, _result) 140 | """Destroy operator for ALNS selects par*ALNS.get_number_to_destroy positions 141 | uniformly at random for removal. 142 | 143 | Selected positions are stored with the solution in list self.destroyed. 
144 | """ 145 | x = PyArray(self."x") 146 | num = min(ALNS.get_number_to_destroy(length(x)) * par, length(x)) 147 | self.destroyed = sample(1:length(x), num, replace=false) 148 | self.invalidate() 149 | end 150 | 151 | function repair(self, _par, _result) 152 | """Repair operator for ALNS assigns new random values to all positions in self.destroyed.""" 153 | @assert !(self.destroyed === nothing) 154 | x = PyArray(self."x") 155 | for p in self.destroyed 156 | x[p] = rand(0:1) 157 | end 158 | self.destroyed = nothing 159 | self.invalidate() 160 | end 161 | 162 | 163 | function crossover(self, other) 164 | """ Perform uniform crossover as crossover.""" 165 | return self.uniform_crossover(other) 166 | end 167 | 168 | end 169 | 170 | 171 | function obj(x::Vector{Bool}, julia_inst::JMAXSATInstance) 172 | """Count the number of satisfied clauses.""" 173 | fulfilled_clauses = 0 174 | for clause in eachcol(julia_inst.clauses) 175 | for v in clause 176 | if v == 0 177 | break 178 | end 179 | if x[abs(v)] == (v > 0) 180 | fulfilled_clauses += 1 181 | break 182 | end 183 | end 184 | end 185 | return fulfilled_clauses 186 | end 187 | 188 | 189 | function locimp(self)::Bool 190 | """Perform one k_flip_neighborhood_search.""" 191 | x = self.x 192 | obj_val = self.obj() 193 | new_obj_val = k_flip_neighborhood_search!(x, obj_val, self.inst.julia_inst, 1, false) 194 | if new_obj_val > obj_val 195 | PyArray(self."x")[:] = x 196 | self.obj_val = new_obj_val 197 | return true 198 | end 199 | return false 200 | end 201 | 202 | function k_flip_neighborhood_search!(x::Vector{Bool}, obj_val::Int, julia_inst::JMAXSATInstance, 203 | k::Int, best_improvement::Bool)::Int 204 | """Perform one major iteration of a k-flip local search, i.e., search one neighborhood. 205 | 206 | If best_improvement is set, the neighborhood is completely searched and a best neighbor is 207 | kept; otherwise the search terminates in a first-improvement manner, i.e., keeping a first 208 | encountered better solution. 209 | 210 | :returns: Objective value. 211 | """ 212 | len_x = length(x) 213 | @assert 0 < k <= len_x 214 | better_found = false 215 | best_sol = copy(x) 216 | best_obj = obj_val 217 | perm = randperm(len_x) # random permutation for randomizing enumeration order 218 | p = fill(-1, k) # flipped positions 219 | # initialize 220 | i = 1 # current index in p to consider 221 | while i >= 1 222 | # evaluate solution 223 | if i == k + 1 224 | if obj_val > best_obj 225 | if !best_improvement 226 | return true 227 | end 228 | best_sol[:] = x 229 | best_obj = obj_val 230 | better_found = true 231 | end 232 | i -= 1 # backtrack 233 | else 234 | if p[i] == -1 235 | # this index has not yet been placed 236 | p[i] = (i>1 ? 
p[i-1] : 0) + 1 237 | obj_val = flip_variable!(x, perm[p[i]], julia_inst, obj_val) 238 | i += 1 # continue with next position (if any) 239 | elseif p[i] < len_x - (k - i) 240 | # further positions to explore with this index 241 | obj_val = flip_variable!(x, perm[p[i]], julia_inst, obj_val) 242 | p[i] += 1 243 | obj_val = flip_variable!(x, perm[p[i]], julia_inst, obj_val) 244 | i += 1 245 | else 246 | # we are at the last position with the i-th index, backtrack 247 | obj_val = flip_variable!(x, perm[p[i]], julia_inst, obj_val) 248 | p[i] = -1 # unset position 249 | i -= 1 250 | end 251 | end 252 | end 253 | if better_found 254 | x[:] = best_sol 255 | obj_val = best_obj 256 | end 257 | return obj_val 258 | end 259 | 260 | 261 | function flip_variable!(x::Vector{Bool}, pos::Int, julia_inst::JMAXSATInstance, obj_val::Int)::Int 262 | val = !x[pos] 263 | x[pos] = val 264 | for clause in view(julia_inst.variable_usage,:,pos) 265 | if clause == 0 break end 266 | fulfilled_by_other = false 267 | val_fulfills_now = false 268 | for v in view(julia_inst.clauses,:,clause) 269 | if v == 0 break end 270 | if abs(v) == pos 271 | val_fulfills_now = (v>0 ? val : !val) 272 | elseif x[abs(v)] == (v>0 ? 1 : 0) 273 | fulfilled_by_other = true 274 | break # clause fulfilled by other variable, no change 275 | end 276 | end 277 | if !fulfilled_by_other 278 | obj_val += (val_fulfills_now ? 1 : -1) 279 | end 280 | end 281 | return obj_val 282 | end 283 | 284 | 285 | # using Random 286 | # Random.seed!(3) 287 | 288 | end # module JuliaMAXSAT 289 | -------------------------------------------------------------------------------- /pymhlib/demos/vertex_cover.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Demo of using `pymhlib` within a Jupyter notebook\n", 8 | "\n", 9 | "Note that this notebook is still rather crude and not yet cleaned; it is a very basic first try." 
10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import sys\n", 19 | "sys.version" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": { 26 | "pycharm": { 27 | "is_executing": true, 28 | "name": "#%%\n" 29 | } 30 | }, 31 | "outputs": [], 32 | "source": [ 33 | "import sys\n", 34 | "sys.path.append('../..') # path to pymhlib to be used\n", 35 | "%load_ext autoreload\n", 36 | "%aimport pymhlib.demos.vertex_cover, logging\n", 37 | "%autoreload 1\n", 38 | "from pymhlib.settings import settings, parse_settings\n", 39 | "from pymhlib.log import init_logger\n", 40 | "from pymhlib.demos.vertex_cover import VertexCoverInstance, VertexCoverSolution\n", 41 | "from pymhlib.gvns import GVNS, Method\n", 42 | "import logging\n", 43 | "import matplotlib.pyplot as plt\n", 44 | "import pandas as pd\n", 45 | "import seaborn as sns\n", 46 | "sns.set()\n", 47 | "%matplotlib inline\n", 48 | "from IPython.display import set_matplotlib_formats; set_matplotlib_formats('png', 'pdf')\n", 49 | "if not settings.__dict__: parse_settings(args='')\n", 50 | "init_logger()\n", 51 | "logger = logging.getLogger(\"pymhlib\")" 52 | ] 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "metadata": {}, 57 | "source": [ 58 | "## In the following different construction heuristics, local search and a GVNS are applied 'runs' times to the vertex cover problem and box plots of the obtained solution values are created" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": null, 64 | "metadata": { 65 | "pycharm": { 66 | "is_executing": true, 67 | "name": "#%%\n" 68 | } 69 | }, 70 | "outputs": [], 71 | "source": [ 72 | "runs=3\n", 73 | "problem_name = \"gnm-1000-2000\"\n", 74 | "settings.mh_titer=10000\n", 75 | "logger.info(f\"pymhlib demo for solving {problem_name}\")\n", 76 | "# logger.info(get_settings_as_str())\n", 77 | "res_approx = list()\n", 78 | "res_approx_red = list()\n", 79 | "res_deg_greedy = list()\n", 80 | "res_deg_greedy_ls = list()\n", 81 | "for run in range(runs):\n", 82 | " instance = VertexCoverInstance(problem_name+f'-{run+1}')\n", 83 | " # logger.info(f\"{problem_name} instance read:\\n\" + str(instance))\n", 84 | " solution = VertexCoverSolution(instance)\n", 85 | " solution.two_approximation_construction()\n", 86 | " res_approx.append(solution.obj())\n", 87 | " solution.remove_redundant()\n", 88 | " res_approx_red.append(solution.obj())\n", 89 | " solution.greedy_construction()\n", 90 | " res_deg_greedy.append(solution.obj())\n", 91 | " alg = GVNS(solution,\n", 92 | " [Method(f\"ch0\", VertexCoverSolution.construct, 0)],\n", 93 | " [Method(f\"li2\", VertexCoverSolution.local_improve, 2)],\n", 94 | " [Method(f\"sh{i}\", VertexCoverSolution.shaking, i) for i in range(0)])\n", 95 | " alg.run()\n", 96 | " alg.method_statistics()\n", 97 | " alg.main_results()\n", 98 | " res_deg_greedy_ls.append(solution.obj())" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": { 105 | "pycharm": { 106 | "is_executing": true, 107 | "name": "#%%\n" 108 | } 109 | }, 110 | "outputs": [], 111 | "source": [ 112 | "res_vns = []\n", 113 | "settings.mh_titer=1000\n", 114 | "for run in range(runs):\n", 115 | " instance = VertexCoverInstance(problem_name+f'-{run+1}')\n", 116 | " # logger.info(f\"{problem_name} instance read:\\n\" + str(instance))\n", 117 | " solution = VertexCoverSolution(instance)\n", 118 | " alg = GVNS(solution,\n", 119 | " 
[Method(f\"ch0\", VertexCoverSolution.construct, 0)],\n", 120 | " [Method(f\"li2\", VertexCoverSolution.local_improve, 2)],\n", 121 | " [Method(f\"sh{i}\", VertexCoverSolution.shaking, i) for i in range(1,5)])\n", 122 | " alg.run()\n", 123 | " alg.method_statistics()\n", 124 | " alg.main_results()\n", 125 | " res_vns.append(solution.obj())" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "metadata": { 132 | "collapsed": false, 133 | "jupyter": { 134 | "outputs_hidden": false 135 | }, 136 | "pycharm": { 137 | "is_executing": true, 138 | "name": "#%%\n" 139 | } 140 | }, 141 | "outputs": [], 142 | "source": [ 143 | "df = pd.DataFrame({'2-apx': res_approx, '2-apx+LS1': res_approx_red, \n", 144 | " 'deg-greedy': res_deg_greedy, 'deg-greedy+LS2': res_deg_greedy_ls,\n", 145 | " 'VNS': res_vns})\n", 146 | "sns.boxplot(data=df[['2-apx', '2-apx+LS1', 'deg-greedy', 'deg-greedy+LS2']])\n", 147 | "plt.title(f'Minimum Vertex Cover Problem\\n{runs} random graphs with $n=1000,\\,m=2000$')\n", 148 | "plt.ylabel('$|C|$');\n", 149 | "# plt.ylim((0,1000))\n", 150 | "# plt.savefig(\"vcp1.pdf\")" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": { 157 | "collapsed": false, 158 | "jupyter": { 159 | "outputs_hidden": false 160 | }, 161 | "pycharm": { 162 | "is_executing": true, 163 | "name": "#%%\n" 164 | } 165 | }, 166 | "outputs": [], 167 | "source": [ 168 | "plt.title(f'Minimum Vertex Cover Problem\\n{runs} random graphs with $n=1000,\\,m=2000$')\n", 169 | "plt.ylabel('$|C|$')\n", 170 | "sns.boxplot(data=df[['2-apx+LS1', 'deg-greedy', 'deg-greedy+LS2', 'VNS']]);\n", 171 | "# plt.savefig(\"vcp2.pdf\")" 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "metadata": {}, 177 | "source": [ 178 | "## The following illustrates an interactive interface to start the GVNS and create a plot of the obtained solutions values over the iterations" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": null, 184 | "metadata": { 185 | "pycharm": { 186 | "is_executing": true, 187 | "name": "#%%\n" 188 | } 189 | }, 190 | "outputs": [], 191 | "source": [ 192 | "import ipywidgets as widgets\n", 193 | "from IPython.display import display\n", 194 | "from ipywidgets.widgets.interaction import show_inline_matplotlib_plots\n", 195 | "iter_text = widgets.FloatText(description=\"Iterations\", min=0, max=10000, value=1000)\n", 196 | "iter_slider = widgets.IntSlider(min=0, max=10000)\n", 197 | "start = widgets.Button(description=\"Start\")\n", 198 | "output = widgets.Output()\n", 199 | "hbox = widgets.HBox([iter_text, iter_slider])\n", 200 | "display(hbox)\n", 201 | "widgets.jslink((iter_text, 'value'), (iter_slider, 'value'))\n", 202 | "display(start,output)\n", 203 | "\n", 204 | "problem_name = \"gnm-1000-2000\"\n", 205 | "df = None\n", 206 | "\n", 207 | "def run(_start):\n", 208 | " with output:\n", 209 | " global logger, instance, solution, alg\n", 210 | " output.clear_output()\n", 211 | " settings.mh_log = \"run.log\" # write iteration log to file\n", 212 | " settings.mh_lfreq = 1 # log all iterations\n", 213 | " init_logger()\n", 214 | " logger.handlers = [] # switch off general textual log output\n", 215 | " logger.info(f\"pymhlib demo for solving {problem_name}\")\n", 216 | " settings.mh_titer = iter_text.value\n", 217 | " instance = VertexCoverInstance(problem_name+f'-1')\n", 218 | " solution = VertexCoverSolution(instance)\n", 219 | " alg = GVNS(solution,\n", 220 | " [Method(f\"ch0\", 
VertexCoverSolution.construct, 2)],\n", 221 | " [Method(f\"li2\", VertexCoverSolution.local_improve, 2)],\n", 222 | " [Method(f\"sh{i}\", VertexCoverSolution.shaking, i) for i in range(1,5)])\n", 223 | " alg.run()\n", 224 | " # alg.method_statistics()\n", 225 | " # alg.main_results()\n", 226 | " \n", 227 | " logging.getLogger(\"pymhlib_iter\").handlers[0].flush()\n", 228 | " global df\n", 229 | " df = pd.read_csv(\"run.log\", sep=r'\\s+')\n", 230 | " df.plot(kind='line', x='iteration', y=['best', 'obj_new']); plt.ylabel('obj')\n", 231 | " # display(df)\n", 232 | " show_inline_matplotlib_plots()\n", 233 | " \n", 234 | "start.on_click(run)" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": null, 240 | "metadata": { 241 | "pycharm": { 242 | "is_executing": true 243 | } 244 | }, 245 | "outputs": [], 246 | "source": [] 247 | } 248 | ], 249 | "metadata": { 250 | "kernelspec": { 251 | "name": "python3", 252 | "display_name": "Python 3.9.1 64-bit ('3.9.1')", 253 | "metadata": { 254 | "interpreter": { 255 | "hash": "25f4c3e3959e00fad59026cfcf062f3d67d701e03df70233f09ed9abeb5eb196" 256 | } 257 | } 258 | }, 259 | "language_info": { 260 | "codemirror_mode": { 261 | "name": "ipython", 262 | "version": 3 263 | }, 264 | "file_extension": ".py", 265 | "mimetype": "text/x-python", 266 | "name": "python", 267 | "nbconvert_exporter": "python", 268 | "pygments_lexer": "ipython3", 269 | "version": "3.9.1-final" 270 | }, 271 | "pycharm": { 272 | "stem_cell": { 273 | "cell_type": "raw", 274 | "source": [], 275 | "metadata": { 276 | "collapsed": false 277 | } 278 | } 279 | } 280 | }, 281 | "nbformat": 4, 282 | "nbformat_minor": 4 283 | } -------------------------------------------------------------------------------- /pymhlib/decision_diag.py: -------------------------------------------------------------------------------- 1 | """Generic classes for decision diagrams (DDs).""" 2 | 3 | from typing import Dict, DefaultDict, List, Optional 4 | from abc import ABC, abstractmethod 5 | from itertools import count 6 | from dataclasses import dataclass 7 | from collections import defaultdict 8 | 9 | from pymhlib.solution import VectorSolution, TObj 10 | 11 | 12 | @dataclass 13 | class Arc: 14 | """An arc in the DD 15 | 16 | Attributes: 17 | - u, v: source and target nodes 18 | - val: value of the arc, i.e., the value assigned to a corresponding variable etc. 19 | - length: arc length 20 | """ 21 | u: 'Node' 22 | v: 'Node' 23 | value: int 24 | length: TObj 25 | 26 | def __repr__(self): 27 | return f"Arc({self.u.id_}-{self.v.id_}, value={self.value}, length={self.length})" 28 | 29 | 30 | class State(ABC): 31 | """Problem-specific state information in a node.""" 32 | 33 | @abstractmethod 34 | def __hash__(self): 35 | pass 36 | 37 | @abstractmethod 38 | def __eq__(self, other: 'State'): 39 | return self is other 40 | 41 | @abstractmethod 42 | def __repr__(self): 43 | pass 44 | 45 | 46 | class Node(State, ABC): 47 | """An abstract class for a node of a DD. 
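Nodes are considered identical if and only if their problem-specific states are equal; __hash__ and __eq__ therefore delegate to the state.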
48 | 49 | Attributes 50 | - id: a DD-unique ID for printing the node 51 | - state: the problem-specific state data 52 | - z_bp: length of a best path from r to the node 53 | - pred: list of ingoing arcs 54 | - succ: dict with outgoing arcs, with values as keys 55 | """ 56 | 57 | def __init__(self, id_, state: State, z_bp: TObj): 58 | """Create a node.""" 59 | self.id_ = id_ 60 | self.state = state 61 | self.z_bp = z_bp 62 | self.pred: List[Arc] = list() 63 | self.succ: Dict[int, Arc] = dict() 64 | 65 | def __repr__(self): 66 | return f"Node {self.id_}: z_bp={self.z_bp}, state={self.state}" 67 | 68 | def __hash__(self): 69 | """A hash function used for the graph and to determine identical states.""" 70 | return hash(self.state) 71 | 72 | def __eq__(self, other: 'Node'): 73 | """Return True if the nodes represent the same states.""" 74 | return self.state == other.state 75 | 76 | 77 | NodePool = Dict[State, Node] 78 | 79 | 80 | class DecisionDiag(ABC): 81 | """An abstract class for a DD. 82 | 83 | Class attributes 84 | - to_maximize: True for maximization, else False 85 | Attributes 86 | - inst: problem instance 87 | - id_generator: yields successive IDs for the nodes 88 | - r: root node 89 | - t_state: state of the target node 90 | - t: target node; only set when actually reached 91 | - sol: solution object in which final solution will be stored 92 | - NodeType: specific node type to be used determined from r 93 | - layers: dict of dicts of nodes at each layer 94 | """ 95 | 96 | to_maximize = True 97 | 98 | def __init__(self, inst, r: Node, t_state: State, sol: VectorSolution): 99 | super().__init__() 100 | self.inst = inst 101 | self.id_generator = count() 102 | self.r = r 103 | self.t_state = t_state 104 | self.t: Optional[Node] = None 105 | self.sol = sol 106 | self.NodeType = r.__class__ 107 | self.layers: DefaultDict[int, NodePool] = defaultdict(dict) 108 | self.layers[0][r.state] = r 109 | 110 | def __repr__(self): 111 | s = f"DD, {len(self.layers)} layers:\n" 112 | for i, layer in self.layers.items(): 113 | s += f"Layer {i}, {len(layer)} node(s):\n" 114 | for node in layer.values(): 115 | s += f" {node!s}\n" 116 | return s 117 | 118 | def get_successor_node(self, node_pool: NodePool, node: Node, value: int, length: TObj, state: State) -> Node: 119 | """Look up or create a successor for a node, connect them with an arc, set z_bp, and return the successor. 120 | 121 | :param node_pool: node pool in which to either find already existing node or add new node 122 | :param node: source node 123 | :param value: value of the arc, i.e., value assigned to a corresponding variable 124 | :param length: arc length 125 | :param state: state of the successor node 126 | """ 127 | z_bp_new = node.z_bp + length 128 | if state in node_pool: 129 | succ_node = node_pool[state] 130 | if self.sol.is_better_obj(z_bp_new, succ_node.z_bp): 131 | succ_node.z_bp = z_bp_new 132 | else: 133 | if state == self.t_state: 134 | self.t = succ_node = self.NodeType('t', state, z_bp_new) 135 | else: 136 | succ_node = self.NodeType(next(self.id_generator), state, z_bp_new) 137 | node_pool[state] = succ_node 138 | assert value not in node.succ 139 | arc = Arc(node, succ_node, value, length) 140 | node.succ[value] = arc 141 | succ_node.pred.append(arc) 142 | return succ_node 143 | 144 | def expand_layer(self, depth: int) -> bool: 145 | """Expand all nodes in layers[depth], creating layers[depth+1].
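Each node in the layer is expanded via the problem-specific expand_node method, which places the successors into the node pool of layers[depth+1].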
146 | 147 | :return: True if all nodes have been expanded, else False 148 | """ 149 | layers = self.layers 150 | if depth not in layers or not layers[depth]: 151 | return False 152 | depth_succ = depth + 1 153 | further_nodes_remaining = False 154 | for _s, n in layers[depth].items(): 155 | if self.expand_node(n, depth, layers[depth_succ]): 156 | further_nodes_remaining = True 157 | return further_nodes_remaining 158 | 159 | def expand_all(self, dd_type: str, max_width: int = 1): 160 | """Expand nodes layer-wise until no unexpanded nodes remain, i.e., create complete DD. 161 | 162 | :param dd_type: of DD to create: 'exact', 'relaxed', or 'restricted' 163 | :param max_width: maximum with in case of type 'relaxed' or 'restricted' 164 | """ 165 | for depth in count(0): 166 | if not self.expand_layer(depth): 167 | break 168 | if dd_type == 'relaxed': 169 | self.relax_layer(self.layers[depth+1], max_width) 170 | elif dd_type == 'restricted': 171 | self.restrict_layer(self.layers[depth+1], max_width) 172 | elif dd_type != 'exact': 173 | raise ValueError(f"Invalid dd_type: {dd_type}") 174 | 175 | @classmethod 176 | def get_sorted_nodes(cls, node_pool): 177 | """Return a sorted list of the nodes in the given node_pool, with the most promising node first.""" 178 | return sorted(node_pool.values(), key=lambda n: n.z_bp, reverse=cls.to_maximize) 179 | 180 | def relax_layer(self, node_pool: NodePool, max_width: int = 1): 181 | """Relax the last created layer at the given depth to the given maximum width.""" 182 | if len(node_pool) > max_width: 183 | nodes_sorted = self.get_sorted_nodes(node_pool) 184 | self.merge_nodes(nodes_sorted[max_width-1:], node_pool) 185 | 186 | def restrict_layer(self, node_pool: NodePool, max_width: int = 1): 187 | """Restrict the last created layer at the given depth to the given maximum width.""" 188 | if len(node_pool) > max_width: 189 | nodes_sorted = self.get_sorted_nodes(node_pool) 190 | for node in nodes_sorted[-1:max_width-1:-1]: 191 | self.delete_node(node, node_pool) 192 | 193 | @staticmethod 194 | def delete_node(node: Node, node_pool: NodePool): 195 | """Deletes the specified node from the DD and node_pool, together with all its arcs. 196 | 197 | The nodes must not have any successors yet and must not be the r or t. 198 | """ 199 | assert not node.succ # node must not have successors 200 | del node_pool[node.state] 201 | for arc in node.pred: 202 | del arc.u.succ[arc.value] 203 | 204 | def derive_best_path(self) -> List[int]: 205 | """Derives from a completely constructed DD a best path and returns it as list of arc values.""" 206 | node = self.t 207 | path = [] 208 | assert node 209 | for _ in range(len(self.sol.x)-1, -1, -1): 210 | for pred in node.pred: 211 | # print(pred, node.z_bp, pred.u.z_bp + pred.length) 212 | if node.z_bp == pred.u.z_bp + pred.length: 213 | path.append(pred.value) 214 | node = pred.u 215 | break 216 | else: 217 | raise ValueError(f"Invalid z_bp value at node {node!s}") 218 | return path[::-1] 219 | 220 | def merge_nodes(self, nodes: List[Node], node_pool: NodePool): 221 | """Merge given list of nodes into the first node. 222 | 223 | All input nodes are not yet expanded and are assumed to be in the given node_pool. 
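This realizes the layer reduction of relaxed DDs: relax_layer calls it for the least promising nodes when a layer exceeds the maximum width. The problem-specific merge_states determines the merged state, all incoming arcs are redirected to the merged node, and its z_bp is updated to the best path length over these arcs.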
224 | """ 225 | assert len(nodes) >= 2 226 | merged_node = nodes[0] 227 | merged_state = merged_node.state 228 | for n in nodes[1:]: 229 | merged_state = self.merge_states(merged_state, n.state) 230 | if merged_state is n.state: 231 | merged_node = n 232 | if merged_node.state != merged_state: 233 | n = node_pool[merged_node.state] 234 | if n: 235 | merged_node = n 236 | else: 237 | del node_pool[merged_node.state] 238 | merged_node.state = merged_state 239 | node_pool[merged_node.state] = merged_node 240 | for n in nodes: 241 | if n is merged_node: 242 | continue 243 | merged_node.pred += n.pred 244 | for arc in n.pred: 245 | arc.v = merged_node 246 | z_bp_new = arc.u.z_bp + arc.length 247 | if self.sol.is_better_obj(z_bp_new, merged_node.z_bp): 248 | merged_node.z_bp = z_bp_new 249 | del node_pool[n.state] 250 | 251 | # Problem-specific abstract methods 252 | 253 | @abstractmethod 254 | def expand_node(self, node: Node, depth: int, node_pool: NodePool) -> bool: 255 | """Expand node, creating all successor nodes in node_pool. 256 | 257 | The successor nodes and the corresponding arcs are added to the graph. 258 | z_bp is also set in the successor nodes. 259 | :param node: the node to be expanded; must not yet have any successors 260 | :param depth: optional depth of the current node 261 | :param node_pool: pool of nodes in which to look for already existing node or create new nodes 262 | :return: True if nodes that need further expansion have been created 263 | """ 264 | return False 265 | 266 | @abstractmethod 267 | def merge_states(self, state1: State, state2: State) -> State: 268 | """Return merged state of the two given states. 269 | 270 | May return directly state1 or state2 if one of these states dominates the other. 271 | """ 272 | -------------------------------------------------------------------------------- /pymhlib/alns.py: -------------------------------------------------------------------------------- 1 | """Adaptive Large Neighborhood Search (ALNS). 2 | 3 | The module realizes a classical ALNS based on the scheduler module.
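In each iteration a destroy and a repair method are selected with probabilities proportional to adaptive weights and applied as a pair; the result is accepted according to a Metropolis criterion with geometrically decreasing temperature, and the weights are updated at the end of each segment based on the scores the methods earned.

A minimal usage sketch; MySolution with its construct, destroy, and repair methods is a hypothetical problem-specific Solution subclass and not part of pymhlib:

    from pymhlib.settings import parse_settings
    from pymhlib.scheduler import Method
    from pymhlib.alns import ALNS

    parse_settings(args='')  # initialize global settings with defaults
    sol = MySolution(instance)  # hypothetical Solution subclass
    alns = ALNS(sol,
                [Method("ch0", MySolution.construct, 0)],                        # construction heuristic
                [Method(f"de{i}", MySolution.destroy, i) for i in range(1, 4)],  # destroy methods
                [Method(f"re{i}", MySolution.repair, i) for i in range(1, 4)])   # repair methods
    alns.run()  # final result is returned in sol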
4 | """ 5 | 6 | from typing import List, Tuple 7 | from math import exp 8 | from itertools import chain 9 | from dataclasses import dataclass 10 | import numpy as np 11 | 12 | from pymhlib.settings import get_settings_parser, settings, boolArg 13 | from pymhlib.solution import Solution 14 | from pymhlib.scheduler import Scheduler, Method 15 | from pymhlib.log import LogLevel 16 | 17 | 18 | parser = get_settings_parser() 19 | parser.add_argument("--mh_alns_segment_size", type=int, default=200, help='ALNS segment size') 20 | parser.add_argument("--mh_alns_sigma1", type=int, default=33, help='ALNS score for new global best solution') 21 | parser.add_argument("--mh_alns_sigma2", type=int, default=9, help='ALNS score for better than current solution') 22 | parser.add_argument("--mh_alns_sigma3", type=int, default=13, help='ALNS score for worse accepted solution') 23 | parser.add_argument("--mh_alns_gamma", type=float, default=0.1, help='ALNS ') 24 | parser.add_argument("--mh_alns_init_temp_factor", type=float, default=1.05, 25 | help='ALNS factor for determining initial temperature') 26 | parser.add_argument("--mh_alns_temp_dec_factor", type=float, default=0.99975, 27 | help='ALNS factor for decreasing the temperature') 28 | parser.add_argument("--mh_alns_dest_min_abs", type=int, default=4, help='ALNS minimum number of elements to destroy') 29 | parser.add_argument("--mh_alns_dest_max_abs", type=int, default=60, help='ALNS maximum number of elements to destroy') 30 | parser.add_argument("--mh_alns_dest_min_ratio", type=float, default=0.05, 31 | help='ALNS minimum ratio of elements to destroy') 32 | parser.add_argument("--mh_alns_dest_max_ratio", type=float, default=0.35, 33 | help='ALNS maximum ratio of elements to destroy') 34 | parser.add_argument("--mh_alns_logscores", type=boolArg, default=True, help='ALNS write out log information on scores') 35 | 36 | 37 | @dataclass 38 | class ScoreData: 39 | """Weight of a method and all data relevant to calculate the score and update the weight. 40 | 41 | Properties 42 | - weight: weight to be used for selecting methods 43 | - score: current score in current segment 44 | - applied: number of applications in current segment 45 | """ 46 | weight: float = 1.0 47 | score: int = 0 48 | applied: int = 0 49 | 50 | 51 | class ALNS(Scheduler): 52 | """An adaptive large neighborhood search (ALNS). 53 | 54 | Attributes 55 | - sol: solution object, in which final result will be returned 56 | - meths_ch: list of construction heuristic methods 57 | - meths_de: list of destroy methods 58 | - meths_repair: list of repair methods 59 | - score_data: dictionary yielding ScoreData for a method 60 | - temperature: temperature for Metropolis criterion 61 | - next_segment: iteration number of next segment for updating operator weights 62 | """ 63 | 64 | def __init__(self, sol: Solution, meths_ch: List[Method], meths_destroy: List[Method], meths_repair: List[Method], 65 | own_settings: dict = None, consider_initial_sol=False): 66 | """Initialization. 
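The initial temperature for the Metropolis criterion is derived from the objective value of sol, multiplied by the mh_alns_init_temp_factor setting.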
67 | 68 | :param sol: solution to be improved 69 | :param meths_ch: list of construction heuristic methods 70 | :param meths_destroy: list of destroy methods 71 | :param meths_repair: list of repair methods 72 | :param own_settings: optional dictionary with specific settings 73 | :param consider_initial_sol: if true consider sol as valid solution that should be improved upon; otherwise 74 | sol is considered just a possibly uninitialized of invalid solution template 75 | """ 76 | super().__init__(sol, meths_ch + meths_destroy + meths_repair, own_settings, consider_initial_sol) 77 | self.meths_ch = meths_ch 78 | assert meths_destroy and meths_repair 79 | self.meths_destroy = meths_destroy 80 | self.meths_repair = meths_repair 81 | self.score_data = {m.name: ScoreData() for m in chain(self.meths_destroy, self.meths_repair)} 82 | self.temperature = sol.obj() * self.own_settings.mh_alns_init_temp_factor + 0.000000001 83 | self.next_segment = 0 84 | 85 | @staticmethod 86 | def select_method(meths: List[Method], weights=None) -> Method: 87 | """Randomly select a method from the given list with probabilities proportional to the given weights. 88 | 89 | :param meths: list of methods from which to select one 90 | :param weights: list of probabilities for the methods; if None, uniform probability is used 91 | """ 92 | if weights is None: 93 | return np.random.choice(meths) 94 | return np.random.choice(meths, p=weights/sum(weights)) 95 | 96 | def select_method_pair(self) -> Tuple[Method, Method]: 97 | """Select a destroy and repair method pair according to current weights.""" 98 | destroy = self.select_method(self.meths_destroy, 99 | np.fromiter((self.score_data[m.name].weight for m in self.meths_destroy), 100 | dtype=float, count=len(self.meths_destroy))) 101 | repair = self.select_method(self.meths_repair, 102 | np.fromiter((self.score_data[m.name].weight for m in self.meths_repair), 103 | dtype=float, count=len(self.meths_repair))) 104 | return destroy, repair 105 | 106 | def metropolis_criterion(self, sol_new: Solution, sol_current: Solution) -> bool: 107 | """Apply Metropolis criterion as acceptance decision, return True when sol_new should be accepted.""" 108 | if sol_new.is_better(sol_current): 109 | return True 110 | return np.random.random_sample() <= exp(-abs(sol_new.obj() - sol_current.obj()) / self.temperature) 111 | 112 | @staticmethod 113 | def get_number_to_destroy(num_elements: int, own_settings=settings, dest_min_abs=None, dest_min_ratio=None, 114 | dest_max_abs=None, dest_max_ratio=None) -> int: 115 | """Randomly sample the number of elements to destroy in the destroy operator based on the parameter settings. 
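For example, with the default settings (dest_min_abs=4, dest_min_ratio=0.05, dest_max_abs=60, dest_max_ratio=0.35) and num_elements=100, the bounds are a = max(4, 5) = 5 and b = min(60, 35) = 35, so the result is drawn uniformly from [5, 35].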
116 | 117 | :param num_elements: number of elements to destroy from (e.g., the size of the solution) 118 | :param own_settings: a settings object to be used which overrides the global settings 119 | :param dest_min_abs: absolute minimum number of elements to destroy overriding settings 120 | :param dest_min_ratio: relative minimum ratio of elements to destroy overriding settings 121 | :param dest_max_abs: absolute maximum number of elements to destroy overriding settings 122 | :param dest_max_ratio: relative maximum ratio of elements to destroy overriding settings 123 | """ 124 | if dest_min_abs is None: 125 | dest_min_abs = own_settings.mh_alns_dest_min_abs 126 | if dest_min_ratio is None: 127 | dest_min_ratio = own_settings.mh_alns_dest_min_ratio 128 | if dest_max_abs is None: 129 | dest_max_abs = own_settings.mh_alns_dest_max_abs 130 | if dest_max_ratio is None: 131 | dest_max_ratio = own_settings.mh_alns_dest_max_ratio 132 | a = max(dest_min_abs, int(dest_min_ratio * num_elements)) 133 | b = min(dest_max_abs, int(dest_max_ratio * num_elements)) 134 | return np.random.randint(a, b+1) if b >= a else b+1 135 | 136 | def update_operator_weights(self): 137 | """Update operator weights at segment ends and re-initialize scores""" 138 | if self.iteration == self.next_segment: 139 | if self.own_settings.mh_alns_logscores: 140 | self.log_scores() 141 | self.next_segment = self.iteration + self.own_settings.mh_alns_segment_size 142 | gamma = self.own_settings.mh_alns_gamma 143 | for m in chain(self.meths_destroy, self.meths_repair): 144 | data = self.score_data[m.name] 145 | if data.applied: 146 | data.weight = data.weight * (1 - gamma) + gamma * data.score / data.applied 147 | data.score = 0 148 | data.applied = 0 149 | 150 | def update_after_destroy_and_repair_performed(self, destroy: Method, repair: Method, sol_new: Solution, 151 | sol_incumbent: Solution, sol: Solution): 152 | """Update current solution, incumbent, and all operator score data according to performed destroy+repair. 
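A new overall best solution earns score mh_alns_sigma1, a solution improving on the current one mh_alns_sigma2, and a worse solution that is still accepted by the Metropolis criterion mh_alns_sigma3; the score and application counters of both the applied destroy and repair methods are increased accordingly, and a rejected new solution is reset to the current one.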
153 | 154 | :param destroy: applied destroy method 155 | :param repair: applied repair method 156 | :param sol_new: obtained new solution 157 | :param sol_incumbent: current incumbent solution 158 | :param sol: current (last accepted) solution 159 | """ 160 | destroy_data = self.score_data[destroy.name] 161 | repair_data = self.score_data[repair.name] 162 | destroy_data.applied += 1 163 | repair_data.applied += 1 164 | score = 0 165 | if sol_new.is_better(sol_incumbent): 166 | score = self.own_settings.mh_alns_sigma1 167 | # print('better than incumbent') 168 | sol_incumbent.copy_from(sol_new) 169 | sol.copy_from(sol_new) 170 | elif sol_new.is_better(sol): 171 | score = self.own_settings.mh_alns_sigma2 172 | # print('better than current') 173 | sol.copy_from(sol_new) 174 | elif sol.is_better(sol_new) and self.metropolis_criterion(sol_new, sol): 175 | score = self.own_settings.mh_alns_sigma3 176 | # print('accepted although worse') 177 | sol.copy_from(sol_new) 178 | elif sol_new != sol: 179 | sol_new.copy_from(sol) 180 | destroy_data.score += score 181 | repair_data.score += score 182 | 183 | def cool_down(self): 184 | """Apply geometric cooling.""" 185 | self.temperature *= self.own_settings.mh_alns_temp_dec_factor 186 | 187 | def log_scores(self): 188 | """Write information on received scores and weight update to log.""" 189 | indent = ' ' * 32 190 | s = f"{indent}scores at end of iteration {self.iteration}:\n" 191 | s += f"{indent} method applied score weight" 192 | for m in chain(self.meths_destroy, self.meths_repair): 193 | data = self.score_data[m.name] 194 | s += f"\n{indent}{m.name:>7} {data.applied:10d} {data.score:7d} {data.weight:10.3f}" 195 | self.iter_logger.info(LogLevel.indent(s)) 196 | 197 | def alns(self, sol: Solution): 198 | """Perform adaptive large neighborhood search (ALNS) on given solution.""" 199 | self.next_segment = self.iteration + self.own_settings.mh_alns_segment_size 200 | sol_incumbent = sol.copy() 201 | sol_new = sol.copy() 202 | while True: 203 | destroy, repair = self.select_method_pair() 204 | res = self.perform_method_pair(destroy, repair, sol_new) 205 | self.update_after_destroy_and_repair_performed(destroy, repair, sol_new, sol_incumbent, sol) 206 | if res.terminate: 207 | sol.copy_from(sol_incumbent) 208 | return 209 | self.update_operator_weights() 210 | self.cool_down() 211 | 212 | def run(self) -> None: 213 | """Actually performs the construction heuristics followed by the ALNS.""" 214 | sol = self.incumbent.copy() 215 | assert self.incumbent_valid or self.meths_ch 216 | self.perform_sequentially(sol, self.meths_ch) 217 | self.alns(sol) 218 | -------------------------------------------------------------------------------- /pymhlib/demos/data/mknapcb5-01.txt: -------------------------------------------------------------------------------- 1 | 250 10 0 2 | 992 612 582 610 495 573 608 3 | 873 636 620 560 983 760 935 4 | 693 751 582 438 499 692 570 5 | 773 473 634 980 935 905 910 6 | 902 718 837 939 847 911 789 7 | 733 977 629 947 719 804 587 8 | 919 627 816 849 565 957 762 9 | 917 936 1006 888 768 774 770 10 | 796 774 698 910 675 808 791 11 | 864 809 963 707 1057 814 649 12 | 499 558 799 940 682 950 920 13 | 797 925 930 676 674 427 843 14 | 922 675 759 898 712 808 479 15 | 762 757 786 871 722 980 627 16 | 479 562 716 910 764 1133 876 17 | 417 832 518 511 494 1049 1097 18 | 614 489 606 790 839 839 676 19 | 576 581 676 912 579 594 715 20 | 866 727 908 606 1063 734 936 21 | 747 665 552 688 1066 561 793 22 | 604 1021 930 553 539 486 392 23 | 484 791 822 
694 739 744 734 24 | 886 898 518 779 612 872 586 25 | 1093 726 657 857 666 1059 669 26 | 557 701 770 885 764 724 762 27 | 690 653 533 878 514 890 547 28 | 944 885 907 737 792 1115 904 29 | 1081 546 616 547 943 668 707 30 | 748 926 784 530 747 1145 797 31 | 761 705 902 795 615 835 785 32 | 926 750 659 1000 712 525 846 33 | 463 659 642 801 540 784 932 34 | 743 865 602 571 968 866 877 35 | 898 645 486 450 870 969 594 36 | 641 641 524 592 740 681 1066 37 | 912 775 759 544 714 38 | 396 63 43 279 807 710 482 39 | 339 659 810 542 945 467 651 40 | 290 63 68 30 468 599 712 41 | 246 642 132 452 722 461 765 42 | 56 452 303 100 625 303 843 43 | 998 145 806 104 81 921 652 44 | 453 622 915 751 116 475 139 45 | 740 207 768 199 505 76 541 46 | 794 549 879 616 148 959 435 47 | 391 270 905 864 239 881 891 48 | 691 443 413 439 1000 36 256 49 | 64 152 387 948 672 515 999 50 | 247 390 541 47 729 877 584 51 | 262 904 75 29 857 963 966 52 | 9 946 1000 206 953 994 886 53 | 2 270 209 961 84 126 574 54 | 177 363 401 44 698 693 126 55 | 138 442 825 848 456 804 621 56 | 413 275 663 997 928 679 429 57 | 22 963 679 724 518 791 927 58 | 55 571 198 115 906 765 383 59 | 343 175 397 583 142 52 394 60 | 223 943 649 760 739 259 338 61 | 623 244 759 953 876 617 977 62 | 248 274 66 588 577 247 280 63 | 396 382 174 422 561 319 823 64 | 720 460 543 464 469 618 254 65 | 842 518 267 53 116 556 657 66 | 139 800 403 672 177 511 113 67 | 586 655 630 948 177 557 301 68 | 990 745 858 483 473 794 372 69 | 563 786 833 756 252 856 57 70 | 807 92 906 68 571 322 937 71 | 157 400 45 154 769 898 968 72 | 370 984 248 739 999 471 766 73 | 688 138 690 859 562 74 | 422 396 538 638 984 463 536 75 | 582 612 379 386 769 427 358 76 | 972 340 854 367 1 543 306 77 | 954 206 959 641 931 806 247 78 | 835 763 795 518 297 36 536 79 | 143 190 388 809 267 435 659 80 | 736 430 673 45 350 739 158 81 | 142 141 129 875 992 584 547 82 | 11 77 823 696 44 398 68 83 | 599 307 703 7 944 657 41 84 | 130 938 648 669 91 857 603 85 | 57 925 235 216 468 117 535 86 | 791 145 237 501 255 246 147 87 | 370 605 353 807 84 628 271 88 | 455 176 395 236 634 733 193 89 | 394 478 169 969 538 721 932 90 | 157 483 674 760 356 461 118 91 | 165 956 532 426 871 718 462 92 | 467 902 571 603 702 545 770 93 | 327 929 505 295 969 398 143 94 | 883 472 467 813 85 474 179 95 | 909 540 597 418 300 673 417 96 | 627 988 174 92 745 397 266 97 | 947 591 713 280 737 720 153 98 | 618 522 445 658 816 148 291 99 | 48 312 954 713 510 61 279 100 | 425 778 712 372 662 852 591 101 | 219 200 720 280 776 511 608 102 | 475 498 182 523 570 101 227 103 | 556 834 692 184 378 894 121 104 | 978 123 304 541 132 887 815 105 | 558 604 781 712 587 96 744 106 | 388 16 873 960 268 410 356 107 | 577 303 442 671 939 591 770 108 | 146 430 631 329 625 107 782 109 | 486 467 259 240 285 110 | 468 664 408 316 330 149 111 111 | 285 938 687 699 114 753 384 112 | 409 506 721 408 873 421 273 113 | 10 531 457 692 15 525 58 114 | 685 173 217 627 591 117 608 115 | 295 966 31 482 118 916 278 116 | 169 88 749 435 992 403 570 117 | 826 286 685 831 503 578 1 118 | 395 198 230 737 818 789 188 119 | 39 628 483 601 555 235 133 120 | 435 43 871 948 364 118 889 121 | 560 828 239 802 858 335 979 122 | 785 476 732 217 343 274 417 123 | 611 458 917 335 737 715 675 124 | 801 360 305 312 419 508 882 125 | 178 637 291 653 7 972 806 126 | 175 360 45 253 969 842 161 127 | 745 391 817 502 73 439 475 128 | 51 321 77 332 449 118 977 129 | 303 395 882 581 944 164 71 130 | 455 906 458 278 76 987 638 131 | 69 919 161 312 218 643 971 132 | 421 818 222 264 
704 615 722 133 | 824 914 5 986 491 993 524 134 | 617 485 481 155 42 263 757 135 | 128 134 893 627 522 569 873 136 | 307 955 60 74 774 150 673 137 | 731 174 132 904 853 618 38 138 | 468 372 358 714 957 75 483 139 | 149 931 431 84 445 767 491 140 | 884 780 258 65 544 196 618 141 | 796 521 141 359 973 781 607 142 | 970 780 650 72 728 858 74 143 | 420 585 944 892 431 204 713 144 | 401 772 711 928 641 851 256 145 | 938 329 405 539 1 146 | 909 239 481 430 589 928 277 147 | 68 386 498 238 436 237 834 148 | 271 124 555 448 181 991 874 149 | 645 717 146 722 816 235 340 150 | 244 734 674 355 852 661 175 151 | 958 870 439 972 374 866 319 152 | 869 547 918 68 332 912 144 153 | 720 813 791 880 752 321 818 154 | 79 290 35 462 850 256 813 155 | 480 853 998 439 524 872 476 156 | 748 154 417 300 197 975 631 157 | 691 642 796 226 376 221 57 158 | 76 238 380 416 256 496 897 159 | 449 916 33 372 569 471 610 160 | 284 977 123 992 855 823 265 161 | 586 663 383 399 958 88 600 162 | 427 310 923 78 788 701 765 163 | 835 853 197 533 21 184 515 164 | 479 773 932 465 275 581 54 165 | 929 789 366 225 858 642 316 166 | 222 581 343 107 497 260 146 167 | 22 857 895 365 76 133 758 168 | 982 313 911 677 119 277 746 169 | 67 222 943 379 457 365 727 170 | 473 867 679 734 307 960 399 171 | 134 782 33 976 705 331 99 172 | 923 99 186 620 756 806 257 173 | 344 524 761 91 297 797 552 174 | 248 359 415 114 240 973 717 175 | 708 840 951 30 993 984 277 176 | 437 590 234 209 896 359 459 177 | 224 536 396 198 634 477 169 178 | 197 33 375 848 559 948 860 179 | 534 192 536 636 424 747 331 180 | 494 491 632 176 257 254 102 181 | 114 827 577 223 921 182 | 865 626 614 517 314 708 425 183 | 765 732 542 182 150 99 188 184 | 743 61 485 479 934 331 439 185 | 469 151 755 493 921 375 620 186 | 276 273 648 63 992 618 129 187 | 488 620 83 727 52 45 835 188 | 735 191 853 843 511 50 176 189 | 588 511 407 456 31 787 340 190 | 441 159 541 993 892 926 724 191 | 378 332 673 647 427 66 261 192 | 647 294 657 503 504 373 393 193 | 229 630 921 92 109 567 167 194 | 226 241 149 869 28 951 296 195 | 184 266 281 268 130 934 210 196 | 208 206 573 708 90 892 849 197 | 969 58 531 471 104 669 553 198 | 249 283 703 443 211 730 669 199 | 45 350 697 775 601 383 882 200 | 529 888 549 523 899 849 380 201 | 918 318 388 532 770 173 414 202 | 464 260 949 970 832 461 957 203 | 756 662 322 675 180 904 839 204 | 628 108 938 697 273 595 561 205 | 789 18 373 512 883 187 632 206 | 322 90 602 463 460 683 988 207 | 501 960 645 346 312 841 179 208 | 319 659 889 535 245 722 716 209 | 797 933 963 523 600 299 177 210 | 776 325 147 239 700 529 839 211 | 575 634 691 363 776 617 481 212 | 398 777 868 651 113 400 138 213 | 58 692 217 516 447 229 312 214 | 922 891 528 189 996 530 773 215 | 969 850 262 53 332 946 796 216 | 474 269 260 729 849 270 807 217 | 891 160 165 650 23 218 | 543 987 922 942 777 498 879 219 | 247 159 953 317 746 233 351 220 | 123 882 460 776 605 544 385 221 | 755 745 798 875 295 115 11 222 | 926 553 826 464 584 183 839 223 | 76 553 218 217 131 626 5 224 | 434 255 519 519 735 860 715 225 | 867 906 509 448 82 791 440 226 | 932 653 192 690 2 468 769 227 | 290 877 645 651 670 589 238 228 | 393 889 103 802 524 63 518 229 | 147 607 781 452 787 161 996 230 | 679 938 590 359 915 991 293 231 | 569 267 119 215 296 623 366 232 | 365 279 813 400 30 511 743 233 | 22 618 979 176 746 826 645 234 | 278 409 235 804 150 548 503 235 | 66 234 716 922 87 267 552 236 | 69 152 839 329 701 485 744 237 | 615 920 101 443 267 997 538 238 | 331 900 880 564 777 294 8 239 | 534 292 742 985 26 504 
158 240 | 904 878 600 889 170 179 211 241 | 669 700 369 728 332 792 413 242 | 537 824 724 541 881 265 426 243 | 529 276 457 679 514 899 426 244 | 501 511 548 101 846 908 996 245 | 553 991 41 404 284 185 275 246 | 236 605 238 309 106 974 951 247 | 820 616 34 288 181 313 834 248 | 696 335 122 503 74 513 704 249 | 247 373 105 773 40 171 585 250 | 594 453 300 42 331 564 320 251 | 838 206 147 311 399 153 6 252 | 689 272 166 585 596 855 936 253 | 789 504 63 746 757 254 | 660 663 612 454 55 4 397 255 | 681 670 7 439 871 969 855 256 | 869 233 756 637 237 53 21 257 | 969 325 559 930 192 447 869 258 | 152 914 196 784 436 268 786 259 | 900 850 138 173 390 775 74 260 | 574 800 814 401 103 807 20 261 | 623 666 583 613 785 252 522 262 | 267 426 349 397 618 393 316 263 | 95 444 177 925 624 469 474 264 | 26 998 806 339 114 159 122 265 | 164 891 901 394 172 369 612 266 | 192 880 388 4 494 974 931 267 | 871 365 477 579 218 980 69 268 | 22 412 687 846 499 304 811 269 | 929 25 130 90 640 963 3 270 | 411 902 634 228 119 62 686 271 | 148 283 444 610 858 561 22 272 | 666 834 125 961 125 351 520 273 | 731 650 868 855 236 636 899 274 | 88 44 486 289 465 155 109 275 | 288 225 250 536 859 91 476 276 | 624 462 7 532 767 845 279 277 | 293 178 137 313 28 974 402 278 | 785 150 868 256 348 169 140 279 | 856 660 307 498 255 207 12 280 | 75 423 158 973 574 396 438 281 | 622 51 845 320 841 717 235 282 | 881 743 710 307 233 821 767 283 | 67 340 454 44 23 955 232 284 | 401 433 464 798 879 890 275 285 | 104 420 674 758 789 126 345 286 | 219 108 638 251 441 229 844 287 | 974 153 666 438 499 403 988 288 | 600 798 558 64 242 662 796 289 | 166 804 690 18 765 290 | 714 386 736 712 112 617 957 291 | 327 660 27 376 253 566 715 292 | 974 597 113 561 38 971 646 293 | 877 230 435 340 854 636 93 294 | 146 17 748 883 600 691 380 295 | 881 569 549 72 297 633 314 296 | 471 1000 278 762 986 557 384 297 | 331 688 483 231 180 283 475 298 | 537 530 887 899 713 446 608 299 | 884 350 53 793 966 337 21 300 | 116 11 996 864 677 998 658 301 | 571 724 451 203 427 190 251 302 | 916 12 275 110 104 977 318 303 | 87 985 546 294 453 828 544 304 | 551 751 696 602 960 29 780 305 | 38 114 887 59 286 45 982 306 | 809 304 786 652 235 203 624 307 | 997 136 323 632 506 721 35 308 | 287 313 490 5 327 670 133 309 | 547 492 871 147 730 71 490 310 | 346 276 284 120 876 26 420 311 | 638 165 203 690 55 555 536 312 | 702 115 355 859 851 374 384 313 | 650 800 265 418 336 153 856 314 | 79 214 3 208 589 292 866 315 | 791 899 657 899 112 962 219 316 | 992 469 175 656 567 596 366 317 | 417 569 117 453 233 211 728 318 | 758 833 166 265 154 761 999 319 | 161 837 78 766 955 430 870 320 | 967 323 322 844 459 151 638 321 | 491 899 667 905 29 273 517 322 | 931 577 39 133 336 894 308 323 | 946 610 787 618 740 114 66 324 | 209 374 684 175 73 399 149 325 | 660 178 813 942 226 326 | 307 464 870 471 580 279 712 327 | 347 396 430 850 832 602 938 328 | 208 708 517 58 956 376 738 329 | 230 471 622 992 116 395 941 330 | 949 894 448 580 102 773 131 331 | 208 193 907 750 741 563 220 332 | 898 878 161 82 373 317 591 333 | 571 571 817 913 43 653 98 334 | 509 129 916 646 125 867 572 335 | 881 925 256 770 158 65 401 336 | 123 569 756 446 897 952 112 337 | 482 351 940 64 382 679 419 338 | 847 63 992 795 735 207 80 339 | 423 377 931 395 993 887 922 340 | 377 931 699 488 505 941 54 341 | 469 168 198 367 202 983 747 342 | 633 708 207 368 435 264 838 343 | 746 23 301 231 376 626 320 344 | 819 853 269 730 985 590 384 345 | 929 158 275 241 297 309 912 346 | 159 873 5 977 162 413 597 347 | 72 358 423 
136 517 588 172 348 | 153 297 111 528 109 235 982 349 | 449 906 119 727 748 475 595 350 | 502 514 593 151 199 88 441 351 | 406 146 241 960 393 615 579 352 | 190 54 582 532 677 435 88 353 | 779 353 221 643 31 356 727 354 | 816 696 904 459 956 887 340 355 | 585 56 470 211 419 249 583 356 | 919 884 687 965 897 68 790 357 | 437 541 725 134 563 766 962 358 | 731 539 831 308 828 94 558 359 | 500 355 65 6 416 777 908 360 | 827 702 840 176 308 250 909 361 | 917 511 879 467 358 362 | 759 791 73 398 158 218 662 363 | 687 35 448 954 454 603 191 364 | 388 271 863 227 499 18 160 365 | 17 150 939 469 954 866 427 366 | 649 576 734 344 789 713 280 367 | 300 464 982 557 296 648 418 368 | 939 319 568 75 143 451 855 369 | 59 373 448 908 195 347 137 370 | 188 612 693 590 751 389 414 371 | 627 829 259 219 551 908 924 372 | 631 841 326 946 310 498 573 373 | 232 643 355 902 174 104 109 374 | 894 908 768 914 991 296 542 375 | 927 152 320 428 318 288 654 376 | 835 389 145 950 573 672 469 377 | 217 380 63 865 119 776 919 378 | 436 758 643 642 533 365 917 379 | 830 615 535 654 526 246 208 380 | 703 874 855 577 855 391 353 381 | 571 934 22 685 101 12 582 382 | 54 958 671 425 328 450 30 383 | 486 835 844 929 556 541 311 384 | 516 202 784 429 495 167 120 385 | 859 95 92 138 371 971 3 386 | 558 392 86 118 35 815 551 387 | 799 964 578 322 275 934 235 388 | 644 413 991 994 401 804 646 389 | 735 301 948 188 559 388 525 390 | 398 238 339 606 415 873 35 391 | 768 368 825 577 386 935 158 392 | 126 975 24 193 535 297 862 393 | 479 900 140 303 182 713 850 394 | 907 186 350 522 923 669 862 395 | 240 26 480 610 123 204 196 396 | 612 337 272 430 67 514 727 397 | 750 777 823 330 46 398 | 31985 30902 31096 31684 31802 31439 29603 399 | 30621 32059 31345 --------------------------------------------------------------------------------