├── .github
│   ├── assets
│   │   └── logo.png
│   └── workflows
│       └── publish-package.yaml
├── MANIFEST.in
├── docs
│   ├── source
│   │   ├── _static
│   │   │   └── images
│   │   │       ├── boxplot-BHI-DI.png
│   │   │       ├── result_labels.png
│   │   │       ├── result_metrics.png
│   │   │       ├── boxplot-BHI-BRI.png
│   │   │       ├── boxplot-BHI-DBI.png
│   │   │       ├── boxplot-BHI-DRI.png
│   │   │       ├── convergence-BHI-1.png
│   │   │       ├── convergence-MIS-1.png
│   │   │       ├── result_convergences.png
│   │   │       ├── result_metrics_mean.png
│   │   │       └── result_metrics_std.png
│   │   ├── pages
│   │   │   ├── metacluster.rst
│   │   │   ├── metacluster.utils.rst
│   │   │   ├── support.rst
│   │   │   └── quick_start.rst
│   │   ├── conf.py
│   │   └── index.rst
│   ├── requirements.txt
│   ├── Makefile
│   └── make.bat
├── requirements.txt
├── .flake8
├── examples
│   ├── __init__.py
│   ├── utils
│   │   ├── __init__.py
│   │   └── exam_data_loader.py
│   ├── exam_MhaKCentersClustering.py
│   ├── exam_MhaKMeansTuner.py
│   ├── exam_cluster_finder.py
│   ├── exam_kmeans_GridSearchCV.py
│   └── exam_MetaCluster.py
├── tests
│   ├── __init__.py
│   ├── test_Data.py
│   └── test_MetaCluster.py
├── metacluster
│   ├── utils
│   │   ├── __init__.py
│   │   ├── io_util.py
│   │   ├── encoder.py
│   │   ├── mealpy_util.py
│   │   ├── validator.py
│   │   ├── data_loader.py
│   │   ├── visualize_util.py
│   │   └── cluster.py
│   ├── __init__.py
│   └── data
│       ├── diagnosis_II.csv
│       ├── Iris.csv
│       ├── Soybean-small.csv
│       ├── Zoo.csv
│       ├── flame.csv
│       ├── appendicitis.csv
│       ├── pathbased.csv
│       ├── Lymphography.csv
│       ├── jain.csv
│       ├── vary-density.csv
│       ├── liver.csv
│       ├── balance.csv
│       ├── seeds.csv
│       ├── Monk3.csv
│       ├── Monk1.csv
│       ├── heart.csv
│       ├── HeartEW.csv
│       └── Glass.csv
├── CITATION.cff
├── .readthedocs.yaml
├── .gitignore
├── ChangeLog.md
├── CODE_OF_CONDUCT.md
├── setup.py
└── README.md
/.github/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/.github/assets/logo.png
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md LICENSE CODE_OF_CONDUCT.md CITATION.cff ChangeLog.md
2 | include metacluster/data/*.csv
--------------------------------------------------------------------------------
/docs/source/_static/images/boxplot-BHI-DI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/boxplot-BHI-DI.png
--------------------------------------------------------------------------------
/docs/source/_static/images/result_labels.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/result_labels.png
--------------------------------------------------------------------------------
/docs/source/_static/images/result_metrics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/result_metrics.png
--------------------------------------------------------------------------------
/docs/source/_static/images/boxplot-BHI-BRI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/boxplot-BHI-BRI.png
--------------------------------------------------------------------------------
/docs/source/_static/images/boxplot-BHI-DBI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/boxplot-BHI-DBI.png
--------------------------------------------------------------------------------
/docs/source/_static/images/boxplot-BHI-DRI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/boxplot-BHI-DRI.png
--------------------------------------------------------------------------------
/docs/source/_static/images/convergence-BHI-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/convergence-BHI-1.png
--------------------------------------------------------------------------------
/docs/source/_static/images/convergence-MIS-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/convergence-MIS-1.png
--------------------------------------------------------------------------------
/docs/source/_static/images/result_convergences.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/result_convergences.png
--------------------------------------------------------------------------------
/docs/source/_static/images/result_metrics_mean.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/result_metrics_mean.png
--------------------------------------------------------------------------------
/docs/source/_static/images/result_metrics_std.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thieu1995/MetaCluster/HEAD/docs/source/_static/images/result_metrics_std.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy>=1.17.1,<=1.26.0
2 | scipy>=1.7.1
3 | scikit-learn>=1.0.2
4 | pandas>=1.3.5
5 | mealpy>=3.0.1
6 | permetrics>=1.5.0
7 | plotly>=5.10.0
8 | kaleido >=0.2.1
9 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = __pycache__, built, build, mytest, htmlcov, drafts
3 | ignore = E203, E266, W291, W503
4 | max-line-length = 180
5 | max-complexity = 18
6 | select = B,C,E,F,W,T4,B9
7 |
--------------------------------------------------------------------------------
/examples/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 16:54, 21/04/2020 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | # Defining the exact version will make sure things don't break
2 | sphinx==5.0.2
3 | sphinx_rtd_theme==1.0.0
4 | readthedocs-sphinx-search==0.3.2
5 | numpy>=1.17.1,<=1.26.0
6 | scipy>=1.7.1
7 | scikit-learn>=1.0.2
8 | pandas>=1.3.5
9 | plotly>=5.10.0
10 | kaleido >=0.2.1
11 | mealpy>=3.0.1
12 | permetrics>=1.5.0
13 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 14:15, 25/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
--------------------------------------------------------------------------------
/docs/source/pages/metacluster.rst:
--------------------------------------------------------------------------------
1 | MetaCluster Library
2 | ===================
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | metacluster.utils
8 |
9 |
10 | metacluster.metacluster module
11 | ------------------------------
12 |
13 | .. automodule:: metacluster.metacluster
14 | :members:
15 | :undoc-members:
16 | :show-inheritance:
17 |
--------------------------------------------------------------------------------
/examples/utils/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 10:53, 25/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
--------------------------------------------------------------------------------
/metacluster/utils/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 23:32, 21/05/2022 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.1.0
2 | message: "If you use this software, please cite it as below."
3 | authors:
4 | - family-names: "Van Thieu"
5 | given-names: "Nguyen"
6 | orcid: "https://orcid.org/0000-0001-9994-8747"
7 | title: "MetaCluster: An Open-Source Python Library for Metaheuristic-based Clustering Problems"
8 | version: 1.3.0
9 | doi: 10.5281/zenodo.8214539
10 | date-released: 2023-08-26
11 | url: "https://github.com/thieu1995/metacluster"
12 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Build all formats
8 | formats: all
9 |
10 | build:
11 | os: "ubuntu-20.04"
12 | tools:
13 | python: "3.8"
14 |
15 | # Build documentation in the docs/ directory with Sphinx
16 | sphinx:
17 | configuration: docs/source/conf.py
18 |
19 | python:
20 | install:
21 | - requirements: docs/requirements.txt
22 |
23 | submodules:
24 | include: all
25 |
--------------------------------------------------------------------------------
/examples/utils/exam_data_loader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 10:53, 25/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | from metacluster import get_dataset
8 |
9 | # Try unknown data
10 | get_dataset("unknown")
11 | # Enter: 1
12 |
13 | data = get_dataset("Arrhythmia")
14 | data.split_train_test(test_size=0.2)
15 |
16 | print(data.X_train[:2].shape)
17 | print(data.y_train[:2].shape)
18 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/tests/test_Data.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 14:15, 25/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import numpy as np
8 | from metacluster import get_dataset
9 |
10 | np.random.seed(41)
11 |
12 |
13 | def test_Data_class():
14 | data = get_dataset("circles")
15 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
16 |
17 | assert 0 <= data.X[np.random.randint(0, 3)][0] <= 1
18 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.https://www.sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/examples/exam_MhaKCentersClustering.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 20:33, 10/11/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | from metacluster import get_dataset, MhaKCentersClustering
8 | import time
9 |
10 | data = get_dataset("aggregation")
11 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
12 |
13 | # Get all supported methods and print them out
14 | MhaKCentersClustering.get_support(name="all")
15 |
16 | time_run = time.perf_counter()
17 | model = MhaKCentersClustering(optimizer="OriginalWOA", optimizer_paras={"name": "WOA", "epoch": 10, "pop_size": 20}, seed=10)
18 | model.fit(data, cluster_finder="elbow", obj="SSEI", verbose=True)
19 |
20 | print(model.best_agent)
21 | print(model.convergence)
22 | print(model.predict(data.X))
23 |
24 | print(f"Time process: {time.perf_counter() - time_run} seconds")
25 |
--------------------------------------------------------------------------------
/examples/exam_MhaKMeansTuner.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 11:15, 10/11/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | from metacluster import get_dataset, MhaKMeansTuner
8 | import time
9 |
10 | data = get_dataset("aggregation")
11 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
12 |
13 | # Get all supported methods and print them out
14 | MhaKMeansTuner.get_support(name="all")
15 |
16 | time_run = time.perf_counter()
17 | model = MhaKMeansTuner(optimizer="OriginalWOA", optimizer_paras={"name": "WOA", "epoch": 10, "pop_size": 20}, seed=10)
18 | model.fit(data.X, mealpy_bound=None, max_clusters=5, obj="SSEI", verbose=True)
19 |
20 | print(model.best_parameters)
21 | print(model.best_estimator.predict(data.X))
22 | print(model.predict(data.X))
23 |
24 | print(f"Time process: {time.perf_counter() - time_run} seconds")
25 |
--------------------------------------------------------------------------------
/metacluster/utils/io_util.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 16:10, 31/07/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import csv
8 | from pathlib import Path
9 |
10 |
11 | def write_dict_to_csv(data: dict, save_path=None, file_name=None):
12 | """
13 | Append a dictionary as a single row to a CSV file; the header row is written only if the file does not exist yet.
14 |
15 | Args:
16 | data (dict): A dictionary mapping column names to values.
17 | save_path (str): Path to save the file
18 | file_name (str): The name of the output CSV file.
19 |
20 | Returns:
21 | None
22 | """
23 | save_file = f"{save_path}/{file_name}.csv"
24 | Path(save_path).mkdir(parents=True, exist_ok=True)
25 | is_file_existed = Path(save_file).exists()
26 |
27 | with open(save_file, mode='a', newline='') as file:
28 | writer = csv.DictWriter(file, fieldnames=data.keys())
29 | if not is_file_existed:
30 | writer.writeheader()
31 | writer.writerow(data)
32 | return None
33 |
--------------------------------------------------------------------------------
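A minimal usage sketch for `write_dict_to_csv` (the folder, file, and column names below are only illustrative):

    from metacluster.utils.io_util import write_dict_to_csv

    # Each call appends one row; the header is written only when the file is first created.
    row = {"optimizer": "GWO", "trial": 1, "BHI": 0.72}
    write_dict_to_csv(row, save_path="history", file_name="metrics")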
/tests/test_MetaCluster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 17:18, 31/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import numpy as np
8 | from metacluster import get_dataset, MetaCluster
9 |
10 | np.random.seed(42)
11 |
12 |
13 | def test_MetaCluster_class():
14 | data = get_dataset("circles")
15 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
16 |
17 | list_optimizer = ["OriginalFBIO", "OriginalGWO", "OriginalSMA"]
18 | list_paras = [
19 | {"name": "FBIO", "epoch": 10, "pop_size": 30},
20 | {"name": "GWO", "epoch": 10, "pop_size": 30},
21 | {"name": "SMA", "epoch": 10, "pop_size": 30}
22 | ]
23 | list_obj = ["BHI", "MIS", "XBI"]
24 |
25 | model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=3, seed=10)
26 | assert model.n_trials == 3
27 | assert model.list_obj == list_obj
28 | assert len(model.list_optimizer) == len(list_optimizer)
29 |
--------------------------------------------------------------------------------
/examples/exam_cluster_finder.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 10:15, 26/08/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | from metacluster import get_dataset, MetaCluster
8 | import time
9 |
10 | MetaCluster.get_support("cluster_finder")
11 |
12 | data = get_dataset("aniso")
13 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
14 | data.y = None
15 |
16 | list_optimizer = ["OriginalWOA", "OriginalTLO", ]
17 | list_paras = [
18 | {"name": "WOA", "epoch": 10, "pop_size": 30},
19 | {"name": "TLO", "epoch": 10, "pop_size": 30},
20 | ]
21 | list_obj = ["BHI"]
22 | list_metric = ["BRI", "DBI", "DRI"]
23 |
24 | time_run = time.perf_counter()
25 | model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=2, seed=10)
26 | model.execute(data=data, cluster_finder="all_majority", list_metric=list_metric, save_path="history", verbose=False)
27 | model.save_boxplots()
28 | model.save_convergences()
29 | print(f"Time process: {time.perf_counter() - time_run} seconds")
30 |
--------------------------------------------------------------------------------
/docs/source/pages/metacluster.utils.rst:
--------------------------------------------------------------------------------
1 | metacluster.utils package
2 | =========================
3 |
4 | metacluster.utils.cluster module
5 | --------------------------------
6 |
7 | .. automodule:: metacluster.utils.cluster
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | metacluster.utils.data\_loader module
13 | -------------------------------------
14 |
15 | .. automodule:: metacluster.utils.data_loader
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | metacluster.utils.encoder module
21 | --------------------------------
22 |
23 | .. automodule:: metacluster.utils.encoder
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | metacluster.utils.io\_util module
29 | ---------------------------------
30 |
31 | .. automodule:: metacluster.utils.io_util
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 |
36 | metacluster.utils.mealpy\_util module
37 | -------------------------------------
38 |
39 | .. automodule:: metacluster.utils.mealpy_util
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
44 | metacluster.utils.validator module
45 | ----------------------------------
46 |
47 | .. automodule:: metacluster.utils.validator
48 | :members:
49 | :undoc-members:
50 | :show-inheritance:
51 |
52 | metacluster.utils.visualize\_util module
53 | ----------------------------------------
54 |
55 | .. automodule:: metacluster.utils.visualize_util
56 | :members:
57 | :undoc-members:
58 | :show-inheritance:
59 |
--------------------------------------------------------------------------------
/metacluster/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 15:23, 06/03/2022 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 | #
7 | # from metacluster import get_dataset, MetaCluster
8 | # from sklearn.preprocessing import MinMaxScaler
9 | #
10 | # # Get all supported methods and print them out
11 | # MetaCluster.get_support(name="all")
12 | #
13 | # # Scale dataset to range (0, 1)
14 | # scaler = MinMaxScaler(feature_range=(0, 1))
15 | # data = get_dataset("aniso")
16 | # data.X = scaler.fit_transform(data.X)
17 | #
18 | # # Set up Metaheuristic Algorithms
19 | # list_optimizer = ["BaseFBIO", "OriginalGWO", "OriginalSMA"]
20 | # list_paras = [
21 | # {"name": "FBIO", "epoch": 10, "pop_size": 30},
22 | # {"name": "GWO", "epoch": 10, "pop_size": 30},
23 | # {"name": "SMA", "epoch": 10, "pop_size": 30}
24 | # ]
25 | #
26 | # # Set up list objectives and list performance metrics
27 | # list_obj = ["SI", "RSI"]
28 | # list_metric = ["BHI", "DBI", "DI", "CHI", "SSEI", "NMIS", "HS", "CS", "VMS", "HGS"]
29 | #
30 | # # Define MetaCluster model and execute it
31 | # model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=3)
32 | # model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)
33 |
34 | __version__ = "1.3.0"
35 |
36 | from metacluster.utils.encoder import LabelEncoder
37 | from metacluster.utils.data_loader import Data, get_dataset
38 | from metacluster.metacluster import MetaCluster
39 | from metacluster.mha_cluster import MhaKCentersClustering, MhaKMeansTuner
40 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | run_KCenterClustering.py
2 | examples/history/
3 | examples/wrapper/
4 | # Pycharm
5 | .idea/
6 | tut_upcode.md
7 | drafts/
8 | docs/refs/
9 | # Byte-compiled / optimized / DLL files
10 | __pycache__/
11 | *.py[cod]
12 | *$py.class
13 |
14 | # C extensions
15 | *.so
16 |
17 | # Distribution / packaging
18 | .Python
19 | build/
20 | develop-eggs/
21 | dist/
22 | downloads/
23 | eggs/
24 | .eggs/
25 | lib/
26 | lib64/
27 | parts/
28 | sdist/
29 | var/
30 | wheels/
31 | *.egg-info/
32 | .installed.cfg
33 | *.egg
34 | MANIFEST
35 |
36 | # PyInstaller
37 | # Usually these files are written by a python script from a template
38 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
39 | *.manifest
40 | *.spec
41 |
42 | # Installer logs
43 | pip-log.txt
44 | pip-delete-this-directory.txt
45 |
46 | # Unit test / coverage reports
47 | htmlcov/
48 | .tox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *.cover
55 | .hypothesis/
56 | .pytest_cache/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 |
67 | # Flask stuff:
68 | instance/
69 | .webassets-cache
70 |
71 | # Scrapy stuff:
72 | .scrapy
73 |
74 | # Sphinx documentation
75 | docs/_build/
76 |
77 | # PyBuilder
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # pyenv
84 | .python-version
85 |
86 | # celery beat schedule file
87 | celerybeat-schedule
88 |
89 | # SageMath parsed files
90 | *.sage.py
91 |
92 | # Environments
93 | .env
94 | .venv
95 | env/
96 | venv/
97 | ENV/
98 | env.bak/
99 | venv.bak/
100 |
101 | # Spyder project settings
102 | .spyderproject
103 | .spyproject
104 |
105 | # Rope project settings
106 | .ropeproject
107 |
108 | # mkdocs documentation
109 | /site
110 |
111 | # mypy
112 | .mypy_cache/
113 |
--------------------------------------------------------------------------------
/examples/exam_kmeans_GridSearchCV.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 09:38, 10/11/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | # import numpy as np
8 | # from sklearn.cluster import KMeans
9 | # from sklearn.model_selection import GridSearchCV
10 | #
11 | # kmeans = KMeans()
12 | #
13 | # param_grid = {
14 | # 'n_clusters': [2, 3, 4, 5, 6, 7, 8],
15 | # 'init': ['k-means++', 'random'],
16 | # 'max_iter': [100, 200, 300],
17 | # 'n_init': [10, 15, 20],
18 | # 'random_state': [42]
19 | # }
20 | #
21 | # grid_search = GridSearchCV(kmeans, param_grid, cv=5)
22 | # grid_search.fit(X)
23 | #
24 | # best_params = grid_search.best_params_
25 | # print(best_params)
26 | #
27 | # best_kmeans = grid_search.best_estimator_
28 |
29 |
30 |
31 | import numpy as np
32 | from sklearn.datasets import make_blobs
33 | from sklearn.cluster import KMeans
34 | from sklearn.model_selection import GridSearchCV
35 |
36 | # Generate synthetic data
37 | X, y = make_blobs(n_samples=300, centers=4, random_state=42, cluster_std=1.0)
38 |
39 | # Create the KMeans model
40 | kmeans = KMeans()
41 |
42 | # Define the parameter grid
43 | param_grid = {
44 | 'n_clusters': [2, 3, 4, 5, 6, 7, 8],
45 | 'init': ['k-means++', 'random'],
46 | 'max_iter': [100, 200, 300],
47 | 'n_init': [10, 15, 20],
48 | 'random_state': [42]
49 | }
50 |
51 | # Create the GridSearchCV object
52 | grid_search = GridSearchCV(kmeans, param_grid, cv=2, verbose=2)
53 |
54 | # Fit the GridSearchCV object on your data
55 | grid_search.fit(X)
56 |
57 | # Retrieve the best parameters
58 | best_params = grid_search.best_params_
59 | print(best_params)
60 |
61 | # Retrieve the best model
62 | best_kmeans = grid_search.best_estimator_
63 | print(best_kmeans)
64 |
--------------------------------------------------------------------------------
/ChangeLog.md:
--------------------------------------------------------------------------------
1 |
2 | # Version 1.3.0
3 |
4 | ### General Updates
5 | - Updated GitHub Actions workflows.
6 | - Updated `mealpy` dependency to the latest release (v3.0.3).
7 | - Updated documentation and examples.
8 | - Updated `ChangeLog.md`, `CITATION.cff`, and `README.md`.
9 |
10 | ### Bug Fixes
11 | - Fixed warning issue in `KMeans` class.
12 | - Fixed bugs in:
13 | - `MhaKMeansTuner` class (compatibility with latest `mealpy`).
14 | - `MhaKCentersClustering` class (compatibility with latest `mealpy`).
15 | - `MetaCluster` class (compatibility with latest `mealpy`).
16 |
17 | ---------------------------------------------------------------------
18 |
19 | # Version 1.2.0
20 |
21 | + Add class `MhaKMeansTuner` that can tune hyper-parameters of the KMeans model using Metaheuristic Algorithms (MHAs)
22 | + Add class `MhaKCentersClustering` that can optimize the center weights of the KMeans model using MHAs
23 | + Update required version of mealpy to 3.0.1
24 | + Update documents, examples, ChangeLog.md, CITATION.cff, README.md
25 |
26 | ---------------------------------------------------------------------
27 |
28 | # Version 1.1.0
29 |
30 | + Update required version of PERMETRICS to 1.5.0
31 | + **Speed up 10x-100x** for the computation time of objective functions.
32 | + Update all cluster_finder methods.
33 | + Add customization functions to save figures.
34 | + Update documents, examples, ChangeLog.md, CITATION.cff, README.md
35 |
36 | ---------------------------------------------------------------------
37 |
38 | # Version 1.0.2
39 |
40 | + Update ChangeLog.md, CITATION.cff, MANIFEST.in, README.md
41 | + Add document website
42 | + Fix bug in dataset
43 |
44 | ---------------------------------------------------------------------
45 |
46 | # Version 1.0.1
47 |
48 | + Add ChangeLog.md
49 | + Update citation
50 | + Add logo
51 | + Update requirements libraries
52 |
53 | ---------------------------------------------------------------------
54 |
55 | # Version 1.0.0 (First version)
56 | + Add project's information (MANIFEST.in, LICENSE, README.md, requirements.txt, CITATION.cff, CODE_OF_CONDUCT.md)
57 | + Add supported data folder
58 | + Add utils module
59 | + Add examples and tests folder
60 | + Add MetaCluster class
61 | + Add Github's Action
62 |
63 |
--------------------------------------------------------------------------------
/docs/source/pages/support.rst:
--------------------------------------------------------------------------------
1 | ================
2 | Citation Request
3 | ================
4 |
5 | Please include these citations if you plan to use this library::
6 |
7 | @article{VanThieu2023,
8 | author = {Van Thieu, Nguyen and Oliva, Diego and Pérez-Cisneros, Marco},
9 | title = {MetaCluster: An open-source Python library for metaheuristic-based clustering problems},
10 | journal = {SoftwareX},
11 | year = {2023},
12 | pages = {101597},
13 | volume = {24},
14 | DOI = {10.1016/j.softx.2023.101597},
15 | }
16 |
17 | @article{van2023mealpy,
18 | title={MEALPY: An open-source library for latest meta-heuristic algorithms in Python},
19 | author={Van Thieu, Nguyen and Mirjalili, Seyedali},
20 | journal={Journal of Systems Architecture},
21 | year={2023},
22 | publisher={Elsevier},
23 | doi={10.1016/j.sysarc.2023.102871}
24 | }
25 |
26 | If you have an open-ended question or a research question, you can contact me via nguyenthieu2102@gmail.com
27 |
28 | ===============
29 | Important links
30 | ===============
31 |
32 | * Official source code repo: https://github.com/thieu1995/metacluster
33 | * Official document: https://metacluster.readthedocs.io/
34 | * Download releases: https://pypi.org/project/metacluster/
35 | * Issue tracker: https://github.com/thieu1995/metacluster/issues
36 | * Notable changes log: https://github.com/thieu1995/metacluster/blob/master/ChangeLog.md
37 | * Official chat group: https://t.me/+fRVCJGuGJg1mNDg1
38 |
39 | * This project is also related to our other projects on optimization and machine learning. Check them out here:
40 | * https://github.com/thieu1995/metaheuristics
41 | * https://github.com/thieu1995/mealpy
42 | * https://github.com/thieu1995/mafese
43 | * https://github.com/thieu1995/pfevaluator
44 | * https://github.com/thieu1995/opfunu
45 | * https://github.com/thieu1995/enoppy
46 | * https://github.com/thieu1995/permetrics
47 | * https://github.com/thieu1995/IntelELM
48 | * https://github.com/thieu1995/MetaPerceptron
49 | * https://github.com/thieu1995/GrafoRVFL
50 | * https://github.com/aiir-team
51 |
52 | =======
53 | License
54 | =======
55 |
56 | The project is licensed under the GNU General Public License (GPL) V3.
57 |
58 |
59 | .. toctree::
60 | :maxdepth: 4
61 |
62 | .. toctree::
63 | :maxdepth: 4
64 |
65 | .. toctree::
66 | :maxdepth: 4
67 |
--------------------------------------------------------------------------------
/examples/exam_MetaCluster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 17:03, 24/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | from metacluster import get_dataset, MetaCluster
8 | import time
9 |
10 | data = get_dataset("circles")
11 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
12 |
13 | # Get all supported methods and print them out
14 | MetaCluster.get_support(name="all")
15 |
16 | list_optimizer = ["OriginalFBIO", "OriginalGWO", "OriginalSMA", "OriginalWOA", "OriginalTLO", "OriginalALO", "OriginalAOA", "OriginalBBO",
17 | "OriginalBMO", "OriginalGCO", "OriginalHGSO", "OriginalHHO", "OriginalICA"]
18 | list_paras = [
19 | {"name": "FBIO", "epoch": 10, "pop_size": 30},
20 | {"name": "GWO", "epoch": 10, "pop_size": 30},
21 | {"name": "SMA", "epoch": 10, "pop_size": 30},
22 | {"name": "WOA", "epoch": 10, "pop_size": 30},
23 | {"name": "TLO", "epoch": 10, "pop_size": 30},
24 | {"name": "ALO", "epoch": 10, "pop_size": 30},
25 | {"name": "AOA", "epoch": 10, "pop_size": 30},
26 | {"name": "BBO", "epoch": 10, "pop_size": 30},
27 | {"name": "BMO", "epoch": 10, "pop_size": 30},
28 | {"name": "GCO", "epoch": 10, "pop_size": 30},
29 | {"name": "HGSO", "epoch": 10, "pop_size": 30},
30 | {"name": "HHO", "epoch": 10, "pop_size": 30},
31 | {"name": "ICA", "epoch": 10, "pop_size": 30},
32 | ]
33 | list_obj = ["BHI", "MIS", "XBI"]
34 | list_metric = ["BRI", "DBI", "DRI", "CHI", "KDI"]
35 |
36 | time_run = time.perf_counter()
37 | model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=2, seed=44)
38 | model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)
39 | model.save_boxplots(figure_size=None, xlabel="Optimizer", list_ylabel=None, title="Boxplot of comparison models",
40 | show_legend=True, show_mean_only=False, exts=(".png", ".pdf"), file_name="boxplot")
41 | model.save_convergences(figure_size=None, xlabel="Epoch", list_ylabel=None,
42 | title="Convergence chart of comparison models", exts=(".png", ".pdf"), file_name="convergence")
43 | print(f"Time process: {time.perf_counter() - time_run} seconds")
44 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 | import sphinx_rtd_theme
18 | import os
19 | import sys
20 |
21 | sys.path.insert(0, os.path.abspath('.'))
22 | sys.path.insert(0, os.path.abspath('../../'))
23 | sys.path.insert(1, os.path.abspath('../../metacluster'))
24 |
25 |
26 | # -- Project information -----------------------------------------------------
27 |
28 | project = 'MetaCluster'
29 | copyright = '2023, Thieu'
30 | author = 'Thieu'
31 |
32 | # The full version, including alpha/beta/rc tags
33 | release = '1.3.0'
34 |
35 |
36 | # -- General configuration ---------------------------------------------------
37 |
38 | # Add any Sphinx extension module names here, as strings. They can be
39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
40 | # ones.
41 | extensions = [
42 | "sphinx.ext.autodoc",
43 | "sphinx.ext.napoleon",
44 | "sphinx.ext.intersphinx",
45 | "sphinx.ext.viewcode",
46 | ]
47 |
48 | # Add any paths that contain templates here, relative to this directory.
49 | templates_path = ['_templates']
50 |
51 | # List of patterns, relative to source directory, that match files and
52 | # directories to ignore when looking for source files.
53 | # This pattern also affects html_static_path and html_extra_path.
54 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
55 |
56 |
57 | # -- Options for HTML output -------------------------------------------------
58 |
59 | # The theme to use for HTML and HTML Help pages. See the documentation for
60 | # a list of builtin themes.
61 | #
62 | html_theme = 'sphinx_rtd_theme'
63 |
64 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
65 |
66 | # Add any paths that contain custom static files (such as style sheets) here,
67 | # relative to this directory. They are copied after the builtin static files,
68 | # so a file named "default.css" will overwrite the builtin "default.css".
69 | html_static_path = ['_static']
--------------------------------------------------------------------------------
/metacluster/utils/encoder.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 13:58, 09/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import numpy as np
8 |
9 |
10 | class LabelEncoder:
11 | """
12 | Encode categorical labels as integer values.
13 | """
14 |
15 | def __init__(self):
16 | self.unique_labels = None
17 | self.label_to_index = {}
18 |
19 | def fit(self, y):
20 | """
21 | Fit label encoder to a given set of labels.
22 |
23 | Parameters:
24 | -----------
25 | y : array-like
26 | Labels to encode.
27 | """
28 | self.unique_labels = np.unique(y)
29 | self.label_to_index = {label: i for i, label in enumerate(self.unique_labels)}
30 |
31 | def transform(self, y):
32 | """
33 | Transform labels to encoded integer labels.
34 |
35 | Parameters:
36 | -----------
37 | y : array-like
38 | Labels to encode.
39 |
40 | Returns:
41 | --------
42 | encoded_labels : array-like
43 | Encoded integer labels.
44 | """
45 | if self.unique_labels is None:
46 | raise ValueError("Label encoder has not been fit yet.")
47 | return np.array([self.label_to_index[label] for label in y])
48 |
49 | def fit_transform(self, y):
50 | """Fit label encoder and return encoded labels.
51 |
52 | Parameters
53 | ----------
54 | y : array-like of shape (n_samples,)
55 | Target values.
56 |
57 | Returns
58 | -------
59 | y : array-like of shape (n_samples,)
60 | Encoded labels.
61 | """
62 | self.fit(y)
63 | return self.transform(y)
64 |
65 | def inverse_transform(self, y):
66 | """
67 | Transform integer labels to original labels.
68 |
69 | Parameters:
70 | -----------
71 | y : array-like
72 | Encoded integer labels.
73 |
74 | Returns:
75 | --------
76 | original_labels : array-like
77 | Original labels.
78 | """
79 | if self.unique_labels is None:
80 | raise ValueError("Label encoder has not been fit yet.")
81 | return np.array([self.unique_labels[i] if i in self.label_to_index.values() else "unknown" for i in y])
82 |
--------------------------------------------------------------------------------
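A short usage sketch for `LabelEncoder` (labels are sorted by `np.unique`, so the integer codes below follow alphabetical order):

    from metacluster import LabelEncoder

    le = LabelEncoder()
    encoded = le.fit_transform(["cat", "dog", "cat", "bird"])   # array([1, 2, 1, 0])
    original = le.inverse_transform(encoded)                    # array(['cat', 'dog', 'cat', 'bird'])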
/metacluster/data/diagnosis_II.csv:
--------------------------------------------------------------------------------
1 | 0,0,1,0,0,0,1
2 | 0.066667,0,0,1,1,1,1
3 | 0.066667,0,1,0,0,0,1
4 | 0.083333,0,0,1,1,1,1
5 | 0.083333,0,1,0,0,0,1
6 | 0.083333,0,1,0,0,0,1
7 | 0.116667,0,0,1,1,1,1
8 | 0.116667,0,1,0,0,0,1
9 | 0.133333,0,0,1,1,1,1
10 | 0.183333,0,0,1,1,1,1
11 | 0.183333,0,0,1,1,1,1
12 | 0.183333,0,1,0,0,0,1
13 | 0.183333,0,1,0,0,0,1
14 | 0.2,0,0,1,1,1,1
15 | 0.2,0,1,0,0,0,1
16 | 0.2,0,1,0,0,0,1
17 | 0.216667,0,0,1,1,1,1
18 | 0.216667,0,0,1,1,1,1
19 | 0.233333,0,0,1,1,1,1
20 | 0.233333,0,1,0,0,0,1
21 | 0.25,0,0,1,1,0,1
22 | 0.25,0,0,1,1,0,1
23 | 0.25,0,1,0,0,0,1
24 | 0.25,0,0,1,1,1,1
25 | 0.25,0,0,1,1,1,1
26 | 0.25,0,0,1,1,1,1
27 | 0.25,0,0,1,1,1,1
28 | 0.25,0,0,1,0,0,1
29 | 0.266667,0,1,0,0,0,1
30 | 0.266667,0,0,1,1,1,1
31 | 0.266667,0,0,1,0,0,1
32 | 0.283333,0,0,1,1,0,1
33 | 0.283333,0,1,0,0,0,1
34 | 0.283333,0,0,1,0,0,1
35 | 0.3,0,1,0,0,0,1
36 | 0.3,0,0,1,1,1,1
37 | 0.3,0,0,1,0,0,1
38 | 0.316667,0,1,0,0,0,1
39 | 0.316667,0,0,1,0,0,1
40 | 0.333333,0,0,1,1,0,1
41 | 0.333333,0,1,0,0,0,1
42 | 0.333333,0,1,0,0,0,1
43 | 0.333333,0,0,1,1,1,1
44 | 0.333333,0,0,1,0,0,1
45 | 0.333333,0,0,1,0,0,1
46 | 0.35,0,0,1,1,0,1
47 | 0.35,0,0,1,1,0,1
48 | 0.35,0,0,1,1,1,1
49 | 0.366667,0,0,1,1,0,1
50 | 0.366667,0,0,1,1,0,1
51 | 0.366667,0,1,0,0,0,1
52 | 0.366667,0,0,1,0,0,1
53 | 0.383333,0,1,0,0,0,1
54 | 0.383333,0,0,1,1,1,1
55 | 0.383333,0,0,1,0,0,1
56 | 0.4,0,0,1,1,0,1
57 | 0.4,0,0,1,1,0,1
58 | 0.4,0,1,0,0,0,1
59 | 0.4,0,0,1,1,1,1
60 | 0.4,0,0,1,0,0,1
61 | 0.416667,0,1,1,0,1,2
62 | 0.416667,0,1,1,0,1,2
63 | 0.433333,0,1,1,0,1,2
64 | 0.466667,0,1,1,0,1,2
65 | 0.5,0,1,1,0,1,2
66 | 0.533333,0,1,1,0,1,2
67 | 0.566667,0,1,1,0,1,2
68 | 0.583333,0,1,1,0,1,2
69 | 0.65,0,1,1,0,1,2
70 | 0.7,0,1,1,0,1,2
71 | 0.75,1,1,1,1,1,2
72 | 0.75,1,1,1,1,1,2
73 | 0.75,1,1,1,1,0,2
74 | 0.75,0,0,0,0,0,1
75 | 0.75,0,0,0,0,0,1
76 | 0.75,1,1,0,1,0,2
77 | 0.75,1,1,0,1,0,2
78 | 0.75,0,1,1,0,1,2
79 | 0.766667,1,1,1,1,0,2
80 | 0.783333,1,1,1,1,1,2
81 | 0.783333,0,0,0,0,0,1
82 | 0.783333,1,1,0,1,0,2
83 | 0.8,0,1,1,0,1,2
84 | 0.816667,1,1,1,1,1,2
85 | 0.816667,1,1,1,1,0,2
86 | 0.816667,1,1,1,1,0,2
87 | 0.816667,0,0,0,0,0,1
88 | 0.816667,1,1,0,1,0,2
89 | 0.833333,1,1,1,1,0,2
90 | 0.85,1,1,1,1,1,2
91 | 0.85,0,0,0,0,0,1
92 | 0.85,1,1,0,1,0,2
93 | 0.866667,1,1,1,1,1,2
94 | 0.866667,1,1,1,1,0,2
95 | 0.866667,0,0,0,0,0,1
96 | 0.866667,1,1,0,1,0,2
97 | 0.866667,0,1,1,0,1,2
98 | 0.883333,0,1,1,0,1,2
99 | 0.9,1,1,1,1,0,2
100 | 0.9,1,1,1,1,0,2
101 | 0.9,0,1,1,0,1,2
102 | 0.916667,1,1,1,1,1,2
103 | 0.916667,0,0,0,0,0,1
104 | 0.916667,1,1,0,1,0,2
105 | 0.916667,0,1,1,0,1,2
106 | 0.933333,1,1,1,1,1,2
107 | 0.933333,1,1,1,1,0,2
108 | 0.933333,0,0,0,0,0,1
109 | 0.933333,1,1,0,1,0,2
110 | 0.933333,0,1,1,0,1,2
111 | 0.95,1,1,1,1,1,2
112 | 0.95,0,0,0,0,0,1
113 | 0.95,1,1,0,1,0,2
114 | 0.95,0,1,1,0,1,2
115 | 0.966667,1,1,1,1,0,2
116 | 0.983333,0,1,1,0,1,2
117 | 1,0,0,0,0,0,1
118 | 1,1,1,0,1,0,2
119 | 1,0,1,1,0,1,2
120 | 1,0,1,1,0,1,2
121 |
--------------------------------------------------------------------------------
/metacluster/utils/mealpy_util.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 14:52, 26/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import numpy as np
8 | from permetrics import ClusteringMetric
9 | from sklearn.cluster import KMeans
10 | from mealpy import *
11 |
12 |
13 | class KMeansParametersProblem(Problem):
14 | def __init__(self, bounds=None, minmax="min", X=None, obj_name=None, seed=None, **kwargs):
15 | super().__init__(bounds, minmax, **kwargs)
16 | self.X = X
17 | self.obj_name = obj_name
18 | self.seed = seed
19 |
20 | def get_model(self, solution) -> KMeans:
21 | x_dict = self.decode_solution(solution)
22 | kmeans = KMeans(random_state=self.seed, n_init="auto")
23 | kmeans.set_params(**x_dict)
24 | kmeans.fit(self.X)
25 | return kmeans
26 |
27 | def obj_func(self, solution):
28 | kmeans = self.get_model(solution)
29 | y_pred = kmeans.predict(self.X)
30 | evaluator = ClusteringMetric(y_pred=y_pred, X=self.X, raise_error=False, decimal=8)
31 | obj = evaluator.get_metric_by_name(self.obj_name)[self.obj_name]
32 | return obj
33 |
34 |
35 | class KCentersClusteringProblem(Problem):
36 | def __init__(self, bounds=None, minmax=None, data=None, obj_name=None, **kwargs):
37 | super().__init__(bounds, minmax, **kwargs)
38 | self.data = data
39 | self.obj_name = obj_name
40 |
41 | @staticmethod
42 | def get_y_pred(X, solution):
43 | centers = np.reshape(solution, (-1, X.shape[1]))
44 | # Calculate the distance between each sample and each center
45 | distances = np.sqrt(np.sum((X[:, np.newaxis, :] - centers) ** 2, axis=2))
46 | # Assign each sample to the closest center
47 | labels = np.argmin(distances, axis=1)
48 | return labels
49 |
50 | def get_metrics(self, solution=None, list_metric=None, list_paras=None):
51 | centers = np.reshape(solution, (-1, self.data.X.shape[1]))
52 | y_pred = self.get_y_pred(self.data.X, centers)
53 | evaluator = ClusteringMetric(y_true=self.data.y, y_pred=y_pred, X=self.data.X, decimal=8)
54 | results = evaluator.get_metrics_by_list_names(list_metric, list_paras)
55 | return results
56 |
57 | def obj_func(self, solution):
58 | centers = self.decode_solution(solution)["center_weights"]
59 | y_pred = self.get_y_pred(self.data.X, centers)
60 | evaluator = ClusteringMetric(y_true=self.data.y, y_pred=y_pred, X=self.data.X, raise_error=False, decimal=8)
61 | obj = evaluator.get_metric_by_name(self.obj_name)[self.obj_name]
62 | return obj
63 |
--------------------------------------------------------------------------------
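A minimal sketch of how `KCentersClusteringProblem.get_y_pred` assigns each sample to its nearest center (the data points and the flat center vector here are made up purely for illustration):

    import numpy as np
    from metacluster.utils.mealpy_util import KCentersClusteringProblem

    X = np.array([[0.0, 0.0], [0.1, 0.2], [1.0, 1.0], [0.9, 1.1]])
    # A flat solution of two 2-D centers; get_y_pred reshapes it to (n_centers, n_features).
    solution = np.array([0.0, 0.0, 1.0, 1.0])
    labels = KCentersClusteringProblem.get_y_pred(X, solution)
    print(labels)   # [0 0 1 1] -- each sample gets the index of its closest center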
/.github/workflows/publish-package.yaml:
--------------------------------------------------------------------------------
1 | name: Tests & Publishes to PyPI
2 |
3 | on:
4 | release:
5 | types: [published]
6 | push:
7 | branches:
8 | - master
9 | pull_request:
10 | branches:
11 | - "*"
12 |
13 | env:
14 | PROJECT_NAME: metacluster
15 |
16 | jobs:
17 | build:
18 | runs-on: ubuntu-latest
19 | strategy:
20 | fail-fast: false
21 | matrix:
22 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
23 |
24 | steps:
25 | - uses: actions/checkout@v3
26 | with:
27 | fetch-depth: 9
28 | submodules: false
29 |
30 | - name: Use Python ${{ matrix.python-version }}
31 | uses: actions/setup-python@v4
32 | with:
33 | python-version: ${{ matrix.python-version }}
34 |
35 | - uses: actions/cache@v3
36 | id: depcache
37 | with:
38 | path: deps
39 | key: requirements-pip-${{ matrix.python-version }}-${{ hashFiles('requirements.txt') }}
40 |
41 | - name: Download dependencies
42 | if: steps.depcache.outputs.cache-hit != 'true'
43 | run: |
44 | pip download --dest=deps -r requirements.txt
45 |
46 | - name: Install dependencies
47 | run: |
48 | pip install -U --no-index --find-links=deps deps/*
49 | pip install pytest pytest-cov flake8
50 |
51 | - name: Run tests
52 | run: |
53 | pytest --doctest-modules --junitxml=junit/pytest-results-${{ matrix.python-version }}.xml --cov=$PROJECT_NAME --cov-report=xml tests/
54 | flake8 tests/
55 |
56 | - name: Upload pytest test results
57 | uses: actions/upload-artifact@v4
58 | with:
59 | name: pytest-results-${{ matrix.python-version }}
60 | path: junit/pytest-results-${{ matrix.python-version }}.xml
61 | if: always()
62 |
63 | - name: Install distribution dependencies
64 | run: pip install --upgrade twine setuptools wheel
65 | if: matrix.python-version == 3.11
66 |
67 | - name: Create distribution package
68 | run: python setup.py sdist bdist_wheel
69 | if: matrix.python-version == 3.11
70 |
71 | - name: Upload distribution package
72 | uses: actions/upload-artifact@v4
73 | with:
74 | name: dist-package-${{ matrix.python-version }}
75 | path: dist
76 | if: matrix.python-version == 3.11
77 |
78 | publish:
79 | runs-on: ubuntu-latest
80 | needs: build
81 | if: github.event_name == 'release'
82 | permissions:
83 | id-token: write
84 | contents: read
85 | steps:
86 | - name: Download a distribution artifact
87 | uses: actions/download-artifact@v4
88 | with:
89 | name: dist-package-3.11
90 | path: dist
91 |
92 | - name: Publish distribution 📦 to Test PyPI
93 | uses: pypa/gh-action-pypi-publish@release/v1
94 | with:
95 | repository-url: https://test.pypi.org/legacy/
96 | skip-existing: true
97 | attestations: false
98 |
99 | - name: Publish distribution 📦 to PyPI
100 | uses: pypa/gh-action-pypi-publish@release/v1
101 | with:
102 | skip-existing: true
103 | attestations: true
104 |
--------------------------------------------------------------------------------
/metacluster/data/Iris.csv:
--------------------------------------------------------------------------------
1 | 5.1,3.5,1.4,0.2,0
2 | 4.9,3.0,1.4,0.2,0
3 | 4.7,3.2,1.3,0.2,0
4 | 4.6,3.1,1.5,0.2,0
5 | 5.0,3.6,1.4,0.2,0
6 | 5.4,3.9,1.7,0.4,0
7 | 4.6,3.4,1.4,0.3,0
8 | 5.0,3.4,1.5,0.2,0
9 | 4.4,2.9,1.4,0.2,0
10 | 4.9,3.1,1.5,0.1,0
11 | 5.4,3.7,1.5,0.2,0
12 | 4.8,3.4,1.6,0.2,0
13 | 4.8,3.0,1.4,0.1,0
14 | 4.3,3.0,1.1,0.1,0
15 | 5.8,4.0,1.2,0.2,0
16 | 5.7,4.4,1.5,0.4,0
17 | 5.4,3.9,1.3,0.4,0
18 | 5.1,3.5,1.4,0.3,0
19 | 5.7,3.8,1.7,0.3,0
20 | 5.1,3.8,1.5,0.3,0
21 | 5.4,3.4,1.7,0.2,0
22 | 5.1,3.7,1.5,0.4,0
23 | 4.6,3.6,1.0,0.2,0
24 | 5.1,3.3,1.7,0.5,0
25 | 4.8,3.4,1.9,0.2,0
26 | 5.0,3.0,1.6,0.2,0
27 | 5.0,3.4,1.6,0.4,0
28 | 5.2,3.5,1.5,0.2,0
29 | 5.2,3.4,1.4,0.2,0
30 | 4.7,3.2,1.6,0.2,0
31 | 4.8,3.1,1.6,0.2,0
32 | 5.4,3.4,1.5,0.4,0
33 | 5.2,4.1,1.5,0.1,0
34 | 5.5,4.2,1.4,0.2,0
35 | 4.9,3.1,1.5,0.2,0
36 | 5.0,3.2,1.2,0.2,0
37 | 5.5,3.5,1.3,0.2,0
38 | 4.9,3.6,1.4,0.1,0
39 | 4.4,3.0,1.3,0.2,0
40 | 5.1,3.4,1.5,0.2,0
41 | 5.0,3.5,1.3,0.3,0
42 | 4.5,2.3,1.3,0.3,0
43 | 4.4,3.2,1.3,0.2,0
44 | 5.0,3.5,1.6,0.6,0
45 | 5.1,3.8,1.9,0.4,0
46 | 4.8,3.0,1.4,0.3,0
47 | 5.1,3.8,1.6,0.2,0
48 | 4.6,3.2,1.4,0.2,0
49 | 5.3,3.7,1.5,0.2,0
50 | 5.0,3.3,1.4,0.2,0
51 | 7.0,3.2,4.7,1.4,1
52 | 6.4,3.2,4.5,1.5,1
53 | 6.9,3.1,4.9,1.5,1
54 | 5.5,2.3,4.0,1.3,1
55 | 6.5,2.8,4.6,1.5,1
56 | 5.7,2.8,4.5,1.3,1
57 | 6.3,3.3,4.7,1.6,1
58 | 4.9,2.4,3.3,1.0,1
59 | 6.6,2.9,4.6,1.3,1
60 | 5.2,2.7,3.9,1.4,1
61 | 5.0,2.0,3.5,1.0,1
62 | 5.9,3.0,4.2,1.5,1
63 | 6.0,2.2,4.0,1.0,1
64 | 6.1,2.9,4.7,1.4,1
65 | 5.6,2.9,3.6,1.3,1
66 | 6.7,3.1,4.4,1.4,1
67 | 5.6,3.0,4.5,1.5,1
68 | 5.8,2.7,4.1,1.0,1
69 | 6.2,2.2,4.5,1.5,1
70 | 5.6,2.5,3.9,1.1,1
71 | 5.9,3.2,4.8,1.8,1
72 | 6.1,2.8,4.0,1.3,1
73 | 6.3,2.5,4.9,1.5,1
74 | 6.1,2.8,4.7,1.2,1
75 | 6.4,2.9,4.3,1.3,1
76 | 6.6,3.0,4.4,1.4,1
77 | 6.8,2.8,4.8,1.4,1
78 | 6.7,3.0,5.0,1.7,1
79 | 6.0,2.9,4.5,1.5,1
80 | 5.7,2.6,3.5,1.0,1
81 | 5.5,2.4,3.8,1.1,1
82 | 5.5,2.4,3.7,1.0,1
83 | 5.8,2.7,3.9,1.2,1
84 | 6.0,2.7,5.1,1.6,1
85 | 5.4,3.0,4.5,1.5,1
86 | 6.0,3.4,4.5,1.6,1
87 | 6.7,3.1,4.7,1.5,1
88 | 6.3,2.3,4.4,1.3,1
89 | 5.6,3.0,4.1,1.3,1
90 | 5.5,2.5,4.0,1.3,1
91 | 5.5,2.6,4.4,1.2,1
92 | 6.1,3.0,4.6,1.4,1
93 | 5.8,2.6,4.0,1.2,1
94 | 5.0,2.3,3.3,1.0,1
95 | 5.6,2.7,4.2,1.3,1
96 | 5.7,3.0,4.2,1.2,1
97 | 5.7,2.9,4.2,1.3,1
98 | 6.2,2.9,4.3,1.3,1
99 | 5.1,2.5,3.0,1.1,1
100 | 5.7,2.8,4.1,1.3,1
101 | 6.3,3.3,6.0,2.5,2
102 | 5.8,2.7,5.1,1.9,2
103 | 7.1,3.0,5.9,2.1,2
104 | 6.3,2.9,5.6,1.8,2
105 | 6.5,3.0,5.8,2.2,2
106 | 7.6,3.0,6.6,2.1,2
107 | 4.9,2.5,4.5,1.7,2
108 | 7.3,2.9,6.3,1.8,2
109 | 6.7,2.5,5.8,1.8,2
110 | 7.2,3.6,6.1,2.5,2
111 | 6.5,3.2,5.1,2.0,2
112 | 6.4,2.7,5.3,1.9,2
113 | 6.8,3.0,5.5,2.1,2
114 | 5.7,2.5,5.0,2.0,2
115 | 5.8,2.8,5.1,2.4,2
116 | 6.4,3.2,5.3,2.3,2
117 | 6.5,3.0,5.5,1.8,2
118 | 7.7,3.8,6.7,2.2,2
119 | 7.7,2.6,6.9,2.3,2
120 | 6.0,2.2,5.0,1.5,2
121 | 6.9,3.2,5.7,2.3,2
122 | 5.6,2.8,4.9,2.0,2
123 | 7.7,2.8,6.7,2.0,2
124 | 6.3,2.7,4.9,1.8,2
125 | 6.7,3.3,5.7,2.1,2
126 | 7.2,3.2,6.0,1.8,2
127 | 6.2,2.8,4.8,1.8,2
128 | 6.1,3.0,4.9,1.8,2
129 | 6.4,2.8,5.6,2.1,2
130 | 7.2,3.0,5.8,1.6,2
131 | 7.4,2.8,6.1,1.9,2
132 | 7.9,3.8,6.4,2.0,2
133 | 6.4,2.8,5.6,2.2,2
134 | 6.3,2.8,5.1,1.5,2
135 | 6.1,2.6,5.6,1.4,2
136 | 7.7,3.0,6.1,2.3,2
137 | 6.3,3.4,5.6,2.4,2
138 | 6.4,3.1,5.5,1.8,2
139 | 6.0,3.0,4.8,1.8,2
140 | 6.9,3.1,5.4,2.1,2
141 | 6.7,3.1,5.6,2.4,2
142 | 6.9,3.1,5.1,2.3,2
143 | 5.8,2.7,5.1,1.9,2
144 | 6.8,3.2,5.9,2.3,2
145 | 6.7,3.3,5.7,2.5,2
146 | 6.7,3.0,5.2,2.3,2
147 | 6.3,2.5,5.0,1.9,2
148 | 6.5,3.0,5.2,2.0,2
149 | 6.2,3.4,5.4,2.3,2
150 | 5.9,3.0,5.1,1.8,2
151 |
--------------------------------------------------------------------------------
/metacluster/data/Soybean-small.csv:
--------------------------------------------------------------------------------
1 | 4,0,2,1,1,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
2 | 5,0,2,1,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,1
3 | 3,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,1
4 | 6,0,2,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
5 | 4,0,2,1,0,3,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
6 | 5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
7 | 3,0,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,1
8 | 6,0,0,2,1,0,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
9 | 4,0,0,1,0,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
10 | 5,0,0,2,0,3,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
11 | 6,0,0,1,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
12 | 3,0,0,2,1,0,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
13 | 4,0,0,1,1,1,3,1,1,1,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
14 | 3,0,0,1,0,1,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
15 | 0,1,2,0,0,1,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,3
16 | 2,1,2,0,0,3,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,3
17 | 2,1,2,0,0,2,1,1,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,3
18 | 0,1,2,0,0,0,1,1,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,3
19 | 0,1,2,0,0,2,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,3
20 | 4,0,2,0,1,0,1,2,0,2,1,1,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,3
21 | 2,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,3
22 | 2,1,2,1,1,3,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
23 | 0,1,1,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
24 | 3,1,2,0,0,1,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
25 | 2,1,2,1,1,1,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
26 | 1,1,2,0,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
27 | 1,1,2,1,0,0,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
28 | 0,1,2,1,0,3,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
29 | 2,1,2,0,0,1,1,2,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
30 | 3,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
31 | 3,1,1,0,0,2,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
32 | 0,1,2,1,1,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
33 | 1,1,2,1,1,3,1,2,0,1,1,1,0,2,2,0,0,0,1,1,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
34 | 3,0,2,1,0,1,0,2,1,2,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,1
35 | 6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
36 | 6,0,2,1,0,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,1
37 | 5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
38 | 6,0,0,2,0,1,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
39 | 5,0,0,2,1,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,2
40 | 0,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,1,3
41 | 3,0,2,0,1,3,1,2,0,1,1,0,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,3
42 | 0,1,2,0,0,1,1,2,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,3
43 | 1,1,2,0,0,0,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
44 | 1,1,2,1,1,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
45 | 2,1,1,0,0,3,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
46 | 0,1,1,1,1,2,1,2,1,0,1,1,0,2,2,0,0,0,1,1,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,4
47 | 0,1,2,1,0,3,1,1,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,4
48 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. MetaCluster documentation master file, created by
2 | sphinx-quickstart on Sat May 20 16:59:33 2023.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to MetaCluster's documentation!
7 | =======================================
8 |
9 | .. image:: https://img.shields.io/badge/release-1.3.0-yellow.svg
10 | :target: https://github.com/thieu1995/metacluster/releases
11 |
12 | .. image:: https://img.shields.io/pypi/wheel/metacluster.svg
13 | :target: https://pypi.python.org/pypi/metacluster
14 |
15 | .. image:: https://badge.fury.io/py/metacluster.svg
16 | :target: https://badge.fury.io/py/metacluster
17 |
18 | .. image:: https://img.shields.io/pypi/pyversions/metacluster.svg
19 | :target: https://www.python.org/
20 |
21 | .. image:: https://img.shields.io/pypi/status/metacluster.svg
22 | :target: https://img.shields.io/pypi/status/metacluster.svg
23 |
24 | .. image:: https://github.com/thieu1995/metacluster/actions/workflows/publish-package.yaml/badge.svg
25 | :target: https://github.com/thieu1995/metacluster/actions/workflows/publish-package.yaml
26 |
27 | .. image:: https://static.pepy.tech/badge/MetaCluster
28 | :target: https://pepy.tech/project/MetaCluster
29 |
30 | .. image:: https://img.shields.io/github/release-date/thieu1995/metacluster.svg
31 | :target: https://img.shields.io/github/release-date/thieu1995/metacluster.svg
32 |
33 | .. image:: https://readthedocs.org/projects/metacluster/badge/?version=latest
34 | :target: https://metacluster.readthedocs.io/en/latest/?badge=latest
35 |
36 | .. image:: https://img.shields.io/badge/Chat-on%20Telegram-blue
37 | :target: https://t.me/+fRVCJGuGJg1mNDg1
38 |
39 | .. image:: https://img.shields.io/github/contributors/thieu1995/metacluster.svg
40 | :target: https://img.shields.io/github/contributors/thieu1995/metacluster.svg
41 |
42 | .. image:: https://img.shields.io/badge/PR-Welcome-%23FF8300.svg?
43 | :target: https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project
44 |
45 | .. image:: https://zenodo.org/badge/545209353.svg
46 | :target: https://doi.org/10.5281/zenodo.7969042
47 |
48 | .. image:: https://img.shields.io/badge/License-GPLv3-blue.svg
49 | :target: https://www.gnu.org/licenses/gpl-3.0
50 |
51 |
52 | MetaCluster is the largest open-source library of nature-inspired (metaheuristic) optimization algorithms for
53 | clustering problems in Python.
54 |
55 | * **Free software:** GNU General Public License (GPL) V3 license
56 | * **Total nature-inspired metaheuristic optimizers (Metaheuristic Algorithms)**: > 200 optimizers
57 | * **Total objective functions (as fitness)**: > 40 objectives
58 | * **Total supported datasets**: 48 datasets from Scikit learn, UCI, ELKI, KEEL...
59 | * **Total performance metrics**: > 40 metrics
60 | * **Total different ways of detecting the K value**: >= 10 methods
61 | * **Documentation:** https://metacluster.readthedocs.io/en/latest/
62 | * **Python versions:** >= 3.7.x
63 | * **Dependencies:** numpy, scipy, scikit-learn, pandas, mealpy, permetrics, plotly, kaleido
64 |
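A minimal sketch of the typical workflow (see the Quick Start page for the full, runnable walk-through; the optimizer, objective, and metric names below are just illustrative values)::

    from metacluster import get_dataset, MetaCluster

    data = get_dataset("Iris")
    data.X, scaler = data.scale(data.X, method="MinMaxScaler")

    model = MetaCluster(list_optimizer=["OriginalGWO"], list_paras=[{"name": "GWO", "epoch": 10, "pop_size": 30}],
                        list_obj=["SI"], n_trials=3)
    model.execute(data=data, cluster_finder="elbow", list_metric=["BHI", "DBI"], save_path="history")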
65 |
66 | .. toctree::
67 | :maxdepth: 4
68 | :caption: Quick Start:
69 |
70 | pages/quick_start.rst
71 |
72 | .. toctree::
73 | :maxdepth: 4
74 | :caption: Models API:
75 |
76 | pages/metacluster.rst
77 |
78 | .. toctree::
79 | :maxdepth: 4
80 | :caption: Support:
81 |
82 | pages/support.rst
83 |
84 |
85 |
86 | Indices and tables
87 | ==================
88 |
89 | * :ref:`genindex`
90 | * :ref:`modindex`
91 | * :ref:`search`
92 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at nguyenthieu2102@gmail.com. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/metacluster/data/Zoo.csv:
--------------------------------------------------------------------------------
1 | 1,0,0,1,0,0,1,1,1,1,0,0,4,0,0,1,1
2 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
3 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,4
4 | 1,0,0,1,0,0,1,1,1,1,0,0,4,0,0,1,1
5 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
6 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
7 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,1,1,1
8 | 0,0,1,0,0,1,0,1,1,0,0,1,0,1,1,0,4
9 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,4
10 | 1,0,0,1,0,0,0,1,1,1,0,0,4,0,1,0,1
11 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
12 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,1,0,2
13 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,4
14 | 0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,7
15 | 0,0,1,0,0,1,1,0,0,0,0,0,4,0,0,0,7
16 | 0,0,1,0,0,1,1,0,0,0,0,0,6,0,0,0,7
17 | 0,1,1,0,1,0,1,0,1,1,0,0,2,1,0,0,2
18 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
19 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,1,4
20 | 0,0,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1
21 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,1,0,2
22 | 0,1,1,0,1,1,0,0,1,1,0,0,2,1,0,0,2
23 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
24 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,0,1,2
25 | 0,0,1,0,0,0,0,0,0,1,0,0,6,0,0,0,6
26 | 0,0,1,0,0,1,1,1,1,1,0,0,4,0,0,0,5
27 | 0,0,1,0,0,1,1,1,1,1,1,0,4,0,0,0,5
28 | 1,0,0,1,1,0,0,1,1,1,0,0,2,1,0,0,1
29 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
30 | 1,0,0,1,0,0,1,1,1,1,0,0,2,0,1,1,1
31 | 0,0,1,0,1,0,0,0,0,1,0,0,6,0,0,0,6
32 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,1,1,1
33 | 1,0,0,1,0,0,0,1,1,1,0,0,2,0,0,1,1
34 | 0,1,1,0,1,1,1,0,1,1,0,0,2,1,0,0,2
35 | 0,0,1,0,0,1,0,1,1,0,0,1,0,1,0,0,4
36 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,1,0,1
37 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,0,1
38 | 0,1,1,0,1,0,1,0,1,1,0,0,2,1,0,0,2
39 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,4
40 | 1,0,1,0,1,0,0,0,0,1,1,0,6,0,1,0,6
41 | 1,0,1,0,1,0,0,0,0,1,0,0,6,0,0,0,6
42 | 0,1,1,0,0,0,1,0,1,1,0,0,2,1,0,0,2
43 | 0,0,1,0,1,0,1,0,0,1,0,0,6,0,0,0,6
44 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,0,0,2
45 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
46 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
47 | 0,0,1,0,0,1,1,0,0,0,0,0,6,0,0,0,7
48 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
49 | 1,0,0,1,0,1,1,1,1,1,0,0,4,1,0,1,1
50 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,0,1
51 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
52 | 1,0,1,0,1,0,0,0,0,1,0,0,6,0,0,0,6
53 | 0,0,1,0,0,1,1,1,1,1,0,0,4,1,0,0,5
54 | 0,0,1,0,0,1,1,0,0,0,0,0,8,0,0,1,7
55 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,0,1
56 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,1,1
57 | 0,1,1,0,0,0,0,0,1,1,0,0,2,1,0,1,2
58 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,1,0,2
59 | 0,1,1,0,0,1,1,0,1,1,0,0,2,1,0,1,2
60 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,0,0,2
61 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,1,4
62 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,4
63 | 0,0,1,0,0,0,1,1,1,1,1,0,0,1,0,0,3
64 | 1,0,1,1,0,1,1,0,1,1,0,0,4,1,0,1,1
65 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
66 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,1,1,1
67 | 0,0,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1
68 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
69 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,1,1,1
70 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
71 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,1,1,1
72 | 0,1,1,0,0,0,1,0,1,1,0,0,2,1,0,1,2
73 | 0,0,0,0,0,0,1,0,0,1,1,0,8,1,0,0,7
74 | 0,0,1,0,0,1,0,1,1,0,0,1,0,1,0,0,4
75 | 1,0,0,1,0,1,1,1,1,1,0,1,0,0,0,1,1
76 | 1,0,0,1,0,1,1,1,1,1,0,1,2,1,0,1,1
77 | 0,0,0,0,0,1,1,1,1,0,1,0,0,1,0,0,3
78 | 0,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,7
79 | 0,1,1,0,1,1,1,0,1,1,0,0,2,1,0,0,2
80 | 0,1,1,0,1,1,1,0,1,1,0,0,2,1,0,0,2
81 | 0,0,1,0,0,0,1,1,1,1,0,0,0,1,0,0,3
82 | 0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,7
83 | 0,0,1,0,0,1,0,1,1,0,0,1,0,1,0,0,4
84 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,0,0,2
85 | 1,0,0,1,0,0,0,1,1,1,0,0,2,1,0,0,1
86 | 0,0,1,0,0,1,1,0,0,0,0,0,5,0,0,0,7
87 | 0,0,1,0,0,1,1,1,1,0,1,1,0,1,0,1,4
88 | 0,1,1,0,1,1,0,0,1,1,0,0,2,1,0,1,2
89 | 0,0,1,0,0,0,0,0,0,1,0,0,6,0,0,0,6
90 | 0,0,1,0,0,1,0,1,1,1,0,0,4,0,0,0,5
91 | 0,0,1,0,0,0,0,0,1,1,0,0,4,1,0,1,3
92 | 0,0,1,0,0,0,1,1,1,1,0,0,4,1,0,0,3
93 | 0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,1,4
94 | 1,0,0,1,1,0,0,1,1,1,0,0,2,1,0,0,1
95 | 1,0,0,1,0,0,0,1,1,1,0,0,4,1,0,0,1
96 | 0,1,1,0,1,0,1,0,1,1,0,0,2,1,0,1,2
97 | 1,0,0,1,0,0,0,1,1,1,0,0,2,1,0,1,1
98 | 1,0,1,0,1,0,0,0,0,1,1,0,6,0,0,0,6
99 | 1,0,0,1,0,0,1,1,1,1,0,0,4,1,0,1,1
100 | 0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,7
101 | 0,1,1,0,1,0,0,0,1,1,0,0,2,1,0,0,2
102 |
--------------------------------------------------------------------------------
/metacluster/utils/validator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 21:39, 29/06/2022 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import operator
8 | import numpy as np
9 |
10 |
11 | def is_in_bound(value, bound):
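    # Note: tuple bounds are treated as exclusive (operator.lt), list bounds as inclusive (operator.le).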
12 | ops = None
13 | if type(bound) is tuple:
14 | ops = operator.lt
15 | elif type(bound) is list:
16 | ops = operator.le
17 | if bound[0] == float("-inf") and bound[1] == float("inf"):
18 | return True
19 | elif bound[0] == float("-inf") and ops(value, bound[1]):
20 | return True
21 | elif ops(bound[0], value) and bound[1] == float("inf"):
22 | return True
23 | elif ops(bound[0], value) and ops(value, bound[1]):
24 | return True
25 | return False
26 |
27 |
28 | def is_str_in_list(value: str, my_list: list):
29 | if type(value) == str and my_list is not None:
30 | return True if value in my_list else False
31 | return False
32 |
33 |
34 | def check_int(name: str, value: int, bound=None):
35 | if type(value) in [int, float]:
36 | if bound is None:
37 | return int(value)
38 | elif is_in_bound(value, bound):
39 | return int(value)
40 | bound = "" if bound is None else f"and value should be in range: {bound}"
41 |     raise ValueError(f"'{name}' must be an integer {bound}.")
42 |
43 |
44 | def check_float(name: str, value: float, bound=None):
45 | if type(value) in [int, float]:
46 | if bound is None:
47 | return float(value)
48 | elif is_in_bound(value, bound):
49 | return float(value)
50 | bound = "" if bound is None else f"and value should be in range: {bound}"
51 |     raise ValueError(f"'{name}' must be a float {bound}.")
52 |
53 |
54 | def check_str(name: str, value: str, bound=None):
55 | if type(value) is str:
56 | if bound is None or is_str_in_list(value, bound):
57 | return value
58 | bound = "" if bound is None else f"and value should be one of this: {bound}"
59 |     raise ValueError(f"'{name}' must be a string {bound}.")
60 |
61 |
62 | def check_bool(name: str, value: bool, bound=(True, False)):
63 | if type(value) is bool:
64 | if value in bound:
65 | return value
66 | bound = "" if bound is None else f"and value should be one of this: {bound}"
67 |     raise ValueError(f"'{name}' must be a boolean {bound}.")
68 |
69 |
70 | def check_tuple_int(name: str, values: tuple, bounds=None):
71 | if type(values) in [tuple, list] and len(values) > 1:
72 | value_flag = [type(item) == int for item in values]
73 | if np.all(value_flag):
74 | if bounds is not None and len(bounds) == len(values):
75 | value_flag = [is_in_bound(item, bound) for item, bound in zip(values, bounds)]
76 | if np.all(value_flag):
77 | return values
78 | else:
79 | return values
80 | bounds = "" if bounds is None else f"and values should be in range: {bounds}"
81 |     raise ValueError(f"'{name}' must be a tuple/list of integers {bounds}.")
82 |
83 |
84 | def check_tuple_float(name: str, values: tuple, bounds=None):
85 | if type(values) in [tuple, list] and len(values) > 1:
86 | value_flag = [type(item) in [int, float] for item in values]
87 | if np.all(value_flag):
88 | if bounds is not None and len(bounds) == len(values):
89 | value_flag = [is_in_bound(item, bound) for item, bound in zip(values, bounds)]
90 | if np.all(value_flag):
91 | return values
92 | else:
93 | return values
94 | bounds = "" if bounds is None else f"and values should be in range: {bounds}"
95 |     raise ValueError(f"'{name}' must be a tuple/list of floats {bounds}.")
96 |
--------------------------------------------------------------------------------
/metacluster/data/flame.csv:
--------------------------------------------------------------------------------
1 | 1.85,27.8,1
2 | 1.35,26.65,1
3 | 1.4,23.25,0
4 | 0.85,23.05,0
5 | 0.5,22.35,0
6 | 0.65,21.35,0
7 | 1.1,22.05,0
8 | 1.35,22.65,0
9 | 1.95,22.8,0
10 | 2.4,22.45,0
11 | 1.8,22,0
12 | 2.5,21.85,0
13 | 2.95,21.4,0
14 | 1.9,21.25,0
15 | 1.35,21.45,0
16 | 1.35,20.9,0
17 | 1.25,20.35,0
18 | 1.75,20.05,0
19 | 2,20.6,0
20 | 2.5,21,0
21 | 1.7,19.05,0
22 | 2.4,20.05,0
23 | 3.05,20.45,0
24 | 3.7,20.45,0
25 | 3.45,19.9,0
26 | 2.95,19.5,0
27 | 2.4,19.4,0
28 | 2.4,18.25,0
29 | 2.85,18.75,0
30 | 3.25,19.05,0
31 | 3.95,19.6,0
32 | 2.7,17.8,0
33 | 3.45,18.05,0
34 | 3.8,18.55,0
35 | 4,19.1,0
36 | 4.45,19.9,0
37 | 4.65,19.15,0
38 | 4.85,18.45,0
39 | 4.3,18.05,0
40 | 3.35,17.3,0
41 | 3.7,16.3,0
42 | 4.4,16.95,0
43 | 4.25,17.4,0
44 | 4.8,17.65,0
45 | 5.25,18.25,0
46 | 5.75,18.55,0
47 | 5.3,19.25,0
48 | 6.05,19.55,0
49 | 6.5,18.9,0
50 | 6.05,18.2,0
51 | 5.6,17.8,0
52 | 5.45,17.15,0
53 | 5.05,16.55,0
54 | 4.55,16.05,0
55 | 4.95,15.45,0
56 | 5.85,14.8,0
57 | 5.6,15.3,0
58 | 5.65,16,0
59 | 5.95,16.8,0
60 | 6.25,16.4,0
61 | 6.1,17.45,0
62 | 6.6,17.65,0
63 | 6.65,18.3,0
64 | 7.3,18.35,0
65 | 7.85,18.3,0
66 | 7.15,17.8,0
67 | 7.6,17.7,0
68 | 6.7,17.25,0
69 | 7.3,17.25,0
70 | 6.7,16.8,0
71 | 7.3,16.65,0
72 | 6.75,16.3,0
73 | 7.4,16.2,0
74 | 6.55,15.75,0
75 | 7.35,15.8,0
76 | 6.8,14.95,0
77 | 7.45,15.1,0
78 | 6.85,14.45,0
79 | 7.6,14.6,0
80 | 8.55,14.65,0
81 | 8.2,15.5,0
82 | 7.9,16.1,0
83 | 8.05,16.5,0
84 | 7.8,17,0
85 | 8,17.45,0
86 | 8.4,18.1,0
87 | 8.65,17.75,0
88 | 8.9,17.1,0
89 | 8.4,17.1,0
90 | 8.65,16.65,0
91 | 8.45,16.05,0
92 | 8.85,15.35,0
93 | 9.6,15.3,0
94 | 9.15,16,0
95 | 10.2,16,0
96 | 9.5,16.65,0
97 | 10.75,16.6,0
98 | 10.45,17.2,0
99 | 9.85,17.1,0
100 | 9.4,17.6,0
101 | 10.15,17.7,0
102 | 9.85,18.15,0
103 | 9.05,18.25,0
104 | 9.3,18.7,0
105 | 9.15,19.15,0
106 | 8.5,18.8,0
107 | 11.65,17.45,0
108 | 11.1,17.65,0
109 | 10.4,18.25,0
110 | 10,18.95,0
111 | 11.95,18.25,0
112 | 11.25,18.4,0
113 | 10.6,18.9,0
114 | 11.15,19,0
115 | 11.9,18.85,0
116 | 12.6,18.9,0
117 | 11.8,19.45,0
118 | 11.05,19.45,0
119 | 10.3,19.4,0
120 | 9.9,19.75,0
121 | 10.45,20,0
122 | 13.05,19.9,0
123 | 12.5,19.75,0
124 | 11.9,20.05,0
125 | 11.2,20.25,0
126 | 10.85,20.85,0
127 | 11.4,21.25,0
128 | 11.7,20.6,0
129 | 12.3,20.45,0
130 | 12.95,20.55,0
131 | 12.55,20.95,0
132 | 12.05,21.25,0
133 | 11.75,22.1,0
134 | 12.25,21.85,0
135 | 12.8,21.5,0
136 | 13.55,21,0
137 | 13.6,21.6,0
138 | 12.95,22,0
139 | 12.5,22.25,0
140 | 12.2,22.85,0
141 | 12.7,23.35,0
142 | 13,22.7,0
143 | 13.55,22.2,0
144 | 14.05,22.25,0
145 | 14.2,23.05,0
146 | 14.1,23.6,0
147 | 13.5,22.8,0
148 | 13.35,23.5,0
149 | 13.3,24,0
150 | 7.3,19.15,0
151 | 7.95,19.35,0
152 | 7.7,20.05,0
153 | 6.75,19.9,0
154 | 5.25,20.35,0
155 | 6.15,20.7,1
156 | 7,20.7,1
157 | 7.6,21.2,1
158 | 8.55,20.6,1
159 | 9.35,20.5,1
160 | 8.3,21.45,1
161 | 7.9,21.6,1
162 | 7.15,21.75,1
163 | 6.7,21.3,1
164 | 5.2,21.1,0
165 | 6.2,21.95,1
166 | 6.75,22.4,1
167 | 6.15,22.5,1
168 | 5.65,22.2,1
169 | 4.65,22.55,1
170 | 4.1,23.45,1
171 | 5.35,22.8,1
172 | 7.4,22.6,1
173 | 7.75,22.1,1
174 | 8.5,22.3,1
175 | 9.3,22,1
176 | 9.7,22.95,1
177 | 8.8,22.95,1
178 | 8.05,22.9,1
179 | 7.6,23.15,1
180 | 6.85,23,1
181 | 6.2,23.25,1
182 | 5.7,23.4,1
183 | 5.1,23.55,1
184 | 4.55,24.15,1
185 | 5.5,24,1
186 | 6.1,24.05,1
187 | 6.5,23.6,1
188 | 6.75,23.95,1
189 | 7.3,23.75,1
190 | 8.3,23.4,1
191 | 8.9,23.7,1
192 | 9.55,23.65,1
193 | 10.35,24.1,1
194 | 7.95,24.05,1
195 | 3.95,24.4,1
196 | 3.75,25.25,1
197 | 3.9,25.95,1
198 | 4.55,26.65,1
199 | 5.25,26.75,1
200 | 6.5,27.6,1
201 | 7.45,27.6,1
202 | 8.35,27.35,1
203 | 9.25,27.2,1
204 | 9.95,26.5,1
205 | 10.55,25.6,1
206 | 9.9,24.95,1
207 | 9.2,24.5,1
208 | 8.55,24.2,1
209 | 8.8,24.8,1
210 | 9.2,25.35,1
211 | 9.55,26.05,1
212 | 9.05,26.6,1
213 | 8.8,25.8,1
214 | 8.15,26.35,1
215 | 8.05,25.8,1
216 | 8.35,25.2,1
217 | 7.9,25.3,1
218 | 8.05,24.7,1
219 | 7.3,24.4,1
220 | 7.55,24.85,1
221 | 6.85,24.45,1
222 | 6.25,24.65,1
223 | 5.55,24.5,1
224 | 4.65,25.1,1
225 | 5,25.55,1
226 | 5.55,26.1,1
227 | 5.55,25.25,1
228 | 6.2,25.2,1
229 | 6.8,25.05,1
230 | 7.4,25.25,1
231 | 6.65,25.45,1
232 | 6.15,25.8,1
233 | 6.5,26.1,1
234 | 6.6,26.6,1
235 | 7.7,26.65,1
236 | 7.5,26.2,1
237 | 7.5,25.65,1
238 | 7.05,25.85,1
239 | 6.9,27.15,1
240 | 6.15,26.9,1
241 |
--------------------------------------------------------------------------------
/metacluster/utils/data_loader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 23:33, 21/05/2022 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import pandas as pd
8 | import numpy as np
9 | from pathlib import Path
10 | from sklearn.model_selection import train_test_split
11 | from sklearn import preprocessing
12 |
13 |
14 | class Data:
15 | """
16 | The structure of our supported Data class
17 |
18 | Parameters
19 | ----------
20 | X : np.ndarray
21 | The features of your data
22 |
23 | y : np.ndarray, Optional, default=None
24 | The labels of your data, for clustering problem, this can be None
25 | """
26 |
27 | SUPPORT = {
28 | "scaler": ["StandardScaler", "MinMaxScaler", "MaxAbsScaler", "RobustScaler", "Normalizer"]
29 | }
30 |
31 | def __init__(self, X, y=None, name="Unknown"):
32 | self.X = X
33 | self.y = y
34 | self.name = name
35 | self.X_train, self.y_train, self.X_test, self.y_test = None, None, None, None
36 |
37 | def split_train_test(self, test_size=0.2, train_size=None,
38 | random_state=41, shuffle=True, stratify=None, inplace=True):
39 | """
40 |         A wrapper around the train_test_split function from the scikit-learn library.
41 | """
42 | if self.y is None:
43 | self.X_train, self.X_test = train_test_split(self.X, test_size=test_size,
44 | train_size=train_size, random_state=random_state, shuffle=shuffle, stratify=stratify)
45 | else:
46 | self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(self.X, self.y, test_size=test_size,
47 | train_size=train_size, random_state=random_state, shuffle=shuffle, stratify=stratify)
48 | if not inplace:
49 | return self.X_train, self.X_test, self.y_train, self.y_test
50 |
51 | @staticmethod
52 | def scale(X, method="MinMaxScaler", **kwargs):
53 | if method in Data.SUPPORT["scaler"]:
54 | scaler = getattr(preprocessing, method)(**kwargs)
55 | data = scaler.fit_transform(X)
56 | return data, scaler
57 | raise ValueError(f"Data class doesn't support scaling method name: {method}")
58 |
59 | def set_train_test(self, X_train=None, y_train=None, X_test=None, y_test=None):
60 | """
61 |         Function used to set your own X_train, y_train, X_test, y_test in case you don't want to use our split function.
62 |
63 | Parameters
64 | ----------
65 | X_train : np.ndarray
66 | y_train : np.ndarray
67 | X_test : np.ndarray
68 | y_test : np.ndarray
69 | """
70 | self.X_train = X_train
71 | self.y_train = y_train
72 | self.X_test = X_test
73 | self.y_test = y_test
74 |
75 | def get_name(self):
76 | return self.name
77 |
78 |
79 | def get_dataset(dataset_name):
80 | """
81 | Helper function to retrieve the data
82 |
83 | Parameters
84 | ----------
85 | dataset_name : str
86 | Name of the dataset
87 |
88 | Returns
89 | -------
90 | data: Data
91 |         The instance of the Data class that holds the X and y variables.
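
    Examples
    --------
    >>> # a minimal sketch, using one of the bundled CSV datasets (e.g. "Iris")
    >>> data = get_dataset("Iris")
    >>> print(data.X.shape, data.y.shape)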
92 | """
93 | dir_root = f"{Path(__file__).parent.parent.__str__()}/data"
94 | list_path = Path(f"{dir_root}").glob("*.csv")
95 | list_datasets = [pf.name[:-4] for pf in list_path]
96 |
97 | if dataset_name not in list_datasets:
98 |         print(f"MetaCluster currently does not have '{dataset_name}' data in its database...\n+ List of the supported datasets are:")
99 |         for idx, dataset in enumerate(list_datasets):
100 |             print(f"\t{idx + 1}: {dataset}")
101 |         return None
102 | else:
103 | df = pd.read_csv(f"{dir_root}/{dataset_name}.csv", header=None)
104 | data = Data(np.array(df.iloc[:, 0:-1]), np.array(df.iloc[:, -1]), name=dataset_name)
105 | print(f"Requested dataset: {dataset_name} found and loaded!")
106 | return data
107 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 13:24, 25/05/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import setuptools
8 | import os
9 | import re
10 |
11 |
12 | with open("requirements.txt", encoding='utf-8') as f:
13 | REQUIREMENTS = f.read().splitlines()
14 |
15 |
16 | def get_version():
17 | init_path = os.path.join(os.path.dirname(__file__), 'metacluster', '__init__.py')
18 | with open(init_path, 'r', encoding='utf-8') as f:
19 | init_content = f.read()
20 | version_match = re.search(r"^__version__ = ['\"]([^'\"]+)['\"]", init_content, re.M)
21 | if version_match:
22 | return version_match.group(1)
23 | raise RuntimeError("Unable to find version string.")
24 |
25 |
26 | def readme():
27 | with open('README.md', encoding='utf-8') as f:
28 | res = f.read()
29 | return res
30 |
31 | setuptools.setup(
32 | name="metacluster",
33 | version=get_version(),
34 | author="Thieu",
35 | author_email="nguyenthieu2102@gmail.com",
36 | description="MetaCluster: An Open-Source Python Library for Metaheuristic-based Clustering Problems",
37 | long_description=readme(),
38 | long_description_content_type="text/markdown",
39 | keywords=["clustering", "optimization", "k-center clustering", "data points", "centers", "euclidean distance", "maximum distance",
40 | "NP-hard", "greedy algorithm", "approximation algorithm", "covering problem", "computational complexity",
41 | "geometric algorithms", "machine learning", "pattern recognition", "spatial analysis", "graph theory", "mathematical optimization",
42 | "dimensionality reduction", "mutual information", "correlation-based feature selection",
43 | "Genetic algorithm (GA)", "Particle swarm optimization (PSO)", "Ant colony optimization (ACO)",
44 | "Differential evolution (DE)", "Simulated annealing", "Grey wolf optimizer (GWO)", "Whale Optimization Algorithm (WOA)",
45 | "confusion matrix", "recall", "precision", "accuracy", "K-Nearest Neighbors",
46 | "pearson correlation coefficient (PCC)", "spearman correlation coefficient (SCC)",
47 | "multi-objectives optimization problems", "Stochastic optimization", "Global optimization",
48 | "Convergence analysis", "Search space exploration", "Local search", "Computational intelligence", "Robust optimization",
49 | "Performance analysis", "Intelligent optimization", "Simulations"],
50 | url="https://github.com/thieu1995/metacluster",
51 | project_urls={
52 | 'Documentation': 'https://metacluster.readthedocs.io/',
53 | 'Source Code': 'https://github.com/thieu1995/metacluster',
54 | 'Bug Tracker': 'https://github.com/thieu1995/metacluster/issues',
55 | 'Change Log': 'https://github.com/thieu1995/metacluster/blob/master/ChangeLog.md',
56 | 'Forum': 'https://t.me/+fRVCJGuGJg1mNDg1',
57 | },
58 | packages=setuptools.find_packages(exclude=['tests*', 'examples*']),
59 | include_package_data=True,
60 | license="GPLv3",
61 | classifiers=[
62 | "Development Status :: 5 - Production/Stable",
63 | "Intended Audience :: Developers",
64 | "Intended Audience :: Education",
65 | "Intended Audience :: Information Technology",
66 | "Intended Audience :: Science/Research",
67 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
68 | "Natural Language :: English",
69 | "Programming Language :: Python :: 3",
70 | "Programming Language :: Python :: 3.8",
71 | "Programming Language :: Python :: 3.9",
72 | "Programming Language :: Python :: 3.10",
73 | "Programming Language :: Python :: 3.11",
74 | "Programming Language :: Python :: 3.12",
75 | "Programming Language :: Python :: 3.13",
76 | "Topic :: System :: Benchmark",
77 | "Topic :: Scientific/Engineering",
78 | "Topic :: Scientific/Engineering :: Mathematics",
79 | "Topic :: Scientific/Engineering :: Artificial Intelligence",
80 | "Topic :: Scientific/Engineering :: Information Analysis",
81 | "Topic :: Scientific/Engineering :: Visualization",
82 | "Topic :: Scientific/Engineering :: Bio-Informatics",
83 | "Topic :: Software Development :: Build Tools",
84 | "Topic :: Software Development :: Libraries",
85 | "Topic :: Software Development :: Libraries :: Python Modules",
86 | "Topic :: Utilities",
87 | ],
88 | install_requires=REQUIREMENTS,
89 | extras_require={
90 | "dev": ["pytest>=7.1.2", "twine>=4.0.1", "pytest-cov==4.0.0", "flake8>=4.0.1"],
91 | },
92 | python_requires='>=3.7',
93 | )
94 |
--------------------------------------------------------------------------------
/metacluster/data/appendicitis.csv:
--------------------------------------------------------------------------------
1 | 0.213,0.554,0.207,0,0,0.749,0.22,1
2 | 0.458,0.714,0.468,0.111,0.102,0.741,0.436,1
3 | 0.102,0.518,0.111,0.056,0.022,0.506,0.086,1
4 | 0.187,0.196,0.105,0.056,0.029,0.133,0.085,1
5 | 0.236,0.804,0.289,0.111,0.066,0.756,0.241,1
6 | 0.116,0.161,0.057,0.333,0.14,0.177,0.049,1
7 | 0.089,0.179,0.045,0.028,0.011,0.168,0.032,1
8 | 0.364,0.661,0.365,0.319,0.25,0.743,0.353,1
9 | 0.191,0.661,0.214,0.042,0.022,0.448,0.145,1
10 | 0.12,0.25,0.076,0.125,0.053,0.224,0.059,1
11 | 0.058,0.589,0.087,0.583,0.196,0.576,0.06,1
12 | 0.071,0.321,0.057,0.069,0.025,0.129,0.016,1
13 | 1,0.768,1,0.028,0.047,0.878,1,1
14 | 0.204,0.268,0.132,0,0,0.149,0.098,1
15 | 0,0.232,0,0,0,0.421,0,1
16 | 0.364,0.786,0.408,0.042,0.033,0.71,0.344,1
17 | 0.187,0.411,0.153,0.056,0.029,0.322,0.119,1
18 | 0.378,0.732,0.402,0.278,0.223,0.687,0.35,1
19 | 0.187,0.214,0.109,0,0,0.273,0.11,1
20 | 0.298,0.375,0.222,0.153,0.105,0.412,0.216,1
21 | 0.098,0.607,0.123,0.042,0.016,0.67,0.105,1
22 | 0.391,0.893,0.471,0.306,0.251,0.922,0.428,0
23 | 0.52,0.768,0.548,0.597,0.603,0.752,0.495,0
24 | 0.369,0.768,0.406,0.153,0.121,0.685,0.342,0
25 | 0.449,0.571,0.404,0.139,0.126,0.639,0.396,0
26 | 0.2,0.911,0.28,0.208,0.113,0.745,0.207,0
27 | 0.44,0.786,0.48,0.097,0.087,0.805,0.44,0
28 | 0.413,0.714,0.428,0.028,0.024,0.73,0.393,0
29 | 0.422,0.911,0.509,0.347,0.301,0.854,0.438,0
30 | 0.302,0.482,0.258,0.069,0.048,0.599,0.263,0
31 | 0.373,1,0.49,0.264,0.21,0.827,0.384,0
32 | 0.627,0.75,0.64,0.083,0.097,0.727,0.581,0
33 | 0.298,0.839,0.36,0.208,0.143,0.856,0.32,0
34 | 0.44,0.643,0.424,0.139,0.124,0.767,0.428,0
35 | 0.396,0.464,0.322,0.611,0.506,0.366,0.274,0
36 | 0.271,0.857,0.338,0.292,0.189,0.778,0.278,0
37 | 0.284,0.857,0.352,0.389,0.259,0.448,0.214,0
38 | 0.538,0.732,0.549,0.056,0.058,0.882,0.558,0
39 | 0.32,0.946,0.415,0.208,0.15,0.765,0.319,0
40 | 0.44,0.804,0.486,0,0,0.829,0.447,0
41 | 0.378,0.589,0.352,0.181,0.145,0.574,0.319,0
42 | 0.142,0.589,0.157,0.708,0.325,0.938,0.186,0
43 | 0.52,0.804,0.564,0.361,0.364,0.816,0.518,0
44 | 0.556,0.607,0.508,0.069,0.074,0.758,0.53,0
45 | 0.453,0.946,0.556,0.167,0.152,0.834,0.461,0
46 | 0.329,0.661,0.334,0.153,0.112,0.674,0.304,0
47 | 0.409,0.75,0.437,0.097,0.082,0.765,0.399,0
48 | 0.453,0.911,0.541,0.097,0.089,0.843,0.464,0
49 | 0.062,0.75,0.114,0.625,0.214,0.82,0.09,0
50 | 0.351,0.518,0.307,0.056,0.042,0.636,0.313,0
51 | 0.413,0.732,0.434,0,0,0.896,0.442,0
52 | 0.48,0.857,0.547,0.056,0.053,0.836,0.487,0
53 | 0.44,0.768,0.473,0.347,0.31,0.696,0.406,0
54 | 0.413,0.821,0.467,0.083,0.071,0.818,0.419,0
55 | 0.378,0.518,0.328,0.292,0.234,0.512,0.302,0
56 | 0.231,0.357,0.172,0.181,0.106,0.18,0.12,0
57 | 0.573,0.75,0.59,0.361,0.392,0.956,0.619,0
58 | 0.764,0.786,0.789,0.25,0.341,0.854,0.762,0
59 | 0.236,0.75,0.276,0.333,0.174,0.78,0.246,0
60 | 0.751,0.821,0.797,0.292,0.392,0.747,0.7,0
61 | 0.36,0.821,0.415,0.194,0.151,0.698,0.338,0
62 | 0.516,0.625,0.482,0.056,0.056,0.65,0.456,0
63 | 0.502,0.857,0.569,0.528,0.519,0.832,0.506,0
64 | 0.68,0.714,0.67,0.014,0.017,0.851,0.681,0
65 | 0.627,0.893,0.711,0.097,0.113,1,0.69,0
66 | 0.511,0.786,0.547,0.139,0.138,0.741,0.483,0
67 | 0.396,0.911,0.482,0.111,0.092,0.754,0.384,0
68 | 0.662,0.714,0.654,0.278,0.338,0.537,0.533,0
69 | 0.298,0.857,0.365,0.083,0.057,0.845,0.318,0
70 | 0.493,0.732,0.508,0.056,0.054,0.661,0.441,0
71 | 0.516,0.768,0.544,0.139,0.139,0.667,0.462,0
72 | 0.427,0.589,0.393,0.181,0.158,0.754,0.412,0
73 | 0.507,0.964,0.619,0.194,0.192,0.82,0.507,0
74 | 0.467,0.643,0.448,0.083,0.078,0.743,0.444,0
75 | 0.502,0.589,0.456,0.181,0.178,0.623,0.436,0
76 | 0.796,0.786,0.818,0.556,0.784,0.876,0.803,0
77 | 0.471,0.839,0.531,0.111,0.104,0.845,0.481,0
78 | 0.622,0.75,0.635,0.264,0.306,0.787,0.601,0
79 | 0.218,0.875,0.29,0.556,0.032,0.818,0.237,0
80 | 0.684,0.589,0.607,0,0,0.862,0.69,0
81 | 0.431,0.839,0.491,0.333,0.293,0.874,0.452,0
82 | 0.587,0.875,0.662,0.625,0.692,0.911,0.616,0
83 | 0.213,0.768,0.259,1,0.563,0.528,0.177,0
84 | 0.222,0.429,0.183,0.486,0.24,0.357,0.15,0
85 | 0.613,0.929,0.715,0.069,0.08,0.849,0.618,0
86 | 0.489,0.768,0.519,0.236,0.228,0.798,0.483,0
87 | 0.556,0.875,0.63,0.458,0.486,0.927,0.591,0
88 | 0.884,0.821,0.927,0.069,0.107,0.78,0.837,0
89 | 0.027,0.5,0.05,0.625,0.182,0.654,0.043,0
90 | 0.418,0.857,0.485,0.111,0.096,0.792,0.415,0
91 | 0.524,0.75,0.544,0.153,0.155,0.714,0.486,0
92 | 0.271,0.196,0.154,0.25,0.162,0.22,0.154,0
93 | 0.471,0.679,0.466,0.208,0.196,0.556,0.388,0
94 | 0.222,0.661,0.241,0,0,0.641,0.206,0
95 | 0.613,0.643,0.574,0.319,0.366,0.738,0.574,0
96 | 0.236,0.75,0.276,0.014,0.008,0.809,0.252,0
97 | 0.591,0.625,0.546,0.028,0.031,0.576,0.492,0
98 | 0.373,0.589,0.349,0.056,0.044,0.43,0.276,0
99 | 0.4,0.589,0.371,0.514,0.429,0.716,0.377,0
100 | 0.471,0.679,0.466,0.083,0.078,0.574,0.394,0
101 | 0.778,0.732,0.769,0.722,1,0.503,0.608,0
102 | 0.449,0.875,0.523,0.083,0.076,0.92,0.487,0
103 | 0.102,0,0.022,0,0,0,0.017,0
104 | 0.409,0.875,0.482,0.306,0.259,0.914,0.443,0
105 | 0.427,0.804,0.474,0.056,0.048,0.836,0.437,0
106 | 0.462,0.911,0.551,0.167,0.154,0.931,0.5,0
107 |
--------------------------------------------------------------------------------
/metacluster/data/pathbased.csv:
--------------------------------------------------------------------------------
1 | 11.25,5.05,1
2 | 10.95,4.7,1
3 | 9.85,5.8,1
4 | 9.8,5.75,1
5 | 9.15,6.8,1
6 | 8.65,6.6,1
7 | 8.4,7.5,1
8 | 7.9,7.75,1
9 | 6.95,8.7,1
10 | 7.25,9.75,1
11 | 7.3,10.25,1
12 | 5.9,10.7,1
13 | 5.85,11.8,1
14 | 6.45,12.05,1
15 | 5.7,12.95,1
16 | 5.35,13.45,1
17 | 5.4,14.65,1
18 | 4.7,14.85,1
19 | 5.4,15.4,1
20 | 5.1,16.25,1
21 | 5.75,16.7,1
22 | 4.85,17.65,1
23 | 5,18,1
24 | 6.05,18,1
25 | 5.7,19.45,1
26 | 5.3,19.55,1
27 | 5.85,21.25,1
28 | 6.1,21.35,1
29 | 6.55,22.15,1
30 | 5.9,22.8,1
31 | 7.15,23.7,1
32 | 6.75,24.25,1
33 | 7.95,24.65,1
34 | 7.75,25.3,1
35 | 8.8,26.05,1
36 | 8.85,26.95,1
37 | 9.35,27.45,1
38 | 9.95,27.1,1
39 | 11.25,28.2,1
40 | 10.7,28.55,1
41 | 11.95,29.45,1
42 | 11.95,28.65,1
43 | 13.1,30.05,1
44 | 13.4,29.3,1
45 | 14.7,30.2,1
46 | 14.7,30.6,1
47 | 16.1,30.4,1
48 | 16.1,31.05,1
49 | 17.55,30.8,1
50 | 17.65,31.75,1
51 | 18.55,31.6,1
52 | 18.85,30.6,1
53 | 19.85,30.9,1
54 | 20.1,31.3,1
55 | 21.5,31.35,1
56 | 20.85,30.4,1
57 | 22.95,30.05,1
58 | 23.4,30.3,1
59 | 24.2,29.9,1
60 | 24.75,30,1
61 | 25.55,29.3,1
62 | 25.55,28.45,1
63 | 26.7,28.3,1
64 | 26.85,28.75,1
65 | 27.6,27.15,1
66 | 28.25,27.4,1
67 | 29.05,27,1
68 | 29.05,26.2,1
69 | 29.45,25.55,1
70 | 30.05,25.55,1
71 | 30.3,23.3,1
72 | 30.6,23.95,1
73 | 30.9,22.75,1
74 | 31,22.3,1
75 | 30.65,21.3,1
76 | 31.3,20.8,1
77 | 31.85,21.2,1
78 | 31.45,19.3,1
79 | 32.7,19.3,1
80 | 31.9,17.9,1
81 | 33.05,18.05,1
82 | 32.8,16.6,1
83 | 32.2,16.3,1
84 | 32.4,15.15,1
85 | 31.8,14.75,1
86 | 32.35,13.25,1
87 | 31.65,13.35,1
88 | 31.15,12.05,1
89 | 32,11.9,1
90 | 31.05,10.3,1
91 | 31.95,10.4,1
92 | 30.05,9.55,1
93 | 30.5,8.25,1
94 | 29.6,8.25,1
95 | 29.8,7.6,1
96 | 29,7.05,1
97 | 29,6.7,1
98 | 27.6,5.95,1
99 | 28.15,5.45,1
100 | 26.5,4.8,1
101 | 26.4,4.4,1
102 | 25.8,3.65,1
103 | 25.5,4.1,1
104 | 31.6,16.6,1
105 | 30.7,17.4,1
106 | 29.65,17.95,1
107 | 29.15,16.5,1
108 | 30.5,15.55,1
109 | 29.95,13.55,0
110 | 30,11.85,1
111 | 28.65,14.1,0
112 | 27.45,10.85,0
113 | 26.4,10.75,0
114 | 25.05,10.1,0
115 | 26.2,11.55,0
116 | 27.2,13.3,0
117 | 28.3,14.45,0
118 | 27.95,14.65,0
119 | 27.95,14.7,0
120 | 23.15,11,0
121 | 22.9,11.6,0
122 | 23.9,11.6,0
123 | 24.55,11.6,0
124 | 23.85,12.45,0
125 | 23.35,13.1,0
126 | 24.85,13.2,0
127 | 25.1,12.25,0
128 | 25.15,12.5,0
129 | 25.65,12.9,0
130 | 25.7,13.5,0
131 | 26.3,13.3,0
132 | 27.1,14.55,0
133 | 27.15,14.6,0
134 | 26.4,14.35,0
135 | 26.4,14.35,0
136 | 25.75,14.55,0
137 | 25.75,14.9,0
138 | 25.35,14.65,0
139 | 23.7,14.55,0
140 | 24.05,14.9,0
141 | 23.65,15.3,0
142 | 22.75,14.5,0
143 | 22,14,0
144 | 20.9,12.95,0
145 | 20.3,13.1,0
146 | 22.2,16.45,0
147 | 22.15,16.65,0
148 | 22.4,15.15,0
149 | 22.15,15.2,0
150 | 23.95,15.95,0
151 | 24.25,16.1,0
152 | 24.8,16.1,0
153 | 25.15,16.15,0
154 | 25.5,16.7,0
155 | 25.75,16.85,0
156 | 26.2,16.85,0
157 | 26.25,16.9,0
158 | 26.25,16.35,0
159 | 26.75,16.2,0
160 | 27.4,16.15,0
161 | 27.6,16.85,0
162 | 26.95,17.2,0
163 | 26.3,18.1,0
164 | 27.55,17.95,0
165 | 27.7,17.6,0
166 | 28.25,18.25,0
167 | 28.8,19.15,0
168 | 28.5,19.15,0
169 | 28.1,19.35,0
170 | 28.05,20.3,0
171 | 27.3,20.5,0
172 | 27.1,21.6,0
173 | 26.75,19.5,0
174 | 26.5,20,0
175 | 25.9,19.8,0
176 | 25.1,19.8,0
177 | 24.75,20.7,0
178 | 24.35,20.55,0
179 | 23.55,20.35,0
180 | 24.3,19.7,0
181 | 24.9,19,0
182 | 24.7,16.8,0
183 | 24.35,16.8,0
184 | 24.4,17.15,0
185 | 24.9,17.3,0
186 | 24.35,17.7,0
187 | 24.95,17.8,0
188 | 24.95,18.05,0
189 | 24.4,18.35,0
190 | 23.65,18.6,0
191 | 22.85,18.9,0
192 | 22.4,20.65,0
193 | 22.5,17.8,0
194 | 22.45,18.25,0
195 | 21.6,17.7,0
196 | 21.35,18.05,0
197 | 21.3,18.25,0
198 | 19.95,19.8,0
199 | 20.45,20.45,0
200 | 20.35,16.95,0
201 | 19.7,17.45,0
202 | 19.35,17.45,0
203 | 12.45,9.15,2
204 | 10.1,10.05,2
205 | 11.75,12.2,2
206 | 9.55,12.4,2
207 | 8.65,13.35,2
208 | 7.75,13.55,2
209 | 8.55,15.15,2
210 | 8.05,15.9,2
211 | 8.45,15.9,2
212 | 8.6,16.85,2
213 | 9,17.05,2
214 | 9,16.3,2
215 | 9.35,16.3,2
216 | 9.55,15.3,2
217 | 9.65,14.85,2
218 | 10.15,15.05,2
219 | 10.05,15.6,2
220 | 10.4,16,2
221 | 10.65,16,2
222 | 10.9,15.95,2
223 | 10.7,15.35,2
224 | 11.35,15.05,2
225 | 11.15,14.75,2
226 | 11.05,14.6,2
227 | 11.15,14.2,2
228 | 11.1,13.6,2
229 | 12.5,13,2
230 | 13.3,12.45,2
231 | 13.5,12.4,2
232 | 13.95,11.75,2
233 | 14.4,12.2,2
234 | 15.4,12.2,2
235 | 15.25,12.45,2
236 | 14.6,12.75,2
237 | 14.1,13.05,2
238 | 14.2,13.25,2
239 | 14.75,13.45,2
240 | 13.15,13.4,2
241 | 13.05,13.7,2
242 | 12.65,13.65,2
243 | 15.45,13.75,2
244 | 14.65,14.2,2
245 | 13.75,14.05,2
246 | 13.75,14.5,2
247 | 12.95,14.8,2
248 | 13.2,14.9,2
249 | 13.25,15.5,2
250 | 12.1,15.35,2
251 | 12.15,15.5,2
252 | 11.25,16.4,2
253 | 12.7,15.6,2
254 | 12.5,16.15,2
255 | 12.7,16.6,2
256 | 12.15,16.2,2
257 | 11.95,16.5,2
258 | 11.45,16.8,2
259 | 11.05,17.2,2
260 | 11.3,17.6,2
261 | 11.65,17.6,2
262 | 11.25,18.25,2
263 | 11.05,18.45,2
264 | 11.05,18.55,2
265 | 10.55,18.55,2
266 | 10.8,19.2,2
267 | 7.45,19,1
268 | 10.05,20.1,2
269 | 9.95,20.5,2
270 | 10.65,20.45,2
271 | 10.3,22.75,2
272 | 11.7,19.6,2
273 | 12.2,19.65,2
274 | 13.2,20.1,2
275 | 13.55,20.05,2
276 | 14.15,20.05,2
277 | 14.25,21.5,2
278 | 13.25,21.4,2
279 | 12.85,18.1,2
280 | 13.75,18.3,2
281 | 14.2,18.35,2
282 | 14.25,18.8,2
283 | 13.75,16,2
284 | 13.75,16.7,2
285 | 13.75,17.05,2
286 | 14.05,16.8,2
287 | 14.5,16.95,2
288 | 14.75,16.65,2
289 | 15.25,16.05,2
290 | 15.4,16.2,2
291 | 15.85,16.2,2
292 | 15.5,15.55,2
293 | 15,14.95,2
294 | 16.6,16.15,2
295 | 17.9,15.6,2
296 | 17.5,18.05,2
297 | 16.65,17.5,2
298 | 15.45,17.3,2
299 | 15.45,17.8,2
300 | 15.7,18.4,2
301 |
--------------------------------------------------------------------------------
/docs/source/pages/quick_start.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 |
5 | * Install the `current PyPI release <https://pypi.org/project/metacluster/>`_::
6 |
7 | $ pip install metacluster==1.3.0
8 |
9 |
10 | * Install directly from source code::
11 |
12 | $ git clone https://github.com/thieu1995/metacluster.git
13 | $ cd metacluster
14 | $ python setup.py install
15 |
16 |
17 | * In case you want to install the development version from GitHub::
18 |
19 |     $ pip install git+https://github.com/thieu1995/metacluster
20 |
21 |
22 | After installation, you can import MetaCluster as any other Python module::
23 |
24 | $ python
25 | >>> import metacluster
26 | >>> metacluster.__version__
27 |
28 |
29 | ========
30 | Examples
31 | ========
32 |
33 | Let's go through some examples.
34 |
35 | 1. First, load a dataset. You can use the available datasets from MetaCluster::
36 |
37 | # Load available dataset from MetaCluster
38 | from metacluster import get_dataset
39 |
40 | # Try unknown data
41 | get_dataset("unknown")
42 |     # This will print out the list of all available datasets
43 |
44 | data = get_dataset("Arrhythmia")
45 |
46 | Load your own dataset if you want::
47 |
48 | import pandas as pd
49 | from metacluster import Data
50 |
51 | # load X and y
52 | # NOTE MetaCluster accepts numpy arrays only, hence use the .values attribute
53 | dataset = pd.read_csv('examples/dataset.csv', index_col=0).values
54 | X, y = dataset[:, 0:-1], dataset[:, -1]
55 |     data = Data(X, y, name="my-dataset")       # The dataset name is used as the folder name for saved results
56 |
57 |
58 | 2. Next, scale your features::
59 |
60 | # MinMaxScaler
61 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
62 |
63 | # StandardScaler
64 | data.X, scaler = data.scale(data.X, method="StandardScaler")
65 |
66 | # MaxAbsScaler
67 | data.X, scaler = data.scale(data.X, method="MaxAbsScaler")
68 |
69 | # RobustScaler
70 | data.X, scaler = data.scale(data.X, method="RobustScaler")
71 |
72 | # Normalizer
73 | data.X, scaler = data.scale(data.X, method="Normalizer", norm="l2") # "l1" or "l2" or "max"
74 |
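Each call above returns both the transformed array and the fitted scikit-learn scaler, so (as a small sketch, where `X_new` stands for any later data with the same features) you can re-apply the exact same transformation::

    X_new_scaled = scaler.transform(X_new)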
75 |
76 | 3. Next, select the metaheuristic algorithms, their parameters, the list of objectives, and the list of performance metrics::
77 |
78 | list_optimizer = ["BaseFBIO", "OriginalGWO", "OriginalSMA"]
79 | list_paras = [
80 | {"name": "FBIO", "epoch": 10, "pop_size": 30},
81 | {"name": "GWO", "epoch": 10, "pop_size": 30},
82 | {"name": "SMA", "epoch": 10, "pop_size": 30}
83 | ]
84 | list_obj = ["SI", "RSI"]
85 | list_metric = ["BHI", "DBI", "DI", "CHI", "SSEI", "NMIS", "HS", "CS", "VMS", "HGS"]
86 |
87 |
88 | You can check all supported metaheuristic algorithms at the `Mealpy link <https://github.com/thieu1995/mealpy>`_,
89 | and all supported clustering objectives and metrics at the `Permetrics link <https://github.com/thieu1995/permetrics>`_.
90 |
91 | If you don't want to read the documentation, you can print out all of the supported information with::
92 |
93 | from metacluster import MetaCluster
94 |
95 | # Get all supported methods and print them out
96 | MetaCluster.get_support(name="all")
97 |
98 |
99 | 4. Next, create an instance of the MetaCluster class and run it::
100 |
101 | model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=3)
102 |
103 | model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)
104 |
105 | model.save_boxplots()
106 | model.save_convergences()
107 |
108 | As you can see, you can define different datasets and use the same model to run each of them (a small sketch follows below).
109 | **Remember to set the name of your dataset**, because the folder that holds your results is named after your dataset.
110 |
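For example (a minimal sketch, where "Iris" and `X`, `y` are placeholders for your own data)::

    data1 = get_dataset("Iris")
    data2 = Data(X, y, name="my-dataset")

    for data in [data1, data2]:
        model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)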
111 |
112 | =============
113 | Visualization
114 | =============
115 |
116 | If you set `save_figures=True` in the 4th step, you will get a lot of figures that are automatically saved in the `save_path`.
117 |
118 | .. image:: /_static/images/boxplot-BHI-BRI.png
119 | :width: 49 %
120 | .. image:: /_static/images/boxplot-BHI-DBI.png
121 | :width: 49 %
122 |
123 | .. image:: /_static/images/boxplot-BHI-DI.png
124 | :width: 49 %
125 | .. image:: /_static/images/boxplot-BHI-DRI.png
126 | :width: 49 %
127 |
128 | .. image:: /_static/images/convergence-BHI-1.png
129 | :width: 49 %
130 | .. image:: /_static/images/convergence-MIS-1.png
131 | :width: 49 %
132 |
133 | You will also get a lot of CSV files like these.
134 |
135 | .. image:: /_static/images/result_convergences.png
136 |
137 | .. image:: /_static/images/result_labels.png
138 |
139 | .. image:: /_static/images/result_metrics.png
140 |
141 | .. image:: /_static/images/result_metrics_mean.png
142 |
143 | .. image:: /_static/images/result_metrics_std.png
144 |
145 |
146 | Note that there are two special files: `result_convergences.csv` and `result_labels.csv`. You will see a lot of `=` symbols in these files.
147 | 
148 | We did that intentionally because we need to save the whole list of fitness values over the N epochs in the `fitness` column, and all predicted labels of X in the `y_pred` column.
149 | So it is easier for users to read these csv files back.
150 |
151 | Here is a simple example of how to read these files::
152 |
153 |     import numpy as np
154 |     import pandas as pd
155 |     df = pd.read_csv("path_save/result_convergences.csv")
156 |     # Get the loss convergence of model FBIO, with objective BHI, at the 1st trial (first matching row)
157 |     res = df[(df["optimizer"]=="FBIO") & (df["obj"]=="BHI")]["fitness"].values[0]
158 |     list_convergences = np.array(res.split("="), dtype=float)
159 |
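The labels file can be read back in the same way (a small sketch, assuming only the `y_pred` column described above)::

    df_labels = pd.read_csv("path_save/result_labels.csv")
    y_pred = np.array(df_labels["y_pred"].values[0].split("="), dtype=float)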
160 |
161 | .. toctree::
162 | :maxdepth: 4
163 |
164 | .. toctree::
165 | :maxdepth: 4
166 |
167 | .. toctree::
168 | :maxdepth: 4
169 |
--------------------------------------------------------------------------------
/metacluster/data/Lymphography.csv:
--------------------------------------------------------------------------------
1 | 3,2,2,1,2,2,1,1,1,3,3,4,3,4,2,2,2,3,2
2 | 2,2,2,1,2,2,2,2,1,4,2,2,2,4,3,2,2,6,3
3 | 2,2,1,1,2,2,1,1,1,3,3,4,3,4,3,1,1,1,2
4 | 2,1,1,1,1,1,1,1,1,2,3,2,3,3,2,2,1,1,3
5 | 2,2,2,1,1,2,1,1,1,2,3,2,3,3,2,1,1,1,2
6 | 2,1,1,1,1,1,1,2,1,2,2,2,1,7,1,2,2,2,3
7 | 4,2,1,1,1,1,1,2,1,2,2,2,4,8,1,1,2,2,3
8 | 3,1,1,1,1,1,1,2,1,2,2,2,3,2,1,2,2,1,2
9 | 3,2,1,1,1,1,1,2,1,3,2,2,4,8,3,2,2,3,3
10 | 3,2,1,1,1,2,1,2,1,2,2,2,4,8,3,1,2,3,3
11 | 3,1,1,1,1,2,1,2,1,4,3,4,2,4,3,2,2,6,3
12 | 3,2,2,1,2,2,1,2,1,2,3,3,4,2,2,2,1,1,2
13 | 4,2,2,2,2,2,2,2,1,4,3,4,4,7,3,2,2,8,3
14 | 4,1,1,1,1,2,1,2,1,4,3,4,3,3,3,2,2,5,3
15 | 4,2,1,1,2,2,1,2,1,3,3,4,2,5,3,2,2,3,3
16 | 2,1,1,1,1,1,1,1,1,1,2,2,3,3,1,2,2,1,2
17 | 4,2,1,1,2,2,1,2,1,2,3,2,3,2,3,2,2,4,2
18 | 3,2,1,1,1,1,1,2,1,2,3,3,3,5,2,2,1,1,2
19 | 3,2,2,2,2,2,2,1,2,2,2,4,2,4,3,2,2,7,4
20 | 2,2,1,1,1,2,1,2,1,2,3,3,3,5,3,2,2,2,2
21 | 3,2,1,1,1,2,1,2,1,3,2,4,3,4,2,2,2,2,2
22 | 4,1,1,1,1,1,1,1,2,1,3,4,2,8,1,2,2,1,2
23 | 4,1,1,1,1,1,1,2,1,3,2,4,4,8,3,2,2,1,3
24 | 4,1,1,1,1,2,1,2,1,4,3,4,2,5,3,2,2,2,3
25 | 3,2,2,2,2,2,1,2,1,2,3,3,3,4,3,2,2,7,2
26 | 3,1,1,1,1,2,1,2,1,4,2,4,4,2,3,2,2,3,3
27 | 2,1,1,1,1,1,1,2,1,2,2,4,2,2,1,2,2,1,2
28 | 3,2,1,1,1,2,1,1,1,2,2,4,3,4,1,2,2,2,2
29 | 1,1,1,1,1,2,1,2,1,2,2,1,1,2,1,1,1,2,1
30 | 2,1,1,1,1,2,1,2,1,3,3,4,2,8,3,2,2,3,3
31 | 2,1,1,1,1,1,1,2,1,3,3,3,3,6,3,1,2,4,2
32 | 2,2,1,1,2,2,1,1,1,2,2,4,2,8,3,2,2,1,2
33 | 2,2,1,1,1,2,1,2,1,2,3,2,2,4,2,1,1,2,2
34 | 4,2,1,1,2,2,1,2,1,3,3,4,3,4,3,2,2,2,2
35 | 2,2,1,1,1,2,1,2,1,2,2,3,3,4,2,1,2,1,2
36 | 4,1,1,1,2,2,1,2,1,3,3,3,4,5,3,2,2,4,3
37 | 3,2,1,1,2,2,1,2,1,3,3,2,3,4,2,2,2,2,2
38 | 3,2,1,1,1,2,1,2,1,2,3,3,3,2,2,2,2,3,2
39 | 2,1,1,1,1,1,1,2,1,3,2,3,3,8,3,2,2,3,3
40 | 4,2,1,1,2,1,1,2,1,3,3,3,3,2,2,2,2,3,2
41 | 2,1,1,1,1,1,1,1,1,2,2,2,3,8,2,1,2,1,2
42 | 2,1,1,1,1,2,1,2,1,3,2,2,2,4,3,1,2,5,3
43 | 4,2,1,1,2,2,1,2,1,4,3,4,3,4,2,2,2,2,2
44 | 2,2,2,1,2,2,1,2,1,3,3,3,3,8,3,1,2,2,2
45 | 4,2,1,1,1,2,1,1,1,2,2,3,3,5,2,1,2,1,2
46 | 2,2,1,1,1,2,1,2,1,4,3,4,4,8,3,2,2,7,3
47 | 4,2,2,2,2,2,1,2,1,4,3,4,4,7,3,2,2,6,3
48 | 2,1,1,1,1,1,1,2,1,3,3,2,3,4,3,2,2,2,3
49 | 2,2,2,1,2,2,1,2,1,3,2,4,3,5,1,2,2,3,2
50 | 4,2,2,1,2,2,1,2,1,3,3,4,3,8,2,2,2,3,2
51 | 2,2,1,1,1,1,1,1,1,2,3,3,3,3,3,1,2,1,2
52 | 4,1,1,1,2,1,2,2,1,3,2,3,4,5,3,2,2,7,3
53 | 2,2,1,1,1,1,1,2,1,4,3,4,2,8,2,1,2,4,2
54 | 2,1,1,1,1,1,1,1,1,1,1,2,2,3,1,2,2,1,2
55 | 3,1,1,1,1,1,2,1,1,2,2,4,3,5,2,1,2,1,2
56 | 2,2,1,1,1,1,1,1,1,1,2,2,3,3,1,1,1,1,2
57 | 2,1,1,1,1,1,1,2,1,2,2,2,2,8,2,2,2,1,3
58 | 2,1,1,1,1,1,1,2,1,2,2,2,2,4,1,2,1,2,2
59 | 3,1,1,1,1,2,1,2,1,3,3,4,4,4,3,1,2,6,3
60 | 2,2,2,1,1,1,1,2,1,2,2,4,3,8,2,2,2,2,2
61 | 3,1,1,1,1,1,1,2,1,2,2,4,2,4,3,2,2,3,3
62 | 4,2,1,1,1,1,1,1,1,4,3,3,3,4,2,2,1,1,2
63 | 2,2,1,1,1,1,1,1,1,2,2,3,3,5,2,2,2,1,2
64 | 2,2,1,1,1,2,1,1,1,2,2,2,3,5,2,1,2,2,2
65 | 2,1,1,1,1,1,1,1,1,2,2,2,2,8,1,1,1,1,2
66 | 4,2,2,1,1,2,1,2,1,2,2,3,3,8,3,2,2,2,2
67 | 2,1,1,1,1,2,1,2,1,2,2,2,2,8,2,1,2,2,3
68 | 2,2,2,2,2,2,1,1,1,2,2,4,3,8,2,2,2,3,2
69 | 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
70 | 2,2,1,1,1,2,1,2,1,2,2,4,4,5,3,2,2,1,2
71 | 4,2,1,1,1,2,1,1,1,3,3,4,3,8,3,2,2,2,2
72 | 2,1,1,1,1,1,1,2,1,2,2,4,4,8,2,1,2,2,3
73 | 2,1,1,1,1,1,1,1,1,1,1,1,1,3,1,2,2,1,2
74 | 2,1,1,1,1,2,1,2,1,2,2,3,3,3,3,1,2,1,3
75 | 3,2,2,1,2,1,1,2,1,2,2,3,3,8,3,1,2,1,2
76 | 4,2,1,1,1,1,1,2,1,2,2,2,2,3,2,1,1,1,2
77 | 3,1,1,1,1,2,1,1,1,3,2,3,3,8,3,2,2,2,3
78 | 2,2,1,1,1,1,1,2,1,3,3,3,3,3,2,1,2,1,2
79 | 2,2,1,1,1,1,1,2,1,2,2,2,3,8,3,1,2,1,2
80 | 2,2,2,1,2,2,1,1,1,2,2,3,3,4,2,1,2,1,2
81 | 2,2,1,1,1,1,1,2,1,3,3,3,3,2,2,2,1,1,2
82 | 3,1,1,1,1,1,1,1,1,2,2,2,2,5,1,1,2,2,3
83 | 4,1,1,1,1,1,1,2,1,4,2,4,2,8,3,2,2,6,3
84 | 3,2,2,1,2,2,1,2,1,3,2,2,3,4,1,2,2,1,2
85 | 3,2,1,1,1,2,1,2,1,2,2,2,2,1,3,1,1,1,2
86 | 2,2,1,1,1,1,1,2,1,2,3,3,3,5,2,1,2,1,2
87 | 3,1,1,1,1,1,1,1,1,2,3,3,3,4,3,1,2,2,2
88 | 3,1,1,1,2,1,1,2,1,2,3,3,3,5,3,1,1,1,2
89 | 4,2,1,1,1,2,1,2,1,4,3,3,3,7,3,2,2,3,3
90 | 2,2,1,1,1,1,1,2,1,4,3,4,2,7,3,2,2,5,3
91 | 2,1,1,1,1,1,1,1,1,2,3,2,2,8,1,2,1,1,2
92 | 3,1,1,1,1,2,1,2,1,2,2,4,3,8,2,2,2,1,3
93 | 3,2,1,1,1,2,1,2,1,3,3,3,3,4,3,2,2,1,2
94 | 3,1,1,1,2,2,2,1,3,1,1,2,1,5,3,1,1,7,4
95 | 2,1,1,1,1,1,1,2,1,3,2,2,2,8,3,2,2,5,3
96 | 2,1,1,1,1,1,1,2,1,3,2,4,2,8,3,2,2,4,3
97 | 2,2,1,1,1,1,1,2,1,2,2,3,3,2,3,2,2,2,2
98 | 3,1,1,1,1,2,1,2,1,4,2,2,2,4,3,2,2,7,3
99 | 3,2,1,1,2,2,1,1,1,2,2,4,3,2,1,2,2,3,2
100 | 2,2,1,1,1,2,1,2,1,3,3,3,3,8,3,2,2,4,3
101 | 3,2,2,2,2,2,2,2,1,4,3,3,4,8,3,2,2,7,3
102 | 2,1,1,1,1,1,1,2,1,3,2,2,2,6,3,2,2,6,3
103 | 3,1,1,1,1,1,1,2,2,1,2,2,2,8,3,1,2,8,3
104 | 3,2,1,1,1,1,1,1,1,2,2,3,3,3,3,1,1,2,2
105 | 3,1,1,1,1,1,1,2,1,2,2,4,4,2,3,2,1,1,3
106 | 4,2,2,1,1,1,1,2,1,2,2,2,3,3,2,2,2,2,2
107 | 2,2,1,1,1,2,1,2,1,2,3,3,3,5,3,2,2,1,2
108 | 2,2,1,1,1,1,1,2,1,3,2,2,2,8,3,1,2,5,3
109 | 3,1,1,1,1,1,1,2,1,4,3,3,4,5,3,2,2,3,3
110 | 3,2,1,1,1,1,1,1,1,2,2,3,3,5,1,1,1,1,2
111 | 4,2,1,1,2,2,1,1,1,1,3,3,3,3,3,2,2,3,2
112 | 4,2,2,1,2,2,1,2,1,1,2,2,1,3,1,2,2,2,3
113 | 3,2,1,1,1,2,1,1,1,2,3,4,2,4,1,1,1,1,2
114 | 3,1,1,1,1,1,1,1,1,2,2,4,2,8,2,2,2,1,3
115 | 3,2,1,1,1,1,1,2,1,2,3,3,3,8,2,2,2,1,2
116 | 2,2,2,1,2,2,2,2,1,3,3,4,4,5,3,2,2,5,3
117 | 4,2,1,1,1,1,1,2,1,2,2,2,3,3,2,2,2,1,2
118 | 2,1,1,1,1,1,1,2,1,4,3,4,4,5,3,2,2,5,3
119 | 4,1,1,1,1,1,1,2,1,3,3,2,2,4,2,2,2,2,2
120 | 3,2,1,1,2,2,1,2,1,2,2,4,3,5,2,2,2,4,3
121 | 4,1,1,1,1,1,1,2,1,2,2,3,3,3,3,2,1,1,3
122 | 4,1,1,1,1,2,1,2,1,4,2,2,4,7,3,2,2,2,3
123 | 2,1,1,1,1,1,1,1,1,1,2,4,3,8,2,2,2,2,2
124 | 2,2,2,1,2,2,1,2,1,3,3,3,3,8,3,2,2,2,2
125 | 2,2,1,1,1,2,1,2,1,3,3,3,4,8,3,2,2,2,3
126 | 2,1,1,1,1,2,1,1,1,2,2,2,2,3,1,1,1,1,3
127 | 2,1,1,1,1,2,1,2,1,2,2,2,4,8,1,2,2,2,3
128 | 2,1,1,1,1,2,1,2,1,3,3,2,2,4,3,2,2,6,3
129 | 4,1,1,1,1,1,1,2,1,3,2,2,4,3,2,2,1,1,3
130 | 2,1,1,1,1,1,1,2,1,3,2,4,4,4,3,2,2,5,3
131 | 3,1,1,1,1,1,1,2,1,3,2,2,2,8,1,1,1,1,2
132 | 3,2,2,2,2,2,2,2,3,1,1,2,2,8,1,2,2,4,4
133 | 4,2,1,1,1,2,1,1,1,2,3,2,3,2,2,1,1,1,2
134 | 2,1,1,1,1,1,1,1,1,2,3,3,3,8,3,1,1,1,2
135 | 2,2,1,1,1,2,1,2,1,3,3,4,2,8,3,2,2,2,3
136 | 3,1,1,1,2,2,2,1,3,1,1,4,2,5,3,1,2,4,4
137 | 3,2,2,1,1,2,1,1,1,3,2,3,3,4,3,1,2,2,2
138 | 3,1,1,1,1,1,1,1,1,2,2,4,3,5,1,2,2,1,2
139 | 4,1,1,1,1,1,1,2,1,4,2,2,4,7,3,2,2,7,3
140 | 3,2,1,1,1,1,1,2,1,2,2,2,2,5,1,1,1,1,2
141 | 4,1,1,1,1,2,1,2,1,3,3,4,2,4,3,2,2,4,3
142 | 2,2,2,1,2,2,1,2,1,3,3,4,3,4,3,2,2,6,2
143 | 2,1,1,1,1,1,1,2,1,2,2,4,2,8,3,2,2,3,3
144 | 2,2,1,1,1,1,1,1,1,2,3,3,3,2,2,2,2,1,2
145 | 2,2,1,1,1,1,1,2,1,2,3,3,3,5,3,1,2,1,2
146 | 2,2,1,1,1,1,1,2,1,2,3,2,3,8,2,1,1,1,2
147 | 3,2,2,1,1,2,1,1,1,2,3,3,3,5,2,1,2,2,2
148 | 2,1,1,1,1,1,1,2,1,2,2,3,3,5,3,1,1,2,3
149 |
--------------------------------------------------------------------------------
/metacluster/data/jain.csv:
--------------------------------------------------------------------------------
1 | 0.85,17.45,0
2 | 0.75,15.6,0
3 | 3.3,15.45,0
4 | 5.25,14.2,0
5 | 4.9,15.65,0
6 | 5.35,15.85,0
7 | 5.1,17.9,0
8 | 4.6,18.25,0
9 | 4.05,18.75,0
10 | 3.4,19.7,0
11 | 2.9,21.15,0
12 | 3.1,21.85,0
13 | 3.9,21.85,0
14 | 4.4,20.05,0
15 | 7.2,14.5,0
16 | 7.65,16.5,0
17 | 7.1,18.65,0
18 | 7.05,19.9,0
19 | 5.85,20.55,0
20 | 5.5,21.8,0
21 | 6.55,21.8,0
22 | 6.05,22.3,0
23 | 5.2,23.4,0
24 | 4.55,23.9,0
25 | 5.1,24.4,0
26 | 8.1,26.35,0
27 | 10.15,27.7,0
28 | 9.75,25.5,0
29 | 9.2,21.1,0
30 | 11.2,22.8,0
31 | 12.6,23.1,0
32 | 13.25,23.5,0
33 | 11.65,26.85,0
34 | 12.45,27.55,0
35 | 13.3,27.85,0
36 | 13.7,27.75,0
37 | 14.15,26.9,0
38 | 14.05,26.55,0
39 | 15.15,24.2,0
40 | 15.2,24.75,0
41 | 12.2,20.9,0
42 | 12.15,21.45,0
43 | 12.75,22.05,0
44 | 13.15,21.85,0
45 | 13.75,22,0
46 | 13.95,22.7,0
47 | 14.4,22.65,0
48 | 14.2,22.15,0
49 | 14.1,21.75,0
50 | 14.05,21.4,0
51 | 17.2,24.8,0
52 | 17.7,24.85,0
53 | 17.55,25.2,0
54 | 17,26.85,0
55 | 16.55,27.1,0
56 | 19.15,25.35,0
57 | 18.8,24.7,0
58 | 21.4,25.85,0
59 | 15.8,21.35,0
60 | 16.6,21.15,0
61 | 17.45,20.75,0
62 | 18,20.95,0
63 | 18.25,20.2,0
64 | 18,22.3,0
65 | 18.6,22.25,0
66 | 19.2,21.95,0
67 | 19.45,22.1,0
68 | 20.1,21.6,0
69 | 20.1,20.9,0
70 | 19.9,20.35,0
71 | 19.45,19.05,0
72 | 19.25,18.7,0
73 | 21.3,22.3,0
74 | 22.9,23.65,0
75 | 23.15,24.1,0
76 | 24.25,22.85,0
77 | 22.05,20.25,0
78 | 20.95,18.25,0
79 | 21.65,17.25,0
80 | 21.55,16.7,0
81 | 21.6,16.3,0
82 | 21.5,15.5,0
83 | 22.4,16.5,0
84 | 22.25,18.1,0
85 | 23.15,19.05,0
86 | 23.5,19.8,0
87 | 23.75,20.2,0
88 | 25.15,19.8,0
89 | 25.5,19.45,0
90 | 23,18,0
91 | 23.95,17.75,0
92 | 25.9,17.55,0
93 | 27.65,15.65,0
94 | 23.1,14.6,0
95 | 23.5,15.2,0
96 | 24.05,14.9,0
97 | 24.5,14.7,0
98 | 14.15,17.35,1
99 | 14.3,16.8,1
100 | 14.3,15.75,1
101 | 14.75,15.1,1
102 | 15.35,15.5,1
103 | 15.95,16.45,1
104 | 16.5,17.05,1
105 | 17.35,17.05,1
106 | 17.15,16.3,1
107 | 16.65,16.1,1
108 | 16.5,15.15,1
109 | 16.25,14.95,1
110 | 16,14.25,1
111 | 15.9,13.2,1
112 | 15.15,12.05,1
113 | 15.2,11.7,1
114 | 17,15.65,1
115 | 16.9,15.35,1
116 | 17.35,15.45,1
117 | 17.15,15.1,1
118 | 17.3,14.9,1
119 | 17.7,15,1
120 | 17,14.6,1
121 | 16.85,14.3,1
122 | 16.6,14.05,1
123 | 17.1,14,1
124 | 17.45,14.15,1
125 | 17.8,14.2,1
126 | 17.6,13.85,1
127 | 17.2,13.5,1
128 | 17.25,13.15,1
129 | 17.1,12.75,1
130 | 16.95,12.35,1
131 | 16.5,12.2,1
132 | 16.25,12.5,1
133 | 16.05,11.9,1
134 | 16.65,10.9,1
135 | 16.7,11.4,1
136 | 16.95,11.25,1
137 | 17.3,11.2,1
138 | 18.05,11.9,1
139 | 18.6,12.5,1
140 | 18.9,12.05,1
141 | 18.7,11.25,1
142 | 17.95,10.9,1
143 | 18.4,10.05,1
144 | 17.45,10.4,1
145 | 17.6,10.15,1
146 | 17.7,9.85,1
147 | 17.3,9.7,1
148 | 16.95,9.7,1
149 | 16.75,9.65,1
150 | 19.8,9.95,1
151 | 19.1,9.55,1
152 | 17.5,8.3,1
153 | 17.55,8.1,1
154 | 17.85,7.55,1
155 | 18.2,8.35,1
156 | 19.3,9.1,1
157 | 19.4,8.85,1
158 | 19.05,8.85,1
159 | 18.9,8.5,1
160 | 18.6,7.85,1
161 | 18.7,7.65,1
162 | 19.35,8.2,1
163 | 19.95,8.3,1
164 | 20,8.9,1
165 | 20.3,8.9,1
166 | 20.55,8.8,1
167 | 18.35,6.95,1
168 | 18.65,6.9,1
169 | 19.3,7,1
170 | 19.1,6.85,1
171 | 19.15,6.65,1
172 | 21.2,8.8,1
173 | 21.4,8.8,1
174 | 21.1,8,1
175 | 20.4,7,1
176 | 20.5,6.35,1
177 | 20.1,6.05,1
178 | 20.45,5.15,1
179 | 20.95,5.55,1
180 | 20.95,6.2,1
181 | 20.9,6.6,1
182 | 21.05,7,1
183 | 21.85,8.5,1
184 | 21.9,8.2,1
185 | 22.3,7.7,1
186 | 21.85,6.65,1
187 | 21.3,5.05,1
188 | 22.6,6.7,1
189 | 22.5,6.15,1
190 | 23.65,7.2,1
191 | 24.1,7,1
192 | 21.95,4.8,1
193 | 22.15,5.05,1
194 | 22.45,5.3,1
195 | 22.45,4.9,1
196 | 22.7,5.5,1
197 | 23,5.6,1
198 | 23.2,5.3,1
199 | 23.45,5.95,1
200 | 23.75,5.95,1
201 | 24.45,6.15,1
202 | 24.6,6.45,1
203 | 25.2,6.55,1
204 | 26.05,6.4,1
205 | 25.3,5.75,1
206 | 24.35,5.35,1
207 | 23.3,4.9,1
208 | 22.95,4.75,1
209 | 22.4,4.55,1
210 | 22.8,4.1,1
211 | 22.9,4,1
212 | 23.25,3.85,1
213 | 23.45,3.6,1
214 | 23.55,4.2,1
215 | 23.8,3.65,1
216 | 23.8,4.75,1
217 | 24.2,4,1
218 | 24.55,4,1
219 | 24.7,3.85,1
220 | 24.7,4.3,1
221 | 24.9,4.75,1
222 | 26.4,5.7,1
223 | 27.15,5.95,1
224 | 27.3,5.45,1
225 | 27.5,5.45,1
226 | 27.55,5.1,1
227 | 26.85,4.95,1
228 | 26.6,4.9,1
229 | 26.85,4.4,1
230 | 26.2,4.4,1
231 | 26,4.25,1
232 | 25.15,4.1,1
233 | 25.6,3.9,1
234 | 25.85,3.6,1
235 | 24.95,3.35,1
236 | 25.1,3.25,1
237 | 25.45,3.15,1
238 | 26.85,2.95,1
239 | 27.15,3.15,1
240 | 27.2,3,1
241 | 27.95,3.25,1
242 | 27.95,3.5,1
243 | 28.8,4.05,1
244 | 28.8,4.7,1
245 | 28.75,5.45,1
246 | 28.6,5.75,1
247 | 29.25,6.3,1
248 | 30,6.55,1
249 | 30.6,3.4,1
250 | 30.05,3.45,1
251 | 29.75,3.45,1
252 | 29.2,4,1
253 | 29.45,4.05,1
254 | 29.05,4.55,1
255 | 29.4,4.85,1
256 | 29.5,4.7,1
257 | 29.9,4.45,1
258 | 30.75,4.45,1
259 | 30.4,4.05,1
260 | 30.8,3.95,1
261 | 31.05,3.95,1
262 | 30.9,5.2,1
263 | 30.65,5.85,1
264 | 30.7,6.15,1
265 | 31.5,6.25,1
266 | 31.65,6.55,1
267 | 32,7,1
268 | 32.5,7.95,1
269 | 33.35,7.45,1
270 | 32.6,6.95,1
271 | 32.65,6.6,1
272 | 32.55,6.35,1
273 | 32.35,6.1,1
274 | 32.55,5.8,1
275 | 32.2,5.05,1
276 | 32.35,4.25,1
277 | 32.9,4.15,1
278 | 32.7,4.6,1
279 | 32.75,4.85,1
280 | 34.1,4.6,1
281 | 34.1,5,1
282 | 33.6,5.25,1
283 | 33.35,5.65,1
284 | 33.75,5.95,1
285 | 33.4,6.2,1
286 | 34.45,5.8,1
287 | 34.65,5.65,1
288 | 34.65,6.25,1
289 | 35.25,6.25,1
290 | 34.35,6.8,1
291 | 34.1,7.15,1
292 | 34.45,7.3,1
293 | 34.7,7.2,1
294 | 34.85,7,1
295 | 34.35,7.75,1
296 | 34.55,7.85,1
297 | 35.05,8,1
298 | 35.5,8.05,1
299 | 35.8,7.1,1
300 | 36.6,6.7,1
301 | 36.75,7.25,1
302 | 36.5,7.4,1
303 | 35.95,7.9,1
304 | 36.1,8.1,1
305 | 36.15,8.4,1
306 | 37.6,7.35,1
307 | 37.9,7.65,1
308 | 29.15,4.4,1
309 | 34.9,9,1
310 | 35.3,9.4,1
311 | 35.9,9.35,1
312 | 36,9.65,1
313 | 35.75,10,1
314 | 36.7,9.15,1
315 | 36.6,9.8,1
316 | 36.9,9.75,1
317 | 37.25,10.15,1
318 | 36.4,10.15,1
319 | 36.3,10.7,1
320 | 36.75,10.85,1
321 | 38.15,9.7,1
322 | 38.4,9.45,1
323 | 38.35,10.5,1
324 | 37.7,10.8,1
325 | 37.45,11.15,1
326 | 37.35,11.4,1
327 | 37,11.75,1
328 | 36.8,12.2,1
329 | 37.15,12.55,1
330 | 37.25,12.15,1
331 | 37.65,11.95,1
332 | 37.95,11.85,1
333 | 38.6,11.75,1
334 | 38.5,12.2,1
335 | 38,12.95,1
336 | 37.3,13,1
337 | 37.5,13.4,1
338 | 37.85,14.5,1
339 | 38.3,14.6,1
340 | 38.05,14.45,1
341 | 38.35,14.35,1
342 | 38.5,14.25,1
343 | 39.3,14.2,1
344 | 39,13.2,1
345 | 38.95,12.9,1
346 | 39.2,12.35,1
347 | 39.5,11.8,1
348 | 39.55,12.3,1
349 | 39.75,12.75,1
350 | 40.2,12.8,1
351 | 40.4,12.05,1
352 | 40.45,12.5,1
353 | 40.55,13.15,1
354 | 40.45,14.5,1
355 | 40.2,14.8,1
356 | 40.65,14.9,1
357 | 40.6,15.25,1
358 | 41.3,15.3,1
359 | 40.95,15.7,1
360 | 41.25,16.8,1
361 | 40.95,17.05,1
362 | 40.7,16.45,1
363 | 40.45,16.3,1
364 | 39.9,16.2,1
365 | 39.65,16.2,1
366 | 39.25,15.5,1
367 | 38.85,15.5,1
368 | 38.3,16.5,1
369 | 38.75,16.85,1
370 | 39,16.6,1
371 | 38.25,17.35,1
372 | 39.5,16.95,1
373 | 39.9,17.05,1
374 |
--------------------------------------------------------------------------------
/metacluster/data/vary-density.csv:
--------------------------------------------------------------------------------
1 | 0.10939309992822593,0.08540944605400377,0
2 | 0.08257056589315302,0.10179643490510727,0
3 | 0.08498980564609883,0.11364122368069912,0
4 | 0.11461114193975777,0.11552402271516482,0
5 | 0.09735596050211873,0.09548403816561275,0
6 | 0.08661201543909372,0.11308376585550771,0
7 | 0.1073481288254181,0.11021330754420668,0
8 | 0.07801081730421248,0.06810454086466368,0
9 | 0.10277973436298088,0.11647434064820172,0
10 | 0.07339804391235821,0.1078335519818581,0
11 | 0.11325452752755462,0.10404663937378625,0
12 | 0.1273278386781513,0.10836496591740674,0
13 | 0.10534147759505744,0.13110054873722296,0
14 | 0.06898127393954798,0.06346945806802406,0
15 | 0.10414520023820327,0.10185867962316415,0
16 | 0.09482702879554915,0.10603072056048626,0
17 | 0.10590385064004555,0.14671170907858727,0
18 | 0.09145486727290403,0.08445999826012236,0
19 | 0.13583113939077163,0.09458095912800595,0
20 | 0.11555679696075509,0.1368193826144829,0
21 | 0.12031639663697646,0.11357504238950059,0
22 | 0.110254390968761,0.10303178145017239,0
23 | 0.12654829188201455,0.12707367632966104,0
24 | 0.12675852066043894,0.088271417847455,0
25 | 0.08885898147947054,0.09550116287467723,0
26 | 0.15049405364356278,0.08953394131575702,0
27 | 0.10940705122414521,0.10629669673796523,0
28 | 0.10495813908343334,0.11538212046779205,0
29 | 0.07757447841907014,0.07265898459659947,0
30 | 0.1238253003792682,0.0491699906104266,0
31 | 0.10233917685470978,0.1185900517163219,0
32 | 0.11678673146319728,0.09635926549296375,0
33 | 0.10530011246077302,0.061717760531780845,0
34 | 0.09483653517864192,0.1058088375459108,0
35 | 0.10350130132045579,0.08033982802613415,0
36 | 0.10508485279114937,0.09850999414140375,0
37 | 0.10381351018334607,0.09387552143108098,0
38 | 0.0831769550420346,0.11053706424409374,0
39 | 0.07811325253646423,0.08855132614271427,0
40 | 0.11951606167629983,0.11474972435750333,0
41 | 0.1074778514506865,0.08070522440311789,0
42 | 0.053139002741000294,0.0937351594239546,0
43 | 0.11254850299299388,0.08980322106650357,0
44 | 0.09412730741164194,0.08272066154536242,0
45 | 0.08577499351728525,0.09550518018085888,0
46 | 0.11583224975100394,0.11361504928528926,0
47 | 0.07911434718038962,0.08681313153487322,0
48 | 0.12327140272021234,0.12087370312417504,0
49 | 0.08594853245238993,0.09943451279479909,0
50 | 0.13902860076935275,0.03989653512602472,0
51 | 0.12708799464813766,0.2644155003737956,1
52 | 0.4793324677764971,0.25871931635376133,1
53 | 0.4458821928797788,0.27716600674225644,1
54 | 0.34229159591552366,0.21028490710419528,1
55 | 0.34046374847561617,0.4414738051955298,1
56 | 0.30241913939090614,0.2386857924647094,1
57 | 0.34779522491267634,0.3128626547470094,1
58 | 0.21382135719730513,0.23782965172812845,1
59 | 0.35028364402053597,0.20461498396465955,1
60 | 0.2874649391679298,0.2678984084750633,1
61 | 0.3784428745374063,0.29680747857287304,1
62 | 0.2500227391580536,0.2600890500314802,1
63 | 0.3315017176349687,0.1637118716744888,1
64 | 0.2578402775417402,0.3009182097276648,1
65 | 0.3726478760495913,0.25372388900487164,1
66 | 0.3837839669063111,0.33957264730450887,1
67 | 0.2424982091640392,0.40921219433890427,1
68 | 0.22000245069218735,0.3469932279937275,1
69 | 0.17341346365450688,0.4020064293379623,1
70 | 0.3499104092273302,0.2932484438016454,1
71 | 0.19395997666951975,0.17076655953621833,1
72 | 0.1852049387279591,0.27034123229110435,1
73 | 0.25636505807925986,0.23715002230663218,1
74 | 0.3360519588309022,0.26089102326011987,1
75 | 0.3598965154681066,0.27933646264854767,1
76 | 0.16747857120700604,0.18080395872256183,1
77 | 0.2971436755154356,0.1785881112740101,1
78 | 0.4265086206485411,0.2570112112049665,1
79 | 0.21204410133286045,0.15535520650558493,1
80 | 0.3466204995096275,0.30086148653689787,1
81 | 0.30360676277912846,0.17830465777753907,1
82 | 0.39243998141899017,0.25601977747402616,1
83 | 0.2549626614638548,0.2819239003513696,1
84 | 0.12032358975397317,0.22277431378265106,1
85 | 0.18266064110281321,0.32170444835583295,1
86 | 0.3738949523156657,0.3637950876808469,1
87 | 0.3032983212464313,0.20477463837282012,1
88 | 0.2485457866734928,0.22812651611627888,1
89 | 0.261623666531509,0.28150311427335756,1
90 | 0.3167140499426797,0.23374152395018663,1
91 | 0.20719838264252133,0.28596982999568676,1
92 | 0.33133043165510634,0.3602447552840026,1
93 | 0.3178873470609728,0.39298130360522,1
94 | 0.280037321638697,0.3923190227285356,1
95 | 0.29483356718246456,0.37205817081213605,1
96 | 0.28219311892194715,0.2058854424071217,1
97 | 0.16454684903417766,0.24058118193136807,1
98 | 0.47732425015642344,0.2568455828077422,1
99 | 0.3770989616628725,0.41629080310451616,1
100 | 0.4141007008962817,0.1309858708471561,1
101 | 0.7003017451433199,0.7287490220107232,2
102 | 0.6266255352470994,0.6145998677182528,2
103 | 0.7019051822947007,0.5124795489342403,2
104 | 0.5944764664101421,0.6619166659886164,2
105 | 0.7030329164914035,0.558555330006555,2
106 | 0.6481087558379025,0.8017686485184234,2
107 | 0.6856123156334322,0.6671019783170389,2
108 | 0.5643556414487485,0.897212900996877,2
109 | 0.620463808389097,0.4995065419806039,2
110 | 0.6532753152288451,0.6833247457148309,2
111 | 0.6737248904523078,0.4407575134246889,2
112 | 0.7950779507705004,0.7035141539438474,2
113 | 0.47463459287615595,0.7232981542796941,2
114 | 0.5574732261595691,0.8357989225165923,2
115 | 0.4014454414881494,0.9852388193291626,2
116 | 0.6506772676018544,0.7224898701289548,2
117 | 0.8669393705509909,0.4508663778905317,2
118 | 0.7483692879885884,0.7691642297236924,2
119 | 0.8413855122739305,0.8542108203965888,2
120 | 0.6148849574228844,0.7665317715155546,2
121 | 0.7318294589052777,0.5648032707214102,2
122 | 0.5916505458903227,0.7221166207721841,2
123 | 0.7248756039872134,0.5786968660834495,2
124 | 0.705697712333666,0.5193593407084314,2
125 | 0.6599836373076424,0.7284896854713157,2
126 | 0.6193956001890452,0.8335554020802906,2
127 | 0.4134493683302134,0.6035139879486471,2
128 | 0.7139459491081279,0.6914502268766258,2
129 | 0.969061143223094,0.7505299756130679,2
130 | 0.6923638975286772,0.44913882122893567,2
131 | 0.6766768297278943,0.7222330262894585,2
132 | 0.7263143789986654,0.5289504585228786,2
133 | 0.8392202208833831,0.6203618639023891,2
134 | 0.37071602481418825,0.5275675841610348,2
135 | 0.8408501654826845,0.5723346620908862,2
136 | 0.9543873814037929,0.8981462265707956,2
137 | 0.8175474187676226,0.720828263491731,2
138 | 0.3352360532055249,0.8081996365974959,2
139 | 0.820689965102394,0.8033239002809486,2
140 | 0.6532530267644024,0.5317828548127248,2
141 | 0.6029429217025293,0.592257182759498,2
142 | 0.6793171364155167,0.6299178295300052,2
143 | 0.5436590016093669,0.616530004367637,2
144 | 0.6419400626838602,0.36256223286946254,2
145 | 0.6087306717731106,0.6830284049509172,2
146 | 0.8119629689166619,0.6680208091577174,2
147 | 0.4507390690950599,0.6311391633425942,2
148 | 0.69051411630115,0.7322805838706918,2
149 | 0.8025826627064628,0.4721060310247126,2
150 | 0.5616657288787805,0.6772097741388851,2
151 |
--------------------------------------------------------------------------------
/metacluster/utils/visualize_util.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 17:17, 31/07/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import pandas as pd
8 | import plotly.express as px
9 | import plotly.io as pio
10 |
11 | pio.kaleido.scope.mathjax = None
12 |
13 |
14 | def export_boxplot_figures(df, figure_size=(500, 600), xlabel="Optimizer", ylabel=None, title="Boxplot of comparison models",
15 | show_legend=True, show_mean_only=False, exts=(".png", ".pdf"), file_name="boxplot", save_path="history"):
16 | """
17 | Parameters
18 | ----------
19 | df : pd.DataFrame
20 | The format of df parameter:
21 | optimizer DBI
22 | FBIO 1.18145
23 | FBIO 1.1815
24 | GWO 1.18145
25 | GWO 1.18153
26 | FBIO 1.18147
27 | FBIO 1.18145
28 | GWO 1.18137
29 |
 30 |     figure_size : list, tuple, np.ndarray, None; default=(500, 600)
 31 |         The size of the saved figures. `None` lets the size be chosen automatically.
 32 |         Otherwise, pass (width, height) of the figure in pixels (roughly 100px to 1500px).
 33 | 
 34 |     xlabel : str; default="Optimizer"
 35 |         The label for the x-axis of boxplot figures.
 36 | 
 37 |     ylabel : str; default=None
 38 |         The label for the y-axis of boxplot figures. If None, the metric column name is used.
 39 | 
 40 |     title : str; default="Boxplot of comparison models"
 41 |         The title of the figures. It can be the same for all objectives because the y-axis label already distinguishes them.
 42 | 
 43 |     show_legend : bool; default=True
 44 |         Whether to show the legend. This option applies to boxplots only; convergence charts always show the legend.
 45 | 
 46 |     show_mean_only : bool; default=False
 47 |         If True, only the mean is drawn inside each box; otherwise both the mean and the standard deviation are drawn (the median is always part of the box).
 48 | 
 49 |     exts : list, tuple, np.ndarray; default=(".png", ".pdf")
 50 |         List of file extensions to save. Multiple formats are useful for different targets, e.g. LaTeX (".pdf") and Word (".png").
51 |
52 | file_name : str; default="boxplot"
53 | The prefix for filenames that will be saved.
54 |
55 | save_path : str; default="history"
56 | The path to save the figure
57 | """
 58 |     yaxis = dict(nticks=8)
 59 |     if abs(df.iloc[0, -1]) > 1e5 or abs(df.iloc[0, -1]) < 1e-5:
 60 |         yaxis = dict(tickformat=".2e", exponentformat="power", showexponent="first")
 61 |     if xlabel is None:
 62 |         xlabel = ""
 63 |     if title is None:
 64 |         title = ""
 65 |     # boxmean=True shows only the mean; "sd" shows the mean and standard deviation
 66 |     boxmean = True if show_mean_only else "sd"
 67 |     col_name = list(df.columns)[-1]
 68 |     if ylabel is None:
 69 |         # Fall back to the metric column name when no y-label is given
 70 |         ylabel = col_name
 71 |     fig = px.box(df, x="optimizer", y=col_name, color="optimizer",
 72 |                  labels={'optimizer': xlabel, col_name: ylabel})
 73 |     fig.update_traces(boxmean=boxmean)
 74 |     fig.update_layout(width=figure_size[0], height=figure_size[1],
 75 |                       margin=dict(l=25, r=20, t=40, b=20), showlegend=show_legend,
 76 |                       title={'text': title, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'},
 77 |                       font=dict(size=15), yaxis=yaxis)
 78 |     for ext in exts:
 79 |         fig.write_image(f"{save_path}/{file_name}{ext}")
80 |
81 |
82 | def export_convergence_figures(df, figure_size=(500, 600), xlabel="Epoch", ylabel="Fitness value", title="Convergence chart of comparison models",
83 | legend_name="Optimizer", exts=(".png", ".pdf"), file_name="convergence", save_path="history"):
84 | """
85 | Parameters
86 | ----------
87 | df : pd.DataFrame
88 | The format of df parameter:
89 | FBIO GWO
90 | 62.62501039 62.72457583
91 | 62.62085777 62.71386468
92 | 62.62085777 62.71386468
93 | 62.62085777 62.71386468
94 | 62.62085777 62.66383109
95 | 62.62085777 62.66310589
96 |
 97 |     figure_size : list, tuple, np.ndarray, None; default=(500, 600)
 98 |         The size of the saved figures. `None` lets the size be chosen automatically.
 99 |         Otherwise, pass (width, height) of the figure in pixels (roughly 100px to 1500px).
100 | 
101 |     xlabel : str; default="Epoch"
102 |         The label for the x-axis of convergence figures.
103 | 
104 |     ylabel : str; default="Fitness value"
105 |         The label for the y-axis of convergence figures.
106 | 
107 |     title : str; default="Convergence chart of comparison models"
108 |         The title of the figures. It can be the same for all objectives because the y-axis label already distinguishes them.
109 | 
110 |     legend_name : str; default="Optimizer"
111 |         Set the name for the legend.
112 | 
113 |     exts : list, tuple, np.ndarray; default=(".png", ".pdf")
114 |         List of file extensions to save. Multiple formats are useful for different targets, e.g. LaTeX (".pdf") and Word (".png").
115 |
116 | file_name : str; default="convergence"
117 | The prefix for filenames that will be saved.
118 |
119 | save_path : str; default="history"
120 | The path to save the figure
121 | """
122 | yaxis = dict(nticks=8)
123 |     if abs(df.iloc[0, -1]) > 1e5 or abs(df.iloc[0, -1]) < 1e-5:
124 | yaxis = dict(tickformat=".2e", exponentformat="power", showexponent="first")
125 | if xlabel is None:
126 | xlabel = ""
127 | if ylabel is None:
128 | ylabel = ""
129 | if title is None:
130 | title = ""
131 | if legend_name is None:
132 | legend_name = "Optimizer"
133 | # Melt the DataFrame to convert it from wide to long format
134 | df = df.reset_index()
135 | df_long = pd.melt(df, id_vars='index', var_name='Column', value_name='Value')
136 | # Define the line chart using Plotly Express
137 | fig = px.line(df_long, x='index', y='Value', color='Column',
138 | labels={'index': xlabel, 'Value': ylabel, 'Column': legend_name})
139 | fig.update_layout(width=figure_size[0], height=figure_size[1],
140 | margin=dict(l=25, r=20, t=40, b=20), showlegend=True,
141 | title={'text': title, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'},
142 | font=dict(size=15), yaxis=yaxis)
143 |     for ext in exts:
144 | fig.write_image(f"{save_path}/{file_name}{ext}")
145 |
--------------------------------------------------------------------------------
/metacluster/data/liver.csv:
--------------------------------------------------------------------------------
1 | 85,92,45,27,31,0.0,1
2 | 85,64,59,32,23,0.0,2
3 | 86,54,33,16,54,0.0,2
4 | 91,78,34,24,36,0.0,2
5 | 87,70,12,28,10,0.0,2
6 | 98,55,13,17,17,0.0,2
7 | 88,62,20,17,9,0.5,1
8 | 88,67,21,11,11,0.5,1
9 | 92,54,22,20,7,0.5,1
10 | 90,60,25,19,5,0.5,1
11 | 89,52,13,24,15,0.5,1
12 | 82,62,17,17,15,0.5,1
13 | 90,64,61,32,13,0.5,1
14 | 86,77,25,19,18,0.5,1
15 | 96,67,29,20,11,0.5,1
16 | 91,78,20,31,18,0.5,1
17 | 89,67,23,16,10,0.5,1
18 | 89,79,17,17,16,0.5,1
19 | 91,107,20,20,56,0.5,1
20 | 94,116,11,33,11,0.5,1
21 | 92,59,35,13,19,0.5,1
22 | 93,23,35,20,20,0.5,1
23 | 90,60,23,27,5,0.5,1
24 | 96,68,18,19,19,0.5,1
25 | 84,80,47,33,97,0.5,1
26 | 92,70,24,13,26,0.5,1
27 | 90,47,28,15,18,0.5,1
28 | 88,66,20,21,10,0.5,1
29 | 91,102,17,13,19,0.5,1
30 | 87,41,31,19,16,0.5,1
31 | 86,79,28,16,17,0.5,1
32 | 91,57,31,23,42,0.5,1
33 | 93,77,32,18,29,0.5,1
34 | 88,96,28,21,40,0.5,1
35 | 94,65,22,18,11,0.5,1
36 | 91,72,155,68,82,0.5,2
37 | 85,54,47,33,22,0.5,2
38 | 79,39,14,19,9,0.5,2
39 | 85,85,25,26,30,0.5,2
40 | 89,63,24,20,38,0.5,2
41 | 84,92,68,37,44,0.5,2
42 | 89,68,26,39,42,0.5,2
43 | 89,101,18,25,13,0.5,2
44 | 86,84,18,14,16,0.5,2
45 | 85,65,25,14,18,0.5,2
46 | 88,61,19,21,13,0.5,2
47 | 92,56,14,16,10,0.5,2
48 | 95,50,29,25,50,0.5,2
49 | 91,75,24,22,11,0.5,2
50 | 83,40,29,25,38,0.5,2
51 | 89,74,19,23,16,0.5,2
52 | 85,64,24,22,11,0.5,2
53 | 92,57,64,36,90,0.5,2
54 | 94,48,11,23,43,0.5,2
55 | 87,52,21,19,30,0.5,2
56 | 85,65,23,29,15,0.5,2
57 | 84,82,21,21,19,0.5,2
58 | 88,49,20,22,19,0.5,2
59 | 96,67,26,26,36,0.5,2
60 | 90,63,24,24,24,0.5,2
61 | 90,45,33,34,27,0.5,2
62 | 90,72,14,15,18,0.5,2
63 | 91,55,4,8,13,0.5,2
64 | 91,52,15,22,11,0.5,2
65 | 87,71,32,19,27,1.0,1
66 | 89,77,26,20,19,1.0,1
67 | 89,67,5,17,14,1.0,2
68 | 85,51,26,24,23,1.0,2
69 | 103,75,19,30,13,1.0,2
70 | 90,63,16,21,14,1.0,2
71 | 90,63,29,23,57,2.0,1
72 | 90,67,35,19,35,2.0,1
73 | 87,66,27,22,9,2.0,1
74 | 90,73,34,21,22,2.0,1
75 | 86,54,20,21,16,2.0,1
76 | 90,80,19,14,42,2.0,1
77 | 87,90,43,28,156,2.0,2
78 | 96,72,28,19,30,2.0,2
79 | 91,55,9,25,16,2.0,2
80 | 95,78,27,25,30,2.0,2
81 | 92,101,34,30,64,2.0,2
82 | 89,51,41,22,48,2.0,2
83 | 91,99,42,33,16,2.0,2
84 | 94,58,21,18,26,2.0,2
85 | 92,60,30,27,297,2.0,2
86 | 94,58,21,18,26,2.0,2
87 | 88,47,33,26,29,2.0,2
88 | 92,65,17,25,9,2.0,2
89 | 92,79,22,20,11,3.0,1
90 | 84,83,20,25,7,3.0,1
91 | 88,68,27,21,26,3.0,1
92 | 86,48,20,20,6,3.0,1
93 | 99,69,45,32,30,3.0,1
94 | 88,66,23,12,15,3.0,1
95 | 89,62,42,30,20,3.0,1
96 | 90,51,23,17,27,3.0,1
97 | 81,61,32,37,53,3.0,2
98 | 89,89,23,18,104,3.0,2
99 | 89,65,26,18,36,3.0,2
100 | 92,75,26,26,24,3.0,2
101 | 85,59,25,20,25,3.0,2
102 | 92,61,18,13,81,3.0,2
103 | 89,63,22,27,10,4.0,1
104 | 90,84,18,23,13,4.0,1
105 | 88,95,25,19,14,4.0,1
106 | 89,35,27,29,17,4.0,1
107 | 91,80,37,23,27,4.0,1
108 | 91,109,33,15,18,4.0,1
109 | 91,65,17,5,7,4.0,1
110 | 88,107,29,20,50,4.0,2
111 | 87,76,22,55,9,4.0,2
112 | 87,86,28,23,21,4.0,2
113 | 87,42,26,23,17,4.0,2
114 | 88,80,24,25,17,4.0,2
115 | 90,96,34,49,169,4.0,2
116 | 86,67,11,15,8,4.0,2
117 | 92,40,19,20,21,4.0,2
118 | 85,60,17,21,14,4.0,2
119 | 89,90,15,17,25,4.0,2
120 | 91,57,15,16,16,4.0,2
121 | 96,55,48,39,42,4.0,2
122 | 79,101,17,27,23,4.0,2
123 | 90,134,14,20,14,4.0,2
124 | 89,76,14,21,24,4.0,2
125 | 88,93,29,27,31,4.0,2
126 | 90,67,10,16,16,4.0,2
127 | 92,73,24,21,48,4.0,2
128 | 91,55,28,28,82,4.0,2
129 | 83,45,19,21,13,4.0,2
130 | 90,74,19,14,22,4.0,2
131 | 92,66,21,16,33,5.0,1
132 | 93,63,26,18,18,5.0,1
133 | 86,78,47,39,107,5.0,2
134 | 97,44,113,45,150,5.0,2
135 | 87,59,15,19,12,5.0,2
136 | 86,44,21,11,15,5.0,2
137 | 87,64,16,20,24,5.0,2
138 | 92,57,21,23,22,5.0,2
139 | 90,70,25,23,112,5.0,2
140 | 99,59,17,19,11,5.0,2
141 | 92,80,10,26,20,6.0,1
142 | 95,60,26,22,28,6.0,1
143 | 91,63,25,26,15,6.0,1
144 | 92,62,37,21,36,6.0,1
145 | 95,50,13,14,15,6.0,1
146 | 90,76,37,19,50,6.0,1
147 | 96,70,70,26,36,6.0,1
148 | 95,62,64,42,76,6.0,1
149 | 92,62,20,23,20,6.0,1
150 | 91,63,25,26,15,6.0,1
151 | 82,56,67,38,92,6.0,2
152 | 92,82,27,24,37,6.0,2
153 | 90,63,12,26,21,6.0,2
154 | 88,37,9,15,16,6.0,2
155 | 100,60,29,23,76,6.0,2
156 | 98,43,35,23,69,6.0,2
157 | 91,74,87,50,67,6.0,2
158 | 92,87,57,25,44,6.0,2
159 | 93,99,36,34,48,6.0,2
160 | 90,72,17,19,19,6.0,2
161 | 97,93,21,20,68,6.0,2
162 | 93,50,18,25,17,6.0,2
163 | 90,57,20,26,33,6.0,2
164 | 92,76,31,28,41,6.0,2
165 | 88,55,19,17,14,6.0,2
166 | 89,63,24,29,29,6.0,2
167 | 92,79,70,32,84,7.0,1
168 | 92,93,58,35,120,7.0,1
169 | 93,84,58,47,62,7.0,2
170 | 97,71,29,22,52,8.0,1
171 | 84,99,33,19,26,8.0,1
172 | 96,44,42,23,73,8.0,1
173 | 90,62,22,21,21,8.0,1
174 | 92,94,18,17,6,8.0,1
175 | 90,67,77,39,114,8.0,1
176 | 97,71,29,22,52,8.0,1
177 | 91,69,25,25,66,8.0,2
178 | 93,59,17,20,14,8.0,2
179 | 92,95,85,48,200,8.0,2
180 | 90,50,26,22,53,8.0,2
181 | 91,62,59,47,60,8.0,2
182 | 92,93,22,28,123,9.0,1
183 | 92,77,86,41,31,10.0,1
184 | 86,66,22,24,26,10.0,2
185 | 98,57,31,34,73,10.0,2
186 | 95,80,50,64,55,10.0,2
187 | 92,108,53,33,94,12.0,2
188 | 97,92,22,28,49,12.0,2
189 | 93,77,39,37,108,16.0,1
190 | 94,83,81,34,201,20.0,1
191 | 87,75,25,21,14,0.0,1
192 | 88,56,23,18,12,0.0,1
193 | 84,97,41,20,32,0.0,2
194 | 94,91,27,20,15,0.5,1
195 | 97,62,17,13,5,0.5,1
196 | 92,85,25,20,12,0.5,1
197 | 82,48,27,15,12,0.5,1
198 | 88,74,31,25,15,0.5,1
199 | 95,77,30,14,21,0.5,1
200 | 88,94,26,18,8,0.5,1
201 | 91,70,19,19,22,0.5,1
202 | 83,54,27,15,12,0.5,1
203 | 91,105,40,26,56,0.5,1
204 | 86,79,37,28,14,0.5,1
205 | 91,96,35,22,135,0.5,1
206 | 89,82,23,14,35,0.5,1
207 | 90,73,24,23,11,0.5,1
208 | 90,87,19,25,19,0.5,1
209 | 89,82,33,32,18,0.5,1
210 | 85,79,17,8,9,0.5,1
211 | 85,119,30,26,17,0.5,1
212 | 78,69,24,18,31,0.5,1
213 | 88,107,34,21,27,0.5,1
214 | 89,115,17,27,7,0.5,1
215 | 92,67,23,15,12,0.5,1
216 | 89,101,27,34,14,0.5,1
217 | 91,84,11,12,10,0.5,1
218 | 94,101,41,20,53,0.5,2
219 | 88,46,29,22,18,0.5,2
220 | 88,122,35,29,42,0.5,2
221 | 84,88,28,25,35,0.5,2
222 | 90,79,18,15,24,0.5,2
223 | 87,69,22,26,11,0.5,2
224 | 65,63,19,20,14,0.5,2
225 | 90,64,12,17,14,0.5,2
226 | 85,58,18,24,16,0.5,2
227 | 88,81,41,27,36,0.5,2
228 | 86,78,52,29,62,0.5,2
229 | 82,74,38,28,48,0.5,2
230 | 86,58,36,27,59,0.5,2
231 | 94,56,30,18,27,0.5,2
232 | 87,57,30,30,22,0.5,2
233 | 98,74,148,75,159,0.5,2
234 | 94,75,20,25,38,0.5,2
235 | 83,68,17,20,71,0.5,2
236 | 93,56,25,21,33,0.5,2
237 | 101,65,18,21,22,0.5,2
238 | 92,65,25,20,31,0.5,2
239 | 92,58,14,16,13,0.5,2
240 | 86,58,16,23,23,0.5,2
241 | 85,62,15,13,22,0.5,2
242 | 86,57,13,20,13,0.5,2
243 | 86,54,26,30,13,0.5,2
244 | 81,41,33,27,34,1.0,1
245 | 91,67,32,26,13,1.0,1
246 | 91,80,21,19,14,1.0,1
247 | 92,60,23,15,19,1.0,1
248 | 91,60,32,14,8,1.0,1
249 | 93,65,28,22,10,1.0,1
250 | 90,63,45,24,85,1.0,2
251 | 87,92,21,22,37,1.0,2
252 | 83,78,31,19,115,1.0,2
253 | 95,62,24,23,14,1.0,2
254 | 93,59,41,30,48,1.0,2
255 | 84,82,43,32,38,2.0,1
256 | 87,71,33,20,22,2.0,1
257 | 86,44,24,15,18,2.0,1
258 | 86,66,28,24,21,2.0,1
259 | 88,58,31,17,17,2.0,1
260 | 90,61,28,29,31,2.0,1
261 | 88,69,70,24,64,2.0,1
262 | 93,87,18,17,26,2.0,1
263 | 98,58,33,21,28,2.0,1
264 | 91,44,18,18,23,2.0,2
265 | 87,75,37,19,70,2.0,2
266 | 94,91,30,26,25,2.0,2
267 | 88,85,14,15,10,2.0,2
268 | 89,109,26,25,27,2.0,2
269 | 87,59,37,27,34,2.0,2
270 | 93,58,20,23,18,2.0,2
271 | 88,57,9,15,16,2.0,2
272 | 94,65,38,27,17,3.0,1
273 | 91,71,12,22,11,3.0,1
274 | 90,55,20,20,16,3.0,1
275 | 91,64,21,17,26,3.0,2
276 | 88,47,35,26,33,3.0,2
277 | 82,72,31,20,84,3.0,2
278 | 85,58,83,49,51,3.0,2
279 | 91,54,25,22,35,4.0,1
280 | 98,50,27,25,53,4.0,2
281 | 86,62,29,21,26,4.0,2
282 | 89,48,32,22,14,4.0,2
283 | 82,68,20,22,9,4.0,2
284 | 83,70,17,19,23,4.0,2
285 | 96,70,21,26,21,4.0,2
286 | 94,117,77,56,52,4.0,2
287 | 93,45,11,14,21,4.0,2
288 | 93,49,27,21,29,4.0,2
289 | 84,73,46,32,39,4.0,2
290 | 91,63,17,17,46,4.0,2
291 | 90,57,31,18,37,4.0,2
292 | 87,45,19,13,16,4.0,2
293 | 91,68,14,20,19,4.0,2
294 | 86,55,29,35,108,4.0,2
295 | 91,86,52,47,52,4.0,2
296 | 88,46,15,33,55,4.0,2
297 | 85,52,22,23,34,4.0,2
298 | 89,72,33,27,55,4.0,2
299 | 95,59,23,18,19,4.0,2
300 | 94,43,154,82,121,4.0,2
301 | 96,56,38,26,23,5.0,2
302 | 90,52,10,17,12,5.0,2
303 | 94,45,20,16,12,5.0,2
304 | 99,42,14,21,49,5.0,2
305 | 93,102,47,23,37,5.0,2
306 | 94,71,25,26,31,5.0,2
307 | 92,73,33,34,115,5.0,2
308 | 87,54,41,29,23,6.0,1
309 | 92,67,15,14,14,6.0,1
310 | 98,101,31,26,32,6.0,1
311 | 92,53,51,33,92,6.0,1
312 | 97,94,43,43,82,6.0,1
313 | 93,43,11,16,54,6.0,1
314 | 93,68,24,18,19,6.0,1
315 | 95,36,38,19,15,6.0,1
316 | 99,86,58,42,203,6.0,1
317 | 98,66,103,57,114,6.0,1
318 | 92,80,10,26,20,6.0,1
319 | 96,74,27,25,43,6.0,2
320 | 95,93,21,27,47,6.0,2
321 | 86,109,16,22,28,6.0,2
322 | 91,46,30,24,39,7.0,2
323 | 102,82,34,78,203,7.0,2
324 | 85,50,12,18,14,7.0,2
325 | 91,57,33,23,12,8.0,1
326 | 91,52,76,32,24,8.0,1
327 | 93,70,46,30,33,8.0,1
328 | 87,55,36,19,25,8.0,1
329 | 98,123,28,24,31,8.0,1
330 | 82,55,18,23,44,8.0,2
331 | 95,73,20,25,225,8.0,2
332 | 97,80,17,20,53,8.0,2
333 | 100,83,25,24,28,8.0,2
334 | 88,91,56,35,126,9.0,2
335 | 91,138,45,21,48,10.0,1
336 | 92,41,37,22,37,10.0,1
337 | 86,123,20,25,23,10.0,2
338 | 91,93,35,34,37,10.0,2
339 | 87,87,15,23,11,10.0,2
340 | 87,56,52,43,55,10.0,2
341 | 99,75,26,24,41,12.0,1
342 | 96,69,53,43,203,12.0,2
343 | 98,77,55,35,89,15.0,1
344 | 91,68,27,26,14,16.0,1
345 | 98,99,57,45,65,20.0,1
346 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | ---
7 |
8 | [](https://github.com/thieu1995/metacluster/releases)
9 | [](https://pypi.python.org/pypi/metacluster)
10 | [](https://badge.fury.io/py/metacluster)
11 | 
12 | 
13 | [](https://pepy.tech/project/MetaCluster)
14 | [](https://github.com/thieu1995/metacluster/actions/workflows/publish-package.yaml)
15 | 
16 | [](https://metacluster.readthedocs.io/en/latest/?badge=latest)
17 | [](https://t.me/+fRVCJGuGJg1mNDg1)
18 | 
19 | [](https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project)
20 | [](https://zenodo.org/badge/latestdoi/670197315)
21 | [](https://www.gnu.org/licenses/gpl-3.0)
22 |
23 |
24 | MetaCluster is the largest open-source nature-inspired optimization (Metaheuristic Algorithms) library for
 25 | clustering problems in Python.
26 |
27 | * **Free software:** GNU General Public License (GPL) V3 license
 28 | * **Provides 3 classes: `MetaCluster`, `MhaKCentersClustering`, and `MhaKMeansTuner`**
29 | * **Total nature-inspired metaheuristic optimizers (Metaheuristic Algorithms)**: > 200 optimizers
30 | * **Total objective functions (as fitness)**: > 40 objectives
31 | * **Total supported datasets**: 48 datasets from Scikit learn, UCI, ELKI, KEEL...
32 | * **Total performance metrics**: > 40 metrics
 33 | * **Total different ways of detecting the K value**: >= 10 methods
34 | * **Documentation:** https://metacluster.readthedocs.io/en/latest/
35 | * **Python versions:** >= 3.7.x
36 | * **Dependencies:** numpy, scipy, scikit-learn, pandas, mealpy, permetrics, plotly, kaleido
37 |
38 |
39 |
40 | # Citation Request
41 |
42 | Please include these citations if you plan to use this library:
43 |
44 | ```code
45 | @article{VanThieu2023,
46 | author = {Van Thieu, Nguyen and Oliva, Diego and Pérez-Cisneros, Marco},
47 | title = {MetaCluster: An open-source Python library for metaheuristic-based clustering problems},
48 | journal = {SoftwareX},
49 | year = {2023},
50 | pages = {101597},
51 | volume = {24},
52 | DOI = {10.1016/j.softx.2023.101597},
53 | }
54 |
55 | @article{van2023mealpy,
56 | title={MEALPY: An open-source library for latest meta-heuristic algorithms in Python},
57 | author={Van Thieu, Nguyen and Mirjalili, Seyedali},
58 | journal={Journal of Systems Architecture},
59 | year={2023},
60 | publisher={Elsevier},
61 | doi={10.1016/j.sysarc.2023.102871}
62 | }
63 | ```
64 |
65 |
66 | # Installation
67 |
68 | * Install the [current PyPI release](https://pypi.python.org/pypi/metacluster):
69 | ```bash
70 | $ pip install metacluster
71 | ```
72 |
73 | After installation, check the version:
74 | ```bash
75 | $ python
76 | >>> import metacluster
77 | >>> metacluster.__version__
78 | ```
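
Alternatively, you can install the in-development version directly from GitHub (a standard pip feature; this assumes the default branch is installable as a package):

```bash
$ pip install git+https://github.com/thieu1995/metacluster
```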
79 |
80 | ### Examples
81 |
82 | We implement a dedicated Github repository for examples at [MetaCluster_examples](https://github.com/thieu1995/MetaCluster_examples)
83 |
84 | Let's go through some basic examples from here:
85 |
 86 | #### 1. First, load your dataset. You can use one of the datasets available in MetaCluster:
87 |
88 | ```python
89 | # Load available dataset from MetaCluster
90 | from metacluster import get_dataset
91 |
92 | # Try unknown data
93 | get_dataset("unknown")
 94 | # Enter: 1 -> This will list all available datasets
95 |
96 | data = get_dataset("Arrhythmia")
97 | ```
98 |
99 | * Or you can load your own dataset
100 |
101 | ```python
102 | import pandas as pd
103 | from metacluster import Data
104 |
105 | # load X and y
106 | # NOTE MetaCluster accepts numpy arrays only, hence use the .values attribute
107 | dataset = pd.read_csv('examples/dataset.csv', index_col=0).values
108 | X, y = dataset[:, 0:-1], dataset[:, -1]
109 | data = Data(X, y, name="my-dataset")
110 | ```
111 |
112 | #### 2. Next, scale your features
113 |
114 | **Make sure your dataset is scaled and normalized before clustering**
115 |
116 | ```python
117 | # MinMaxScaler
118 | data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
119 |
120 | # StandardScaler
121 | data.X, scaler = data.scale(data.X, method="StandardScaler")
122 |
123 | # MaxAbsScaler
124 | data.X, scaler = data.scale(data.X, method="MaxAbsScaler")
125 |
126 | # RobustScaler
127 | data.X, scaler = data.scale(data.X, method="RobustScaler")
128 |
129 | # Normalizer
130 | data.X, scaler = data.scale(data.X, method="Normalizer", norm="l2") # "l1" or "l2" or "max"
131 | ```
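
The `scaler` returned by `data.scale` is presumably a fitted scikit-learn-style scaler (an assumption based on the method names above), so you can reuse it to transform new samples consistently. A minimal sketch:

```python
import numpy as np

# Hypothetical new samples with the same number of features as data.X (illustrative values)
X_new = np.array([[0.2, 0.5, 0.1, 0.7]])
# Assumes the returned scaler exposes the scikit-learn transform() method
X_new_scaled = scaler.transform(X_new)
```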
132 |
133 |
134 | #### 3. Next, select the metaheuristic algorithm, its parameters, the list of objectives, and the list of performance metrics
135 |
136 | ```python
137 | list_optimizer = ["BaseFBIO", "OriginalGWO", "OriginalSMA"]
138 | list_paras = [
139 | {"name": "FBIO", "epoch": 10, "pop_size": 30},
140 | {"name": "GWO", "epoch": 10, "pop_size": 30},
141 | {"name": "SMA", "epoch": 10, "pop_size": 30}
142 | ]
143 | list_obj = ["SI", "RSI"]
144 | list_metric = ["BHI", "DBI", "DI", "CHI", "SSEI", "NMIS", "HS", "CS", "VMS", "HGS"]
145 | ```
146 |
147 | You can check all supported metaheuristic algorithms at: https://github.com/thieu1995/mealpy.
148 | All supported clustering objectives and metrics are listed at: https://github.com/thieu1995/permetrics.
149 | 
150 | If you don't want to read the documentation, you can print out all supported options with:
151 |
152 | ```python
153 | from metacluster import MetaCluster
154 |
155 | # Get all supported methods and print them out
156 | MetaCluster.get_support(name="all")
157 | ```
158 |
159 |
160 | #### 4. Finally, create an instance of the `MetaCluster` class and run it.
161 |
162 | ```python
163 | model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=3, seed=10)
164 |
165 | model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)
166 |
167 | model.save_boxplots()
168 | model.save_convergences()
169 | ```
170 |
171 | As you can see, you can define different datasets and reuse the same model to run them, as in the sketch below.
172 | Remember to set a name for your dataset, because the folder that holds your results is named after your dataset.
173 | More examples can be found [here](/examples)
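
For instance, a minimal sketch of reusing one configuration across several datasets might look like the following (dataset names are illustrative; call `get_dataset("unknown")` to list the available ones, and this assumes `execute` can be called repeatedly with different `Data` objects):

```python
from metacluster import get_dataset, MetaCluster

list_optimizer = ["BaseFBIO", "OriginalGWO", "OriginalSMA"]
list_paras = [
    {"name": "FBIO", "epoch": 10, "pop_size": 30},
    {"name": "GWO", "epoch": 10, "pop_size": 30},
    {"name": "SMA", "epoch": 10, "pop_size": 30}
]
list_obj = ["SI", "RSI"]
list_metric = ["BHI", "DBI", "DI", "CHI"]

# One model, several datasets: results are grouped by dataset name under save_path
model = MetaCluster(list_optimizer=list_optimizer, list_paras=list_paras, list_obj=list_obj, n_trials=3, seed=10)

for ds_name in ["Iris", "Zoo", "Arrhythmia"]:   # illustrative dataset names
    data = get_dataset(ds_name)
    data.X, scaler = data.scale(data.X, method="MinMaxScaler", feature_range=(0, 1))
    model.execute(data=data, cluster_finder="elbow", list_metric=list_metric, save_path="history", verbose=False)
    model.save_boxplots()
    model.save_convergences()
```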
174 |
175 |
176 | # Support
177 |
178 | ### Official links (questions, problems)
179 |
180 | * Official source code repo: https://github.com/thieu1995/metacluster
181 | * Official document: https://metacluster.readthedocs.io/
182 | * Download releases: https://pypi.org/project/metacluster/
183 | * Issue tracker: https://github.com/thieu1995/metacluster/issues
184 | * Notable changes log: https://github.com/thieu1995/metacluster/blob/master/ChangeLog.md
185 | * Official chat group: https://t.me/+fRVCJGuGJg1mNDg1
186 |
187 | * This project is also related to our other projects on optimization and machine learning. Check them out here:
188 | * https://github.com/thieu1995/metaheuristics
189 | * https://github.com/thieu1995/mealpy
190 | * https://github.com/thieu1995/mafese
191 | * https://github.com/thieu1995/pfevaluator
192 | * https://github.com/thieu1995/opfunu
193 | * https://github.com/thieu1995/enoppy
194 | * https://github.com/thieu1995/permetrics
195 | * https://github.com/thieu1995/IntelELM
196 | * https://github.com/thieu1995/MetaPerceptron
197 | * https://github.com/thieu1995/GrafoRVFL
198 | * https://github.com/aiir-team
199 |
200 |
201 | ### Reference links
202 |
203 | ```code
204 | 1. https://jtemporal.com/kmeans-and-elbow-method/
205 | 2. https://medium.com/@masarudheena/4-best-ways-to-find-optimal-number-of-clusters-for-clustering-with-python-code-706199fa957c
206 | 3. https://github.com/minddrummer/gap/blob/master/gap/gap.py
207 | 4. https://www.tandfonline.com/doi/pdf/10.1080/03610927408827101
208 | 5. https://doi.org/10.1016/j.engappai.2018.03.013
209 | 6. https://github.com/tirthajyoti/Machine-Learning-with-Python/blob/master/Clustering-Dimensionality-Reduction/Clustering_metrics.ipynb
210 | 7. https://elki-project.github.io/
211 | 8. https://sci2s.ugr.es/keel/index.php
212 | 9. https://archive.ics.uci.edu/datasets
213 | 10. https://python-charts.com/distribution/box-plot-plotly/
214 | 11. https://plotly.com/python/box-plots/?_ga=2.50659434.2126348639.1688086416-114197406.1688086416#box-plot-styling-mean--standard-deviation
215 | ```
216 |
--------------------------------------------------------------------------------
/metacluster/data/balance.csv:
--------------------------------------------------------------------------------
1 | 1,1,1,1,1
2 | 1,1,1,2,2
3 | 1,1,1,3,2
4 | 1,1,1,4,2
5 | 1,1,1,5,2
6 | 1,1,2,1,2
7 | 1,1,2,2,2
8 | 1,1,2,3,2
9 | 1,1,2,4,2
10 | 1,1,2,5,2
11 | 1,1,3,1,2
12 | 1,1,3,2,2
13 | 1,1,3,3,2
14 | 1,1,3,4,2
15 | 1,1,3,5,2
16 | 1,1,4,1,2
17 | 1,1,4,2,2
18 | 1,1,4,3,2
19 | 1,1,4,4,2
20 | 1,1,4,5,2
21 | 1,1,5,1,2
22 | 1,1,5,2,2
23 | 1,1,5,3,2
24 | 1,1,5,4,2
25 | 1,1,5,5,2
26 | 1,2,1,1,0
27 | 1,2,1,2,1
28 | 1,2,1,3,2
29 | 1,2,1,4,2
30 | 1,2,1,5,2
31 | 1,2,2,1,1
32 | 1,2,2,2,2
33 | 1,2,2,3,2
34 | 1,2,2,4,2
35 | 1,2,2,5,2
36 | 1,2,3,1,2
37 | 1,2,3,2,2
38 | 1,2,3,3,2
39 | 1,2,3,4,2
40 | 1,2,3,5,2
41 | 1,2,4,1,2
42 | 1,2,4,2,2
43 | 1,2,4,3,2
44 | 1,2,4,4,2
45 | 1,2,4,5,2
46 | 1,2,5,1,2
47 | 1,2,5,2,2
48 | 1,2,5,3,2
49 | 1,2,5,4,2
50 | 1,2,5,5,2
51 | 1,3,1,1,0
52 | 1,3,1,2,0
53 | 1,3,1,3,1
54 | 1,3,1,4,2
55 | 1,3,1,5,2
56 | 1,3,2,1,0
57 | 1,3,2,2,2
58 | 1,3,2,3,2
59 | 1,3,2,4,2
60 | 1,3,2,5,2
61 | 1,3,3,1,1
62 | 1,3,3,2,2
63 | 1,3,3,3,2
64 | 1,3,3,4,2
65 | 1,3,3,5,2
66 | 1,3,4,1,2
67 | 1,3,4,2,2
68 | 1,3,4,3,2
69 | 1,3,4,4,2
70 | 1,3,4,5,2
71 | 1,3,5,1,2
72 | 1,3,5,2,2
73 | 1,3,5,3,2
74 | 1,3,5,4,2
75 | 1,3,5,5,2
76 | 1,4,1,1,0
77 | 1,4,1,2,0
78 | 1,4,1,3,0
79 | 1,4,1,4,1
80 | 1,4,1,5,2
81 | 1,4,2,1,0
82 | 1,4,2,2,1
83 | 1,4,2,3,2
84 | 1,4,2,4,2
85 | 1,4,2,5,2
86 | 1,4,3,1,0
87 | 1,4,3,2,2
88 | 1,4,3,3,2
89 | 1,4,3,4,2
90 | 1,4,3,5,2
91 | 1,4,4,1,1
92 | 1,4,4,2,2
93 | 1,4,4,3,2
94 | 1,4,4,4,2
95 | 1,4,4,5,2
96 | 1,4,5,1,2
97 | 1,4,5,2,2
98 | 1,4,5,3,2
99 | 1,4,5,4,2
100 | 1,4,5,5,2
101 | 1,5,1,1,0
102 | 1,5,1,2,0
103 | 1,5,1,3,0
104 | 1,5,1,4,0
105 | 1,5,1,5,1
106 | 1,5,2,1,0
107 | 1,5,2,2,0
108 | 1,5,2,3,2
109 | 1,5,2,4,2
110 | 1,5,2,5,2
111 | 1,5,3,1,0
112 | 1,5,3,2,2
113 | 1,5,3,3,2
114 | 1,5,3,4,2
115 | 1,5,3,5,2
116 | 1,5,4,1,0
117 | 1,5,4,2,2
118 | 1,5,4,3,2
119 | 1,5,4,4,2
120 | 1,5,4,5,2
121 | 1,5,5,1,1
122 | 1,5,5,2,2
123 | 1,5,5,3,2
124 | 1,5,5,4,2
125 | 1,5,5,5,2
126 | 2,1,1,1,0
127 | 2,1,1,2,1
128 | 2,1,1,3,2
129 | 2,1,1,4,2
130 | 2,1,1,5,2
131 | 2,1,2,1,1
132 | 2,1,2,2,2
133 | 2,1,2,3,2
134 | 2,1,2,4,2
135 | 2,1,2,5,2
136 | 2,1,3,1,2
137 | 2,1,3,2,2
138 | 2,1,3,3,2
139 | 2,1,3,4,2
140 | 2,1,3,5,2
141 | 2,1,4,1,2
142 | 2,1,4,2,2
143 | 2,1,4,3,2
144 | 2,1,4,4,2
145 | 2,1,4,5,2
146 | 2,1,5,1,2
147 | 2,1,5,2,2
148 | 2,1,5,3,2
149 | 2,1,5,4,2
150 | 2,1,5,5,2
151 | 2,2,1,1,0
152 | 2,2,1,2,0
153 | 2,2,1,3,0
154 | 2,2,1,4,1
155 | 2,2,1,5,2
156 | 2,2,2,1,0
157 | 2,2,2,2,1
158 | 2,2,2,3,2
159 | 2,2,2,4,2
160 | 2,2,2,5,2
161 | 2,2,3,1,0
162 | 2,2,3,2,2
163 | 2,2,3,3,2
164 | 2,2,3,4,2
165 | 2,2,3,5,2
166 | 2,2,4,1,1
167 | 2,2,4,2,2
168 | 2,2,4,3,2
169 | 2,2,4,4,2
170 | 2,2,4,5,2
171 | 2,2,5,1,2
172 | 2,2,5,2,2
173 | 2,2,5,3,2
174 | 2,2,5,4,2
175 | 2,2,5,5,2
176 | 2,3,1,1,0
177 | 2,3,1,2,0
178 | 2,3,1,3,0
179 | 2,3,1,4,0
180 | 2,3,1,5,0
181 | 2,3,2,1,0
182 | 2,3,2,2,0
183 | 2,3,2,3,1
184 | 2,3,2,4,2
185 | 2,3,2,5,2
186 | 2,3,3,1,0
187 | 2,3,3,2,1
188 | 2,3,3,3,2
189 | 2,3,3,4,2
190 | 2,3,3,5,2
191 | 2,3,4,1,0
192 | 2,3,4,2,2
193 | 2,3,4,3,2
194 | 2,3,4,4,2
195 | 2,3,4,5,2
196 | 2,3,5,1,0
197 | 2,3,5,2,2
198 | 2,3,5,3,2
199 | 2,3,5,4,2
200 | 2,3,5,5,2
201 | 2,4,1,1,0
202 | 2,4,1,2,0
203 | 2,4,1,3,0
204 | 2,4,1,4,0
205 | 2,4,1,5,0
206 | 2,4,2,1,0
207 | 2,4,2,2,0
208 | 2,4,2,3,0
209 | 2,4,2,4,1
210 | 2,4,2,5,2
211 | 2,4,3,1,0
212 | 2,4,3,2,0
213 | 2,4,3,3,2
214 | 2,4,3,4,2
215 | 2,4,3,5,2
216 | 2,4,4,1,0
217 | 2,4,4,2,1
218 | 2,4,4,3,2
219 | 2,4,4,4,2
220 | 2,4,4,5,2
221 | 2,4,5,1,0
222 | 2,4,5,2,2
223 | 2,4,5,3,2
224 | 2,4,5,4,2
225 | 2,4,5,5,2
226 | 2,5,1,1,0
227 | 2,5,1,2,0
228 | 2,5,1,3,0
229 | 2,5,1,4,0
230 | 2,5,1,5,0
231 | 2,5,2,1,0
232 | 2,5,2,2,0
233 | 2,5,2,3,0
234 | 2,5,2,4,0
235 | 2,5,2,5,1
236 | 2,5,3,1,0
237 | 2,5,3,2,0
238 | 2,5,3,3,0
239 | 2,5,3,4,2
240 | 2,5,3,5,2
241 | 2,5,4,1,0
242 | 2,5,4,2,0
243 | 2,5,4,3,2
244 | 2,5,4,4,2
245 | 2,5,4,5,2
246 | 2,5,5,1,0
247 | 2,5,5,2,1
248 | 2,5,5,3,2
249 | 2,5,5,4,2
250 | 2,5,5,5,2
251 | 3,1,1,1,0
252 | 3,1,1,2,0
253 | 3,1,1,3,1
254 | 3,1,1,4,2
255 | 3,1,1,5,2
256 | 3,1,2,1,0
257 | 3,1,2,2,2
258 | 3,1,2,3,2
259 | 3,1,2,4,2
260 | 3,1,2,5,2
261 | 3,1,3,1,1
262 | 3,1,3,2,2
263 | 3,1,3,3,2
264 | 3,1,3,4,2
265 | 3,1,3,5,2
266 | 3,1,4,1,2
267 | 3,1,4,2,2
268 | 3,1,4,3,2
269 | 3,1,4,4,2
270 | 3,1,4,5,2
271 | 3,1,5,1,2
272 | 3,1,5,2,2
273 | 3,1,5,3,2
274 | 3,1,5,4,2
275 | 3,1,5,5,2
276 | 3,2,1,1,0
277 | 3,2,1,2,0
278 | 3,2,1,3,0
279 | 3,2,1,4,0
280 | 3,2,1,5,0
281 | 3,2,2,1,0
282 | 3,2,2,2,0
283 | 3,2,2,3,1
284 | 3,2,2,4,2
285 | 3,2,2,5,2
286 | 3,2,3,1,0
287 | 3,2,3,2,1
288 | 3,2,3,3,2
289 | 3,2,3,4,2
290 | 3,2,3,5,2
291 | 3,2,4,1,0
292 | 3,2,4,2,2
293 | 3,2,4,3,2
294 | 3,2,4,4,2
295 | 3,2,4,5,2
296 | 3,2,5,1,0
297 | 3,2,5,2,2
298 | 3,2,5,3,2
299 | 3,2,5,4,2
300 | 3,2,5,5,2
301 | 3,3,1,1,0
302 | 3,3,1,2,0
303 | 3,3,1,3,0
304 | 3,3,1,4,0
305 | 3,3,1,5,0
306 | 3,3,2,1,0
307 | 3,3,2,2,0
308 | 3,3,2,3,0
309 | 3,3,2,4,0
310 | 3,3,2,5,2
311 | 3,3,3,1,0
312 | 3,3,3,2,0
313 | 3,3,3,3,1
314 | 3,3,3,4,2
315 | 3,3,3,5,2
316 | 3,3,4,1,0
317 | 3,3,4,2,0
318 | 3,3,4,3,2
319 | 3,3,4,4,2
320 | 3,3,4,5,2
321 | 3,3,5,1,0
322 | 3,3,5,2,2
323 | 3,3,5,3,2
324 | 3,3,5,4,2
325 | 3,3,5,5,2
326 | 3,4,1,1,0
327 | 3,4,1,2,0
328 | 3,4,1,3,0
329 | 3,4,1,4,0
330 | 3,4,1,5,0
331 | 3,4,2,1,0
332 | 3,4,2,2,0
333 | 3,4,2,3,0
334 | 3,4,2,4,0
335 | 3,4,2,5,0
336 | 3,4,3,1,0
337 | 3,4,3,2,0
338 | 3,4,3,3,0
339 | 3,4,3,4,1
340 | 3,4,3,5,2
341 | 3,4,4,1,0
342 | 3,4,4,2,0
343 | 3,4,4,3,1
344 | 3,4,4,4,2
345 | 3,4,4,5,2
346 | 3,4,5,1,0
347 | 3,4,5,2,0
348 | 3,4,5,3,2
349 | 3,4,5,4,2
350 | 3,4,5,5,2
351 | 3,5,1,1,0
352 | 3,5,1,2,0
353 | 3,5,1,3,0
354 | 3,5,1,4,0
355 | 3,5,1,5,0
356 | 3,5,2,1,0
357 | 3,5,2,2,0
358 | 3,5,2,3,0
359 | 3,5,2,4,0
360 | 3,5,2,5,0
361 | 3,5,3,1,0
362 | 3,5,3,2,0
363 | 3,5,3,3,0
364 | 3,5,3,4,0
365 | 3,5,3,5,1
366 | 3,5,4,1,0
367 | 3,5,4,2,0
368 | 3,5,4,3,0
369 | 3,5,4,4,2
370 | 3,5,4,5,2
371 | 3,5,5,1,0
372 | 3,5,5,2,0
373 | 3,5,5,3,1
374 | 3,5,5,4,2
375 | 3,5,5,5,2
376 | 4,1,1,1,0
377 | 4,1,1,2,0
378 | 4,1,1,3,0
379 | 4,1,1,4,1
380 | 4,1,1,5,2
381 | 4,1,2,1,0
382 | 4,1,2,2,1
383 | 4,1,2,3,2
384 | 4,1,2,4,2
385 | 4,1,2,5,2
386 | 4,1,3,1,0
387 | 4,1,3,2,2
388 | 4,1,3,3,2
389 | 4,1,3,4,2
390 | 4,1,3,5,2
391 | 4,1,4,1,1
392 | 4,1,4,2,2
393 | 4,1,4,3,2
394 | 4,1,4,4,2
395 | 4,1,4,5,2
396 | 4,1,5,1,2
397 | 4,1,5,2,2
398 | 4,1,5,3,2
399 | 4,1,5,4,2
400 | 4,1,5,5,2
401 | 4,2,1,1,0
402 | 4,2,1,2,0
403 | 4,2,1,3,0
404 | 4,2,1,4,0
405 | 4,2,1,5,0
406 | 4,2,2,1,0
407 | 4,2,2,2,0
408 | 4,2,2,3,0
409 | 4,2,2,4,1
410 | 4,2,2,5,2
411 | 4,2,3,1,0
412 | 4,2,3,2,0
413 | 4,2,3,3,2
414 | 4,2,3,4,2
415 | 4,2,3,5,2
416 | 4,2,4,1,0
417 | 4,2,4,2,1
418 | 4,2,4,3,2
419 | 4,2,4,4,2
420 | 4,2,4,5,2
421 | 4,2,5,1,0
422 | 4,2,5,2,2
423 | 4,2,5,3,2
424 | 4,2,5,4,2
425 | 4,2,5,5,2
426 | 4,3,1,1,0
427 | 4,3,1,2,0
428 | 4,3,1,3,0
429 | 4,3,1,4,0
430 | 4,3,1,5,0
431 | 4,3,2,1,0
432 | 4,3,2,2,0
433 | 4,3,2,3,0
434 | 4,3,2,4,0
435 | 4,3,2,5,0
436 | 4,3,3,1,0
437 | 4,3,3,2,0
438 | 4,3,3,3,0
439 | 4,3,3,4,1
440 | 4,3,3,5,2
441 | 4,3,4,1,0
442 | 4,3,4,2,0
443 | 4,3,4,3,1
444 | 4,3,4,4,2
445 | 4,3,4,5,2
446 | 4,3,5,1,0
447 | 4,3,5,2,0
448 | 4,3,5,3,2
449 | 4,3,5,4,2
450 | 4,3,5,5,2
451 | 4,4,1,1,0
452 | 4,4,1,2,0
453 | 4,4,1,3,0
454 | 4,4,1,4,0
455 | 4,4,1,5,0
456 | 4,4,2,1,0
457 | 4,4,2,2,0
458 | 4,4,2,3,0
459 | 4,4,2,4,0
460 | 4,4,2,5,0
461 | 4,4,3,1,0
462 | 4,4,3,2,0
463 | 4,4,3,3,0
464 | 4,4,3,4,0
465 | 4,4,3,5,0
466 | 4,4,4,1,0
467 | 4,4,4,2,0
468 | 4,4,4,3,0
469 | 4,4,4,4,1
470 | 4,4,4,5,2
471 | 4,4,5,1,0
472 | 4,4,5,2,0
473 | 4,4,5,3,0
474 | 4,4,5,4,2
475 | 4,4,5,5,2
476 | 4,5,1,1,0
477 | 4,5,1,2,0
478 | 4,5,1,3,0
479 | 4,5,1,4,0
480 | 4,5,1,5,0
481 | 4,5,2,1,0
482 | 4,5,2,2,0
483 | 4,5,2,3,0
484 | 4,5,2,4,0
485 | 4,5,2,5,0
486 | 4,5,3,1,0
487 | 4,5,3,2,0
488 | 4,5,3,3,0
489 | 4,5,3,4,0
490 | 4,5,3,5,0
491 | 4,5,4,1,0
492 | 4,5,4,2,0
493 | 4,5,4,3,0
494 | 4,5,4,4,0
495 | 4,5,4,5,1
496 | 4,5,5,1,0
497 | 4,5,5,2,0
498 | 4,5,5,3,0
499 | 4,5,5,4,1
500 | 4,5,5,5,2
501 | 5,1,1,1,0
502 | 5,1,1,2,0
503 | 5,1,1,3,0
504 | 5,1,1,4,0
505 | 5,1,1,5,1
506 | 5,1,2,1,0
507 | 5,1,2,2,0
508 | 5,1,2,3,2
509 | 5,1,2,4,2
510 | 5,1,2,5,2
511 | 5,1,3,1,0
512 | 5,1,3,2,2
513 | 5,1,3,3,2
514 | 5,1,3,4,2
515 | 5,1,3,5,2
516 | 5,1,4,1,0
517 | 5,1,4,2,2
518 | 5,1,4,3,2
519 | 5,1,4,4,2
520 | 5,1,4,5,2
521 | 5,1,5,1,1
522 | 5,1,5,2,2
523 | 5,1,5,3,2
524 | 5,1,5,4,2
525 | 5,1,5,5,2
526 | 5,2,1,1,0
527 | 5,2,1,2,0
528 | 5,2,1,3,0
529 | 5,2,1,4,0
530 | 5,2,1,5,0
531 | 5,2,2,1,0
532 | 5,2,2,2,0
533 | 5,2,2,3,0
534 | 5,2,2,4,0
535 | 5,2,2,5,1
536 | 5,2,3,1,0
537 | 5,2,3,2,0
538 | 5,2,3,3,0
539 | 5,2,3,4,2
540 | 5,2,3,5,2
541 | 5,2,4,1,0
542 | 5,2,4,2,0
543 | 5,2,4,3,2
544 | 5,2,4,4,2
545 | 5,2,4,5,2
546 | 5,2,5,1,0
547 | 5,2,5,2,1
548 | 5,2,5,3,2
549 | 5,2,5,4,2
550 | 5,2,5,5,2
551 | 5,3,1,1,0
552 | 5,3,1,2,0
553 | 5,3,1,3,0
554 | 5,3,1,4,0
555 | 5,3,1,5,0
556 | 5,3,2,1,0
557 | 5,3,2,2,0
558 | 5,3,2,3,0
559 | 5,3,2,4,0
560 | 5,3,2,5,0
561 | 5,3,3,1,0
562 | 5,3,3,2,0
563 | 5,3,3,3,0
564 | 5,3,3,4,0
565 | 5,3,3,5,1
566 | 5,3,4,1,0
567 | 5,3,4,2,0
568 | 5,3,4,3,0
569 | 5,3,4,4,2
570 | 5,3,4,5,2
571 | 5,3,5,1,0
572 | 5,3,5,2,0
573 | 5,3,5,3,1
574 | 5,3,5,4,2
575 | 5,3,5,5,2
576 | 5,4,1,1,0
577 | 5,4,1,2,0
578 | 5,4,1,3,0
579 | 5,4,1,4,0
580 | 5,4,1,5,0
581 | 5,4,2,1,0
582 | 5,4,2,2,0
583 | 5,4,2,3,0
584 | 5,4,2,4,0
585 | 5,4,2,5,0
586 | 5,4,3,1,0
587 | 5,4,3,2,0
588 | 5,4,3,3,0
589 | 5,4,3,4,0
590 | 5,4,3,5,0
591 | 5,4,4,1,0
592 | 5,4,4,2,0
593 | 5,4,4,3,0
594 | 5,4,4,4,0
595 | 5,4,4,5,1
596 | 5,4,5,1,0
597 | 5,4,5,2,0
598 | 5,4,5,3,0
599 | 5,4,5,4,1
600 | 5,4,5,5,2
601 | 5,5,1,1,0
602 | 5,5,1,2,0
603 | 5,5,1,3,0
604 | 5,5,1,4,0
605 | 5,5,1,5,0
606 | 5,5,2,1,0
607 | 5,5,2,2,0
608 | 5,5,2,3,0
609 | 5,5,2,4,0
610 | 5,5,2,5,0
611 | 5,5,3,1,0
612 | 5,5,3,2,0
613 | 5,5,3,3,0
614 | 5,5,3,4,0
615 | 5,5,3,5,0
616 | 5,5,4,1,0
617 | 5,5,4,2,0
618 | 5,5,4,3,0
619 | 5,5,4,4,0
620 | 5,5,4,5,0
621 | 5,5,5,1,0
622 | 5,5,5,2,0
623 | 5,5,5,3,0
624 | 5,5,5,4,0
625 | 5,5,5,5,1
626 |
--------------------------------------------------------------------------------
/metacluster/data/seeds.csv:
--------------------------------------------------------------------------------
1 | 15.26,14.84,0.871,5.763,3.312,2.221,5.22,1
2 | 14.88,14.57,0.8811,5.554,3.333,1.018,4.956,1
3 | 14.29,14.09,0.905,5.291,3.337,2.699,4.825,1
4 | 13.84,13.94,0.8955,5.324,3.379,2.259,4.805,1
5 | 16.14,14.99,0.9034,5.658,3.562,1.355,5.175,1
6 | 14.38,14.21,0.8951,5.386,3.312,2.462,4.956,1
7 | 14.69,14.49,0.8799,5.563,3.259,3.586,5.219,1
8 | 14.11,14.1,0.8911,5.42,3.302,2.7,5,1
9 | 16.63,15.46,0.8747,6.053,3.465,2.04,5.877,1
10 | 16.44,15.25,0.888,5.884,3.505,1.969,5.533,1
11 | 15.26,14.85,0.8696,5.714,3.242,4.543,5.314,1
12 | 14.03,14.16,0.8796,5.438,3.201,1.717,5.001,1
13 | 13.89,14.02,0.888,5.439,3.199,3.986,4.738,1
14 | 13.78,14.06,0.8759,5.479,3.156,3.136,4.872,1
15 | 13.74,14.05,0.8744,5.482,3.114,2.932,4.825,1
16 | 14.59,14.28,0.8993,5.351,3.333,4.185,4.781,1
17 | 13.99,13.83,0.9183,5.119,3.383,5.234,4.781,1
18 | 15.69,14.75,0.9058,5.527,3.514,1.599,5.046,1
19 | 14.7,14.21,0.9153,5.205,3.466,1.767,4.649,1
20 | 12.72,13.57,0.8686,5.226,3.049,4.102,4.914,1
21 | 14.16,14.4,0.8584,5.658,3.129,3.072,5.176,1
22 | 14.11,14.26,0.8722,5.52,3.168,2.688,5.219,1
23 | 15.88,14.9,0.8988,5.618,3.507,0.7651,5.091,1
24 | 12.08,13.23,0.8664,5.099,2.936,1.415,4.961,1
25 | 15.01,14.76,0.8657,5.789,3.245,1.791,5.001,1
26 | 16.19,15.16,0.8849,5.833,3.421,0.903,5.307,1
27 | 13.02,13.76,0.8641,5.395,3.026,3.373,4.825,1
28 | 12.74,13.67,0.8564,5.395,2.956,2.504,4.869,1
29 | 14.11,14.18,0.882,5.541,3.221,2.754,5.038,1
30 | 13.45,14.02,0.8604,5.516,3.065,3.531,5.097,1
31 | 13.16,13.82,0.8662,5.454,2.975,0.8551,5.056,1
32 | 15.49,14.94,0.8724,5.757,3.371,3.412,5.228,1
33 | 14.09,14.41,0.8529,5.717,3.186,3.92,5.299,1
34 | 13.94,14.17,0.8728,5.585,3.15,2.124,5.012,1
35 | 15.05,14.68,0.8779,5.712,3.328,2.129,5.36,1
36 | 16.12,15,0.9,5.709,3.485,2.27,5.443,1
37 | 16.2,15.27,0.8734,5.826,3.464,2.823,5.527,1
38 | 17.08,15.38,0.9079,5.832,3.683,2.956,5.484,1
39 | 14.8,14.52,0.8823,5.656,3.288,3.112,5.309,1
40 | 14.28,14.17,0.8944,5.397,3.298,6.685,5.001,1
41 | 13.54,13.85,0.8871,5.348,3.156,2.587,5.178,1
42 | 13.5,13.85,0.8852,5.351,3.158,2.249,5.176,1
43 | 13.16,13.55,0.9009,5.138,3.201,2.461,4.783,1
44 | 15.5,14.86,0.882,5.877,3.396,4.711,5.528,1
45 | 15.11,14.54,0.8986,5.579,3.462,3.128,5.18,1
46 | 13.8,14.04,0.8794,5.376,3.155,1.56,4.961,1
47 | 15.36,14.76,0.8861,5.701,3.393,1.367,5.132,1
48 | 14.99,14.56,0.8883,5.57,3.377,2.958,5.175,1
49 | 14.79,14.52,0.8819,5.545,3.291,2.704,5.111,1
50 | 14.86,14.67,0.8676,5.678,3.258,2.129,5.351,1
51 | 14.43,14.4,0.8751,5.585,3.272,3.975,5.144,1
52 | 15.78,14.91,0.8923,5.674,3.434,5.593,5.136,1
53 | 14.49,14.61,0.8538,5.715,3.113,4.116,5.396,1
54 | 14.33,14.28,0.8831,5.504,3.199,3.328,5.224,1
55 | 14.52,14.6,0.8557,5.741,3.113,1.481,5.487,1
56 | 15.03,14.77,0.8658,5.702,3.212,1.933,5.439,1
57 | 14.46,14.35,0.8818,5.388,3.377,2.802,5.044,1
58 | 14.92,14.43,0.9006,5.384,3.412,1.142,5.088,1
59 | 15.38,14.77,0.8857,5.662,3.419,1.999,5.222,1
60 | 12.11,13.47,0.8392,5.159,3.032,1.502,4.519,1
61 | 11.42,12.86,0.8683,5.008,2.85,2.7,4.607,1
62 | 11.23,12.63,0.884,4.902,2.879,2.269,4.703,1
63 | 12.36,13.19,0.8923,5.076,3.042,3.22,4.605,1
64 | 13.22,13.84,0.868,5.395,3.07,4.157,5.088,1
65 | 12.78,13.57,0.8716,5.262,3.026,1.176,4.782,1
66 | 12.88,13.5,0.8879,5.139,3.119,2.352,4.607,1
67 | 14.34,14.37,0.8726,5.63,3.19,1.313,5.15,1
68 | 14.01,14.29,0.8625,5.609,3.158,2.217,5.132,1
69 | 14.37,14.39,0.8726,5.569,3.153,1.464,5.3,1
70 | 12.73,13.75,0.8458,5.412,2.882,3.533,5.067,1
71 | 17.63,15.98,0.8673,6.191,3.561,4.076,6.06,2
72 | 16.84,15.67,0.8623,5.998,3.484,4.675,5.877,2
73 | 17.26,15.73,0.8763,5.978,3.594,4.539,5.791,2
74 | 19.11,16.26,0.9081,6.154,3.93,2.936,6.079,2
75 | 16.82,15.51,0.8786,6.017,3.486,4.004,5.841,2
76 | 16.77,15.62,0.8638,5.927,3.438,4.92,5.795,2
77 | 17.32,15.91,0.8599,6.064,3.403,3.824,5.922,2
78 | 20.71,17.23,0.8763,6.579,3.814,4.451,6.451,2
79 | 18.94,16.49,0.875,6.445,3.639,5.064,6.362,2
80 | 17.12,15.55,0.8892,5.85,3.566,2.858,5.746,2
81 | 16.53,15.34,0.8823,5.875,3.467,5.532,5.88,2
82 | 18.72,16.19,0.8977,6.006,3.857,5.324,5.879,2
83 | 20.2,16.89,0.8894,6.285,3.864,5.173,6.187,2
84 | 19.57,16.74,0.8779,6.384,3.772,1.472,6.273,2
85 | 19.51,16.71,0.878,6.366,3.801,2.962,6.185,2
86 | 18.27,16.09,0.887,6.173,3.651,2.443,6.197,2
87 | 18.88,16.26,0.8969,6.084,3.764,1.649,6.109,2
88 | 18.98,16.66,0.859,6.549,3.67,3.691,6.498,2
89 | 21.18,17.21,0.8989,6.573,4.033,5.78,6.231,2
90 | 20.88,17.05,0.9031,6.45,4.032,5.016,6.321,2
91 | 20.1,16.99,0.8746,6.581,3.785,1.955,6.449,2
92 | 18.76,16.2,0.8984,6.172,3.796,3.12,6.053,2
93 | 18.81,16.29,0.8906,6.272,3.693,3.237,6.053,2
94 | 18.59,16.05,0.9066,6.037,3.86,6.001,5.877,2
95 | 18.36,16.52,0.8452,6.666,3.485,4.933,6.448,2
96 | 16.87,15.65,0.8648,6.139,3.463,3.696,5.967,2
97 | 19.31,16.59,0.8815,6.341,3.81,3.477,6.238,2
98 | 18.98,16.57,0.8687,6.449,3.552,2.144,6.453,2
99 | 18.17,16.26,0.8637,6.271,3.512,2.853,6.273,2
100 | 18.72,16.34,0.881,6.219,3.684,2.188,6.097,2
101 | 16.41,15.25,0.8866,5.718,3.525,4.217,5.618,2
102 | 17.99,15.86,0.8992,5.89,3.694,2.068,5.837,2
103 | 19.46,16.5,0.8985,6.113,3.892,4.308,6.009,2
104 | 19.18,16.63,0.8717,6.369,3.681,3.357,6.229,2
105 | 18.95,16.42,0.8829,6.248,3.755,3.368,6.148,2
106 | 18.83,16.29,0.8917,6.037,3.786,2.553,5.879,2
107 | 18.85,16.17,0.9056,6.152,3.806,2.843,6.2,2
108 | 17.63,15.86,0.88,6.033,3.573,3.747,5.929,2
109 | 19.94,16.92,0.8752,6.675,3.763,3.252,6.55,2
110 | 18.55,16.22,0.8865,6.153,3.674,1.738,5.894,2
111 | 18.45,16.12,0.8921,6.107,3.769,2.235,5.794,2
112 | 19.38,16.72,0.8716,6.303,3.791,3.678,5.965,2
113 | 19.13,16.31,0.9035,6.183,3.902,2.109,5.924,2
114 | 19.14,16.61,0.8722,6.259,3.737,6.682,6.053,2
115 | 20.97,17.25,0.8859,6.563,3.991,4.677,6.316,2
116 | 19.06,16.45,0.8854,6.416,3.719,2.248,6.163,2
117 | 18.96,16.2,0.9077,6.051,3.897,4.334,5.75,2
118 | 19.15,16.45,0.889,6.245,3.815,3.084,6.185,2
119 | 18.89,16.23,0.9008,6.227,3.769,3.639,5.966,2
120 | 20.03,16.9,0.8811,6.493,3.857,3.063,6.32,2
121 | 20.24,16.91,0.8897,6.315,3.962,5.901,6.188,2
122 | 18.14,16.12,0.8772,6.059,3.563,3.619,6.011,2
123 | 16.17,15.38,0.8588,5.762,3.387,4.286,5.703,2
124 | 18.43,15.97,0.9077,5.98,3.771,2.984,5.905,2
125 | 15.99,14.89,0.9064,5.363,3.582,3.336,5.144,2
126 | 18.75,16.18,0.8999,6.111,3.869,4.188,5.992,2
127 | 18.65,16.41,0.8698,6.285,3.594,4.391,6.102,2
128 | 17.98,15.85,0.8993,5.979,3.687,2.257,5.919,2
129 | 20.16,17.03,0.8735,6.513,3.773,1.91,6.185,2
130 | 17.55,15.66,0.8991,5.791,3.69,5.366,5.661,2
131 | 18.3,15.89,0.9108,5.979,3.755,2.837,5.962,2
132 | 18.94,16.32,0.8942,6.144,3.825,2.908,5.949,2
133 | 15.38,14.9,0.8706,5.884,3.268,4.462,5.795,2
134 | 16.16,15.33,0.8644,5.845,3.395,4.266,5.795,2
135 | 15.56,14.89,0.8823,5.776,3.408,4.972,5.847,2
136 | 15.38,14.66,0.899,5.477,3.465,3.6,5.439,2
137 | 17.36,15.76,0.8785,6.145,3.574,3.526,5.971,2
138 | 15.57,15.15,0.8527,5.92,3.231,2.64,5.879,2
139 | 15.6,15.11,0.858,5.832,3.286,2.725,5.752,2
140 | 16.23,15.18,0.885,5.872,3.472,3.769,5.922,2
141 | 13.07,13.92,0.848,5.472,2.994,5.304,5.395,0
142 | 13.32,13.94,0.8613,5.541,3.073,7.035,5.44,0
143 | 13.34,13.95,0.862,5.389,3.074,5.995,5.307,0
144 | 12.22,13.32,0.8652,5.224,2.967,5.469,5.221,0
145 | 11.82,13.4,0.8274,5.314,2.777,4.471,5.178,0
146 | 11.21,13.13,0.8167,5.279,2.687,6.169,5.275,0
147 | 11.43,13.13,0.8335,5.176,2.719,2.221,5.132,0
148 | 12.49,13.46,0.8658,5.267,2.967,4.421,5.002,0
149 | 12.7,13.71,0.8491,5.386,2.911,3.26,5.316,0
150 | 10.79,12.93,0.8107,5.317,2.648,5.462,5.194,0
151 | 11.83,13.23,0.8496,5.263,2.84,5.195,5.307,0
152 | 12.01,13.52,0.8249,5.405,2.776,6.992,5.27,0
153 | 12.26,13.6,0.8333,5.408,2.833,4.756,5.36,0
154 | 11.18,13.04,0.8266,5.22,2.693,3.332,5.001,0
155 | 11.36,13.05,0.8382,5.175,2.755,4.048,5.263,0
156 | 11.19,13.05,0.8253,5.25,2.675,5.813,5.219,0
157 | 11.34,12.87,0.8596,5.053,2.849,3.347,5.003,0
158 | 12.13,13.73,0.8081,5.394,2.745,4.825,5.22,0
159 | 11.75,13.52,0.8082,5.444,2.678,4.378,5.31,0
160 | 11.49,13.22,0.8263,5.304,2.695,5.388,5.31,0
161 | 12.54,13.67,0.8425,5.451,2.879,3.082,5.491,0
162 | 12.02,13.33,0.8503,5.35,2.81,4.271,5.308,0
163 | 12.05,13.41,0.8416,5.267,2.847,4.988,5.046,0
164 | 12.55,13.57,0.8558,5.333,2.968,4.419,5.176,0
165 | 11.14,12.79,0.8558,5.011,2.794,6.388,5.049,0
166 | 12.1,13.15,0.8793,5.105,2.941,2.201,5.056,0
167 | 12.44,13.59,0.8462,5.319,2.897,4.924,5.27,0
168 | 12.15,13.45,0.8443,5.417,2.837,3.638,5.338,0
169 | 11.35,13.12,0.8291,5.176,2.668,4.337,5.132,0
170 | 11.24,13,0.8359,5.09,2.715,3.521,5.088,0
171 | 11.02,13,0.8189,5.325,2.701,6.735,5.163,0
172 | 11.55,13.1,0.8455,5.167,2.845,6.715,4.956,0
173 | 11.27,12.97,0.8419,5.088,2.763,4.309,5,0
174 | 11.4,13.08,0.8375,5.136,2.763,5.588,5.089,0
175 | 10.83,12.96,0.8099,5.278,2.641,5.182,5.185,0
176 | 10.8,12.57,0.859,4.981,2.821,4.773,5.063,0
177 | 11.26,13.01,0.8355,5.186,2.71,5.335,5.092,0
178 | 10.74,12.73,0.8329,5.145,2.642,4.702,4.963,0
179 | 11.48,13.05,0.8473,5.18,2.758,5.876,5.002,0
180 | 12.21,13.47,0.8453,5.357,2.893,1.661,5.178,0
181 | 11.41,12.95,0.856,5.09,2.775,4.957,4.825,0
182 | 12.46,13.41,0.8706,5.236,3.017,4.987,5.147,0
183 | 12.19,13.36,0.8579,5.24,2.909,4.857,5.158,0
184 | 11.65,13.07,0.8575,5.108,2.85,5.209,5.135,0
185 | 12.89,13.77,0.8541,5.495,3.026,6.185,5.316,0
186 | 11.56,13.31,0.8198,5.363,2.683,4.062,5.182,0
187 | 11.81,13.45,0.8198,5.413,2.716,4.898,5.352,0
188 | 10.91,12.8,0.8372,5.088,2.675,4.179,4.956,0
189 | 11.23,12.82,0.8594,5.089,2.821,7.524,4.957,0
190 | 10.59,12.41,0.8648,4.899,2.787,4.975,4.794,0
191 | 10.93,12.8,0.839,5.046,2.717,5.398,5.045,0
192 | 11.27,12.86,0.8563,5.091,2.804,3.985,5.001,0
193 | 11.87,13.02,0.8795,5.132,2.953,3.597,5.132,0
194 | 10.82,12.83,0.8256,5.18,2.63,4.853,5.089,0
195 | 12.11,13.27,0.8639,5.236,2.975,4.132,5.012,0
196 | 12.8,13.47,0.886,5.16,3.126,4.873,4.914,0
197 | 12.79,13.53,0.8786,5.224,3.054,5.483,4.958,0
198 | 13.37,13.78,0.8849,5.32,3.128,4.67,5.091,0
199 | 12.62,13.67,0.8481,5.41,2.911,3.306,5.231,0
200 | 12.76,13.38,0.8964,5.073,3.155,2.828,4.83,0
201 | 12.38,13.44,0.8609,5.219,2.989,5.472,5.045,0
202 | 12.67,13.32,0.8977,4.984,3.135,2.3,4.745,0
203 | 11.18,12.72,0.868,5.009,2.81,4.051,4.828,0
204 | 12.7,13.41,0.8874,5.183,3.091,8.456,5,0
205 | 12.37,13.47,0.8567,5.204,2.96,3.919,5.001,0
206 | 12.19,13.2,0.8783,5.137,2.981,3.631,4.87,0
207 | 11.23,12.88,0.8511,5.14,2.795,4.325,5.003,0
208 | 13.2,13.66,0.8883,5.236,3.232,8.315,5.056,0
209 | 11.84,13.21,0.8521,5.175,2.836,3.598,5.044,0
210 | 12.3,13.34,0.8684,5.243,2.974,5.637,5.063,0
211 |
--------------------------------------------------------------------------------
/metacluster/data/Monk3.csv:
--------------------------------------------------------------------------------
1 | 1,1,1,1,1,2,2
2 | 1,1,1,1,2,1,2
3 | 1,1,1,1,2,2,2
4 | 1,1,1,1,3,1,1
5 | 1,1,1,1,4,1,1
6 | 1,1,1,2,1,1,2
7 | 1,1,1,2,2,2,2
8 | 1,1,1,2,4,2,1
9 | 1,1,2,1,2,2,2
10 | 1,1,2,1,4,2,1
11 | 1,1,2,2,2,2,2
12 | 1,1,2,2,4,1,1
13 | 1,1,2,2,4,2,1
14 | 1,1,2,3,1,1,2
15 | 1,1,2,3,1,2,2
16 | 1,1,2,3,3,1,2
17 | 1,1,2,3,3,2,2
18 | 1,2,1,1,3,1,2
19 | 1,2,1,2,2,1,2
20 | 1,2,1,2,2,2,2
21 | 1,2,1,2,3,1,1
22 | 1,2,1,3,1,1,2
23 | 1,2,1,3,1,2,2
24 | 1,2,1,3,2,1,2
25 | 1,2,1,3,2,2,2
26 | 1,2,1,3,3,2,2
27 | 1,2,1,3,4,1,1
28 | 1,2,2,1,3,1,2
29 | 1,2,2,1,4,2,1
30 | 1,2,2,2,1,1,2
31 | 1,2,2,2,2,1,2
32 | 1,2,2,2,2,2,2
33 | 1,2,2,3,1,1,2
34 | 1,2,2,3,2,1,2
35 | 1,2,2,3,2,2,2
36 | 1,3,1,1,2,1,1
37 | 1,3,1,1,4,1,1
38 | 1,3,1,2,3,2,1
39 | 1,3,1,2,4,1,1
40 | 1,3,1,3,1,1,1
41 | 1,3,1,3,3,1,1
42 | 1,3,2,1,1,1,1
43 | 1,3,2,1,1,2,1
44 | 1,3,2,1,2,1,1
45 | 1,3,2,1,4,2,1
46 | 1,3,2,2,3,2,1
47 | 1,3,2,2,4,2,1
48 | 1,3,2,3,4,1,1
49 | 2,1,1,1,1,1,2
50 | 2,1,1,1,1,2,2
51 | 2,1,1,1,4,1,1
52 | 2,1,1,1,4,2,1
53 | 2,1,1,2,1,1,2
54 | 2,1,1,2,1,2,2
55 | 2,1,1,3,2,2,2
56 | 2,1,1,3,3,2,2
57 | 2,1,1,3,4,1,1
58 | 2,1,2,1,2,2,2
59 | 2,1,2,2,4,1,1
60 | 2,1,2,3,1,2,2
61 | 2,2,1,1,3,2,2
62 | 2,2,1,1,4,2,1
63 | 2,2,1,2,1,2,2
64 | 2,2,1,2,2,1,1
65 | 2,2,1,3,1,1,2
66 | 2,2,1,3,2,2,2
67 | 2,2,1,3,3,1,1
68 | 2,2,1,3,3,2,1
69 | 2,2,1,3,4,2,1
70 | 2,2,2,1,2,2,2
71 | 2,2,2,2,1,2,2
72 | 2,2,2,2,3,1,2
73 | 2,2,2,2,3,2,2
74 | 2,2,2,3,4,1,1
75 | 2,3,1,1,3,1,2
76 | 2,3,1,2,1,1,1
77 | 2,3,1,2,2,1,1
78 | 2,3,1,2,2,2,1
79 | 2,3,1,2,3,2,1
80 | 2,3,1,3,3,1,1
81 | 2,3,2,1,1,2,1
82 | 2,3,2,1,2,2,1
83 | 2,3,2,1,4,1,1
84 | 2,3,2,2,3,1,1
85 | 2,3,2,2,4,2,1
86 | 2,3,2,3,1,1,1
87 | 2,3,2,3,2,1,1
88 | 2,3,2,3,4,2,1
89 | 3,1,1,1,1,1,2
90 | 3,1,1,1,2,1,2
91 | 3,1,1,1,3,1,2
92 | 3,1,1,2,4,2,1
93 | 3,1,1,3,1,2,2
94 | 3,1,1,3,4,2,1
95 | 3,1,2,1,2,1,2
96 | 3,1,2,2,3,2,2
97 | 3,1,2,2,4,2,1
98 | 3,1,2,3,1,1,2
99 | 3,2,1,1,2,2,2
100 | 3,2,1,1,4,1,1
101 | 3,2,1,2,3,1,2
102 | 3,2,1,3,1,2,2
103 | 3,2,2,1,2,2,2
104 | 3,2,2,1,3,2,2
105 | 3,2,2,2,1,2,2
106 | 3,2,2,3,1,1,2
107 | 3,2,2,3,3,2,2
108 | 3,2,2,3,4,1,1
109 | 3,3,1,1,3,2,2
110 | 3,3,1,1,4,1,2
111 | 3,3,1,2,4,2,1
112 | 3,3,1,3,1,1,1
113 | 3,3,1,3,2,1,1
114 | 3,3,1,3,2,2,1
115 | 3,3,1,3,4,1,1
116 | 3,3,2,1,1,1,1
117 | 3,3,2,1,1,2,1
118 | 3,3,2,2,2,2,1
119 | 3,3,2,2,3,2,1
120 | 3,3,2,3,1,1,1
121 | 3,3,2,3,3,2,1
122 | 3,3,2,3,4,2,1
123 | 1,1,1,1,1,1,2
124 | 1,1,1,1,1,2,2
125 | 1,1,1,1,2,1,2
126 | 1,1,1,1,2,2,2
127 | 1,1,1,1,3,1,2
128 | 1,1,1,1,3,2,2
129 | 1,1,1,1,4,1,1
130 | 1,1,1,1,4,2,1
131 | 1,1,1,2,1,1,2
132 | 1,1,1,2,1,2,2
133 | 1,1,1,2,2,1,2
134 | 1,1,1,2,2,2,2
135 | 1,1,1,2,3,1,2
136 | 1,1,1,2,3,2,2
137 | 1,1,1,2,4,1,1
138 | 1,1,1,2,4,2,1
139 | 1,1,1,3,1,1,2
140 | 1,1,1,3,1,2,2
141 | 1,1,1,3,2,1,2
142 | 1,1,1,3,2,2,2
143 | 1,1,1,3,3,1,2
144 | 1,1,1,3,3,2,2
145 | 1,1,1,3,4,1,1
146 | 1,1,1,3,4,2,1
147 | 1,1,2,1,1,1,2
148 | 1,1,2,1,1,2,2
149 | 1,1,2,1,2,1,2
150 | 1,1,2,1,2,2,2
151 | 1,1,2,1,3,1,2
152 | 1,1,2,1,3,2,2
153 | 1,1,2,1,4,1,1
154 | 1,1,2,1,4,2,1
155 | 1,1,2,2,1,1,2
156 | 1,1,2,2,1,2,2
157 | 1,1,2,2,2,1,2
158 | 1,1,2,2,2,2,2
159 | 1,1,2,2,3,1,2
160 | 1,1,2,2,3,2,2
161 | 1,1,2,2,4,1,1
162 | 1,1,2,2,4,2,1
163 | 1,1,2,3,1,1,2
164 | 1,1,2,3,1,2,2
165 | 1,1,2,3,2,1,2
166 | 1,1,2,3,2,2,2
167 | 1,1,2,3,3,1,2
168 | 1,1,2,3,3,2,2
169 | 1,1,2,3,4,1,1
170 | 1,1,2,3,4,2,1
171 | 1,2,1,1,1,1,2
172 | 1,2,1,1,1,2,2
173 | 1,2,1,1,2,1,2
174 | 1,2,1,1,2,2,2
175 | 1,2,1,1,3,1,2
176 | 1,2,1,1,3,2,2
177 | 1,2,1,1,4,1,1
178 | 1,2,1,1,4,2,1
179 | 1,2,1,2,1,1,2
180 | 1,2,1,2,1,2,2
181 | 1,2,1,2,2,1,2
182 | 1,2,1,2,2,2,2
183 | 1,2,1,2,3,1,2
184 | 1,2,1,2,3,2,2
185 | 1,2,1,2,4,1,1
186 | 1,2,1,2,4,2,1
187 | 1,2,1,3,1,1,2
188 | 1,2,1,3,1,2,2
189 | 1,2,1,3,2,1,2
190 | 1,2,1,3,2,2,2
191 | 1,2,1,3,3,1,2
192 | 1,2,1,3,3,2,2
193 | 1,2,1,3,4,1,1
194 | 1,2,1,3,4,2,1
195 | 1,2,2,1,1,1,2
196 | 1,2,2,1,1,2,2
197 | 1,2,2,1,2,1,2
198 | 1,2,2,1,2,2,2
199 | 1,2,2,1,3,1,2
200 | 1,2,2,1,3,2,2
201 | 1,2,2,1,4,1,1
202 | 1,2,2,1,4,2,1
203 | 1,2,2,2,1,1,2
204 | 1,2,2,2,1,2,2
205 | 1,2,2,2,2,1,2
206 | 1,2,2,2,2,2,2
207 | 1,2,2,2,3,1,2
208 | 1,2,2,2,3,2,2
209 | 1,2,2,2,4,1,1
210 | 1,2,2,2,4,2,1
211 | 1,2,2,3,1,1,2
212 | 1,2,2,3,1,2,2
213 | 1,2,2,3,2,1,2
214 | 1,2,2,3,2,2,2
215 | 1,2,2,3,3,1,2
216 | 1,2,2,3,3,2,2
217 | 1,2,2,3,4,1,1
218 | 1,2,2,3,4,2,1
219 | 1,3,1,1,1,1,1
220 | 1,3,1,1,1,2,1
221 | 1,3,1,1,2,1,1
222 | 1,3,1,1,2,2,1
223 | 1,3,1,1,3,1,2
224 | 1,3,1,1,3,2,2
225 | 1,3,1,1,4,1,1
226 | 1,3,1,1,4,2,1
227 | 1,3,1,2,1,1,1
228 | 1,3,1,2,1,2,1
229 | 1,3,1,2,2,1,1
230 | 1,3,1,2,2,2,1
231 | 1,3,1,2,3,1,1
232 | 1,3,1,2,3,2,1
233 | 1,3,1,2,4,1,1
234 | 1,3,1,2,4,2,1
235 | 1,3,1,3,1,1,1
236 | 1,3,1,3,1,2,1
237 | 1,3,1,3,2,1,1
238 | 1,3,1,3,2,2,1
239 | 1,3,1,3,3,1,1
240 | 1,3,1,3,3,2,1
241 | 1,3,1,3,4,1,1
242 | 1,3,1,3,4,2,1
243 | 1,3,2,1,1,1,1
244 | 1,3,2,1,1,2,1
245 | 1,3,2,1,2,1,1
246 | 1,3,2,1,2,2,1
247 | 1,3,2,1,3,1,2
248 | 1,3,2,1,3,2,2
249 | 1,3,2,1,4,1,1
250 | 1,3,2,1,4,2,1
251 | 1,3,2,2,1,1,1
252 | 1,3,2,2,1,2,1
253 | 1,3,2,2,2,1,1
254 | 1,3,2,2,2,2,1
255 | 1,3,2,2,3,1,1
256 | 1,3,2,2,3,2,1
257 | 1,3,2,2,4,1,1
258 | 1,3,2,2,4,2,1
259 | 1,3,2,3,1,1,1
260 | 1,3,2,3,1,2,1
261 | 1,3,2,3,2,1,1
262 | 1,3,2,3,2,2,1
263 | 1,3,2,3,3,1,1
264 | 1,3,2,3,3,2,1
265 | 1,3,2,3,4,1,1
266 | 1,3,2,3,4,2,1
267 | 2,1,1,1,1,1,2
268 | 2,1,1,1,1,2,2
269 | 2,1,1,1,2,1,2
270 | 2,1,1,1,2,2,2
271 | 2,1,1,1,3,1,2
272 | 2,1,1,1,3,2,2
273 | 2,1,1,1,4,1,1
274 | 2,1,1,1,4,2,1
275 | 2,1,1,2,1,1,2
276 | 2,1,1,2,1,2,2
277 | 2,1,1,2,2,1,2
278 | 2,1,1,2,2,2,2
279 | 2,1,1,2,3,1,2
280 | 2,1,1,2,3,2,2
281 | 2,1,1,2,4,1,1
282 | 2,1,1,2,4,2,1
283 | 2,1,1,3,1,1,2
284 | 2,1,1,3,1,2,2
285 | 2,1,1,3,2,1,2
286 | 2,1,1,3,2,2,2
287 | 2,1,1,3,3,1,2
288 | 2,1,1,3,3,2,2
289 | 2,1,1,3,4,1,1
290 | 2,1,1,3,4,2,1
291 | 2,1,2,1,1,1,2
292 | 2,1,2,1,1,2,2
293 | 2,1,2,1,2,1,2
294 | 2,1,2,1,2,2,2
295 | 2,1,2,1,3,1,2
296 | 2,1,2,1,3,2,2
297 | 2,1,2,1,4,1,1
298 | 2,1,2,1,4,2,1
299 | 2,1,2,2,1,1,2
300 | 2,1,2,2,1,2,2
301 | 2,1,2,2,2,1,2
302 | 2,1,2,2,2,2,2
303 | 2,1,2,2,3,1,2
304 | 2,1,2,2,3,2,2
305 | 2,1,2,2,4,1,1
306 | 2,1,2,2,4,2,1
307 | 2,1,2,3,1,1,2
308 | 2,1,2,3,1,2,2
309 | 2,1,2,3,2,1,2
310 | 2,1,2,3,2,2,2
311 | 2,1,2,3,3,1,2
312 | 2,1,2,3,3,2,2
313 | 2,1,2,3,4,1,1
314 | 2,1,2,3,4,2,1
315 | 2,2,1,1,1,1,2
316 | 2,2,1,1,1,2,2
317 | 2,2,1,1,2,1,2
318 | 2,2,1,1,2,2,2
319 | 2,2,1,1,3,1,2
320 | 2,2,1,1,3,2,2
321 | 2,2,1,1,4,1,1
322 | 2,2,1,1,4,2,1
323 | 2,2,1,2,1,1,2
324 | 2,2,1,2,1,2,2
325 | 2,2,1,2,2,1,2
326 | 2,2,1,2,2,2,2
327 | 2,2,1,2,3,1,2
328 | 2,2,1,2,3,2,2
329 | 2,2,1,2,4,1,1
330 | 2,2,1,2,4,2,1
331 | 2,2,1,3,1,1,2
332 | 2,2,1,3,1,2,2
333 | 2,2,1,3,2,1,2
334 | 2,2,1,3,2,2,2
335 | 2,2,1,3,3,1,2
336 | 2,2,1,3,3,2,2
337 | 2,2,1,3,4,1,1
338 | 2,2,1,3,4,2,1
339 | 2,2,2,1,1,1,2
340 | 2,2,2,1,1,2,2
341 | 2,2,2,1,2,1,2
342 | 2,2,2,1,2,2,2
343 | 2,2,2,1,3,1,2
344 | 2,2,2,1,3,2,2
345 | 2,2,2,1,4,1,1
346 | 2,2,2,1,4,2,1
347 | 2,2,2,2,1,1,2
348 | 2,2,2,2,1,2,2
349 | 2,2,2,2,2,1,2
350 | 2,2,2,2,2,2,2
351 | 2,2,2,2,3,1,2
352 | 2,2,2,2,3,2,2
353 | 2,2,2,2,4,1,1
354 | 2,2,2,2,4,2,1
355 | 2,2,2,3,1,1,2
356 | 2,2,2,3,1,2,2
357 | 2,2,2,3,2,1,2
358 | 2,2,2,3,2,2,2
359 | 2,2,2,3,3,1,2
360 | 2,2,2,3,3,2,2
361 | 2,2,2,3,4,1,1
362 | 2,2,2,3,4,2,1
363 | 2,3,1,1,1,1,1
364 | 2,3,1,1,1,2,1
365 | 2,3,1,1,2,1,1
366 | 2,3,1,1,2,2,1
367 | 2,3,1,1,3,1,2
368 | 2,3,1,1,3,2,2
369 | 2,3,1,1,4,1,1
370 | 2,3,1,1,4,2,1
371 | 2,3,1,2,1,1,1
372 | 2,3,1,2,1,2,1
373 | 2,3,1,2,2,1,1
374 | 2,3,1,2,2,2,1
375 | 2,3,1,2,3,1,1
376 | 2,3,1,2,3,2,1
377 | 2,3,1,2,4,1,1
378 | 2,3,1,2,4,2,1
379 | 2,3,1,3,1,1,1
380 | 2,3,1,3,1,2,1
381 | 2,3,1,3,2,1,1
382 | 2,3,1,3,2,2,1
383 | 2,3,1,3,3,1,1
384 | 2,3,1,3,3,2,1
385 | 2,3,1,3,4,1,1
386 | 2,3,1,3,4,2,1
387 | 2,3,2,1,1,1,1
388 | 2,3,2,1,1,2,1
389 | 2,3,2,1,2,1,1
390 | 2,3,2,1,2,2,1
391 | 2,3,2,1,3,1,2
392 | 2,3,2,1,3,2,2
393 | 2,3,2,1,4,1,1
394 | 2,3,2,1,4,2,1
395 | 2,3,2,2,1,1,1
396 | 2,3,2,2,1,2,1
397 | 2,3,2,2,2,1,1
398 | 2,3,2,2,2,2,1
399 | 2,3,2,2,3,1,1
400 | 2,3,2,2,3,2,1
401 | 2,3,2,2,4,1,1
402 | 2,3,2,2,4,2,1
403 | 2,3,2,3,1,1,1
404 | 2,3,2,3,1,2,1
405 | 2,3,2,3,2,1,1
406 | 2,3,2,3,2,2,1
407 | 2,3,2,3,3,1,1
408 | 2,3,2,3,3,2,1
409 | 2,3,2,3,4,1,1
410 | 2,3,2,3,4,2,1
411 | 3,1,1,1,1,1,2
412 | 3,1,1,1,1,2,2
413 | 3,1,1,1,2,1,2
414 | 3,1,1,1,2,2,2
415 | 3,1,1,1,3,1,2
416 | 3,1,1,1,3,2,2
417 | 3,1,1,1,4,1,1
418 | 3,1,1,1,4,2,1
419 | 3,1,1,2,1,1,2
420 | 3,1,1,2,1,2,2
421 | 3,1,1,2,2,1,2
422 | 3,1,1,2,2,2,2
423 | 3,1,1,2,3,1,2
424 | 3,1,1,2,3,2,2
425 | 3,1,1,2,4,1,1
426 | 3,1,1,2,4,2,1
427 | 3,1,1,3,1,1,2
428 | 3,1,1,3,1,2,2
429 | 3,1,1,3,2,1,2
430 | 3,1,1,3,2,2,2
431 | 3,1,1,3,3,1,2
432 | 3,1,1,3,3,2,2
433 | 3,1,1,3,4,1,1
434 | 3,1,1,3,4,2,1
435 | 3,1,2,1,1,1,2
436 | 3,1,2,1,1,2,2
437 | 3,1,2,1,2,1,2
438 | 3,1,2,1,2,2,2
439 | 3,1,2,1,3,1,2
440 | 3,1,2,1,3,2,2
441 | 3,1,2,1,4,1,1
442 | 3,1,2,1,4,2,1
443 | 3,1,2,2,1,1,2
444 | 3,1,2,2,1,2,2
445 | 3,1,2,2,2,1,2
446 | 3,1,2,2,2,2,2
447 | 3,1,2,2,3,1,2
448 | 3,1,2,2,3,2,2
449 | 3,1,2,2,4,1,1
450 | 3,1,2,2,4,2,1
451 | 3,1,2,3,1,1,2
452 | 3,1,2,3,1,2,2
453 | 3,1,2,3,2,1,2
454 | 3,1,2,3,2,2,2
455 | 3,1,2,3,3,1,2
456 | 3,1,2,3,3,2,2
457 | 3,1,2,3,4,1,1
458 | 3,1,2,3,4,2,1
459 | 3,2,1,1,1,1,2
460 | 3,2,1,1,1,2,2
461 | 3,2,1,1,2,1,2
462 | 3,2,1,1,2,2,2
463 | 3,2,1,1,3,1,2
464 | 3,2,1,1,3,2,2
465 | 3,2,1,1,4,1,1
466 | 3,2,1,1,4,2,1
467 | 3,2,1,2,1,1,2
468 | 3,2,1,2,1,2,2
469 | 3,2,1,2,2,1,2
470 | 3,2,1,2,2,2,2
471 | 3,2,1,2,3,1,2
472 | 3,2,1,2,3,2,2
473 | 3,2,1,2,4,1,1
474 | 3,2,1,2,4,2,1
475 | 3,2,1,3,1,1,2
476 | 3,2,1,3,1,2,2
477 | 3,2,1,3,2,1,2
478 | 3,2,1,3,2,2,2
479 | 3,2,1,3,3,1,2
480 | 3,2,1,3,3,2,2
481 | 3,2,1,3,4,1,1
482 | 3,2,1,3,4,2,1
483 | 3,2,2,1,1,1,2
484 | 3,2,2,1,1,2,2
485 | 3,2,2,1,2,1,2
486 | 3,2,2,1,2,2,2
487 | 3,2,2,1,3,1,2
488 | 3,2,2,1,3,2,2
489 | 3,2,2,1,4,1,1
490 | 3,2,2,1,4,2,1
491 | 3,2,2,2,1,1,2
492 | 3,2,2,2,1,2,2
493 | 3,2,2,2,2,1,2
494 | 3,2,2,2,2,2,2
495 | 3,2,2,2,3,1,2
496 | 3,2,2,2,3,2,2
497 | 3,2,2,2,4,1,1
498 | 3,2,2,2,4,2,1
499 | 3,2,2,3,1,1,2
500 | 3,2,2,3,1,2,2
501 | 3,2,2,3,2,1,2
502 | 3,2,2,3,2,2,2
503 | 3,2,2,3,3,1,2
504 | 3,2,2,3,3,2,2
505 | 3,2,2,3,4,1,1
506 | 3,2,2,3,4,2,1
507 | 3,3,1,1,1,1,1
508 | 3,3,1,1,1,2,1
509 | 3,3,1,1,2,1,1
510 | 3,3,1,1,2,2,1
511 | 3,3,1,1,3,1,2
512 | 3,3,1,1,3,2,2
513 | 3,3,1,1,4,1,1
514 | 3,3,1,1,4,2,1
515 | 3,3,1,2,1,1,1
516 | 3,3,1,2,1,2,1
517 | 3,3,1,2,2,1,1
518 | 3,3,1,2,2,2,1
519 | 3,3,1,2,3,1,1
520 | 3,3,1,2,3,2,1
521 | 3,3,1,2,4,1,1
522 | 3,3,1,2,4,2,1
523 | 3,3,1,3,1,1,1
524 | 3,3,1,3,1,2,1
525 | 3,3,1,3,2,1,1
526 | 3,3,1,3,2,2,1
527 | 3,3,1,3,3,1,1
528 | 3,3,1,3,3,2,1
529 | 3,3,1,3,4,1,1
530 | 3,3,1,3,4,2,1
531 | 3,3,2,1,1,1,1
532 | 3,3,2,1,1,2,1
533 | 3,3,2,1,2,1,1
534 | 3,3,2,1,2,2,1
535 | 3,3,2,1,3,1,2
536 | 3,3,2,1,3,2,2
537 | 3,3,2,1,4,1,1
538 | 3,3,2,1,4,2,1
539 | 3,3,2,2,1,1,1
540 | 3,3,2,2,1,2,1
541 | 3,3,2,2,2,1,1
542 | 3,3,2,2,2,2,1
543 | 3,3,2,2,3,1,1
544 | 3,3,2,2,3,2,1
545 | 3,3,2,2,4,1,1
546 | 3,3,2,2,4,2,1
547 | 3,3,2,3,1,1,1
548 | 3,3,2,3,1,2,1
549 | 3,3,2,3,2,1,1
550 | 3,3,2,3,2,2,1
551 | 3,3,2,3,3,1,1
552 | 3,3,2,3,3,2,1
553 | 3,3,2,3,4,1,1
554 | 3,3,2,3,4,2,1
555 |
--------------------------------------------------------------------------------
/metacluster/data/Monk1.csv:
--------------------------------------------------------------------------------
1 | 1.9431,1,1,1,3,1,2
2 | 1,1,1,1,3,2,2
3 | 1,1,1,3,2,1,2
4 | 1,1,1,3,3,2,2
5 | 1,1,2,1,2,1,2
6 | 1,1,2,1,2,2,2
7 | 1,1,2,2,3,1,2
8 | 1,1,2,2,4,1,2
9 | 1,1,2,3,1,2,2
10 | 1,2,1,1,1,2,2
11 | 1,2,1,1,2,1,1
12 | 1,2,1,1,3,1,1
13 | 1,2,1,1,4,2,1
14 | 1,2,1,2,1,1,2
15 | 1,2,1,2,3,1,1
16 | 1,2,1,2,3,2,1
17 | 1,2,1,2,4,2,1
18 | 1,2,1,3,2,1,1
19 | 1,2,1,3,4,2,1
20 | 1,2,2,1,2,2,1
21 | 1,2,2,2,3,2,1
22 | 1,2,2,2,4,1,1
23 | 1,2,2,2,4,2,1
24 | 1,2,2,3,2,2,1
25 | 1,2,2,3,3,1,1
26 | 1,2,2,3,3,2,1
27 | 1,3,1,1,2,1,1
28 | 1,3,1,1,4,1,1
29 | 1,3,1,2,2,1,1
30 | 1,3,1,2,4,1,1
31 | 1,3,1,3,1,2,2
32 | 1,3,1,3,2,2,1
33 | 1,3,1,3,3,1,1
34 | 1,3,1,3,4,1,1
35 | 1,3,1,3,4,2,1
36 | 1,3,2,1,2,2,1
37 | 1,3,2,2,1,2,2
38 | 1,3,2,2,2,2,1
39 | 1,3,2,2,3,2,1
40 | 1,3,2,2,4,1,1
41 | 1,3,2,2,4,2,1
42 | 1,3,2,3,1,1,2
43 | 1,3,2,3,2,1,1
44 | 1,3,2,3,4,1,1
45 | 1,3,2,3,4,2,1
46 | 2,1,1,1,3,1,1
47 | 2,1,1,1,3,2,1
48 | 2,1,1,2,1,1,2
49 | 2,1,1,2,1,2,2
50 | 2,1,1,2,2,2,1
51 | 2,1,1,2,3,1,1
52 | 2,1,1,2,4,1,1
53 | 2,1,1,2,4,2,1
54 | 2,1,1,3,4,1,1
55 | 2,1,2,1,2,2,1
56 | 2,1,2,1,3,1,1
57 | 2,1,2,1,4,2,1
58 | 2,1,2,2,3,1,1
59 | 2,1,2,2,4,2,1
60 | 2,1,2,3,2,2,1
61 | 2,1,2,3,4,1,1
62 | 2,2,1,1,2,1,2
63 | 2,2,1,1,2,2,2
64 | 2,2,1,1,3,1,2
65 | 2,2,1,2,3,2,2
66 | 2,2,1,3,1,1,2
67 | 2,2,1,3,1,2,2
68 | 2,2,1,3,2,2,2
69 | 2,2,1,3,3,2,2
70 | 2,2,1,3,4,2,2
71 | 2,2,2,1,1,1,2
72 | 2,2,2,1,3,2,2
73 | 2,2,2,1,4,1,2
74 | 2,2,2,1,4,2,2
75 | 2,2,2,2,2,1,2
76 | 2,2,2,3,4,1,2
77 | 2,3,1,1,1,1,2
78 | 2,3,1,2,1,1,2
79 | 2,3,1,2,3,1,1
80 | 2,3,1,3,1,2,2
81 | 2,3,1,3,3,1,1
82 | 2,3,1,3,4,2,1
83 | 2,3,2,1,3,2,1
84 | 2,3,2,2,1,1,2
85 | 2,3,2,2,1,2,2
86 | 2,3,2,2,2,1,1
87 | 2,3,2,3,3,2,1
88 | 3,1,1,1,1,1,2
89 | 3,1,1,1,1,2,2
90 | 3,1,1,2,1,1,2
91 | 3,1,1,2,2,2,1
92 | 3,1,1,3,2,2,1
93 | 3,1,2,1,1,1,2
94 | 3,1,2,1,2,2,1
95 | 3,1,2,2,2,2,1
96 | 3,1,2,2,3,2,1
97 | 3,1,2,3,2,2,1
98 | 3,2,1,1,1,1,2
99 | 3,2,1,1,4,2,1
100 | 3,2,1,2,1,2,2
101 | 3,2,1,2,4,2,1
102 | 3,2,2,1,1,1,2
103 | 3,2,2,1,1,2,2
104 | 3,2,2,1,3,2,1
105 | 3,2,2,3,1,1,2
106 | 3,2,2,3,2,1,1
107 | 3,2,2,3,4,1,1
108 | 3,3,1,1,1,1,2
109 | 3,3,1,1,2,1,2
110 | 3,3,1,1,4,2,2
111 | 3,3,1,2,3,2,2
112 | 3,3,1,2,4,2,2
113 | 3,3,1,3,1,2,2
114 | 3,3,1,3,2,1,2
115 | 3,3,1,3,2,2,2
116 | 3,3,1,3,4,2,2
117 | 3,3,2,1,1,1,2
118 | 3,3,2,1,3,2,2
119 | 3,3,2,1,4,1,2
120 | 3,3,2,1,4,2,2
121 | 3,3,2,3,1,2,2
122 | 3,3,2,3,2,2,2
123 | 3,3,2,3,3,2,2
124 | 3,3,2,3,4,2,2
125 | 1,1,1,1,1,1,2
126 | 1,1,1,1,1,2,2
127 | 1,1,1,1,2,1,2
128 | 1,1,1,1,2,2,2
129 | 1,1,1,1,3,1,2
130 | 1,1,1,1,3,2,2
131 | 1,1,1,1,4,1,2
132 | 1,1,1,1,4,2,2
133 | 1,1,1,2,1,1,2
134 | 1,1,1,2,1,2,2
135 | 1,1,1,2,2,1,2
136 | 1,1,1,2,2,2,2
137 | 1,1,1,2,3,1,2
138 | 1,1,1,2,3,2,2
139 | 1,1,1,2,4,1,2
140 | 1,1,1,2,4,2,2
141 | 1,1,1,3,1,1,2
142 | 1,1,1,3,1,2,2
143 | 1,1,1,3,2,1,2
144 | 1,1,1,3,2,2,2
145 | 1,1,1,3,3,1,2
146 | 1,1,1,3,3,2,2
147 | 1,1,1,3,4,1,2
148 | 1,1,1,3,4,2,2
149 | 1,1,2,1,1,1,2
150 | 1,1,2,1,1,2,2
151 | 1,1,2,1,2,1,2
152 | 1,1,2,1,2,2,2
153 | 1,1,2,1,3,1,2
154 | 1,1,2,1,3,2,2
155 | 1,1,2,1,4,1,2
156 | 1,1,2,1,4,2,2
157 | 1,1,2,2,1,1,2
158 | 1,1,2,2,1,2,2
159 | 1,1,2,2,2,1,2
160 | 1,1,2,2,2,2,2
161 | 1,1,2,2,3,1,2
162 | 1,1,2,2,3,2,2
163 | 1,1,2,2,4,1,2
164 | 1,1,2,2,4,2,2
165 | 1,1,2,3,1,1,2
166 | 1,1,2,3,1,2,2
167 | 1,1,2,3,2,1,2
168 | 1,1,2,3,2,2,2
169 | 1,1,2,3,3,1,2
170 | 1,1,2,3,3,2,2
171 | 1,1,2,3,4,1,2
172 | 1,1,2,3,4,2,2
173 | 1,2,1,1,1,1,2
174 | 1,2,1,1,1,2,2
175 | 1,2,1,1,2,1,1
176 | 1,2,1,1,2,2,1
177 | 1,2,1,1,3,1,1
178 | 1,2,1,1,3,2,1
179 | 1,2,1,1,4,1,1
180 | 1,2,1,1,4,2,1
181 | 1,2,1,2,1,1,2
182 | 1,2,1,2,1,2,2
183 | 1,2,1,2,2,1,1
184 | 1,2,1,2,2,2,1
185 | 1,2,1,2,3,1,1
186 | 1,2,1,2,3,2,1
187 | 1,2,1,2,4,1,1
188 | 1,2,1,2,4,2,1
189 | 1,2,1,3,1,1,2
190 | 1,2,1,3,1,2,2
191 | 1,2,1,3,2,1,1
192 | 1,2,1,3,2,2,1
193 | 1,2,1,3,3,1,1
194 | 1,2,1,3,3,2,1
195 | 1,2,1,3,4,1,1
196 | 1,2,1,3,4,2,1
197 | 1,2,2,1,1,1,2
198 | 1,2,2,1,1,2,2
199 | 1,2,2,1,2,1,1
200 | 1,2,2,1,2,2,1
201 | 1,2,2,1,3,1,1
202 | 1,2,2,1,3,2,1
203 | 1,2,2,1,4,1,1
204 | 1,2,2,1,4,2,1
205 | 1,2,2,2,1,1,2
206 | 1,2,2,2,1,2,2
207 | 1,2,2,2,2,1,1
208 | 1,2,2,2,2,2,1
209 | 1,2,2,2,3,1,1
210 | 1,2,2,2,3,2,1
211 | 1,2,2,2,4,1,1
212 | 1,2,2,2,4,2,1
213 | 1,2,2,3,1,1,2
214 | 1,2,2,3,1,2,2
215 | 1,2,2,3,2,1,1
216 | 1,2,2,3,2,2,1
217 | 1,2,2,3,3,1,1
218 | 1,2,2,3,3,2,1
219 | 1,2,2,3,4,1,1
220 | 1,2,2,3,4,2,1
221 | 1,3,1,1,1,1,2
222 | 1,3,1,1,1,2,2
223 | 1,3,1,1,2,1,1
224 | 1,3,1,1,2,2,1
225 | 1,3,1,1,3,1,1
226 | 1,3,1,1,3,2,1
227 | 1,3,1,1,4,1,1
228 | 1,3,1,1,4,2,1
229 | 1,3,1,2,1,1,2
230 | 1,3,1,2,1,2,2
231 | 1,3,1,2,2,1,1
232 | 1,3,1,2,2,2,1
233 | 1,3,1,2,3,1,1
234 | 1,3,1,2,3,2,1
235 | 1,3,1,2,4,1,1
236 | 1,3,1,2,4,2,1
237 | 1,3,1,3,1,1,2
238 | 1,3,1,3,1,2,2
239 | 1,3,1,3,2,1,1
240 | 1,3,1,3,2,2,1
241 | 1,3,1,3,3,1,1
242 | 1,3,1,3,3,2,1
243 | 1,3,1,3,4,1,1
244 | 1,3,1,3,4,2,1
245 | 1,3,2,1,1,1,2
246 | 1,3,2,1,1,2,2
247 | 1,3,2,1,2,1,1
248 | 1,3,2,1,2,2,1
249 | 1,3,2,1,3,1,1
250 | 1,3,2,1,3,2,1
251 | 1,3,2,1,4,1,1
252 | 1,3,2,1,4,2,1
253 | 1,3,2,2,1,1,2
254 | 1,3,2,2,1,2,2
255 | 1,3,2,2,2,1,1
256 | 1,3,2,2,2,2,1
257 | 1,3,2,2,3,1,1
258 | 1,3,2,2,3,2,1
259 | 1,3,2,2,4,1,1
260 | 1,3,2,2,4,2,1
261 | 1,3,2,3,1,1,2
262 | 1,3,2,3,1,2,2
263 | 1,3,2,3,2,1,1
264 | 1,3,2,3,2,2,1
265 | 1,3,2,3,3,1,1
266 | 1,3,2,3,3,2,1
267 | 1,3,2,3,4,1,1
268 | 1,3,2,3,4,2,1
269 | 2,1,1,1,1,1,2
270 | 2,1,1,1,1,2,2
271 | 2,1,1,1,2,1,1
272 | 2,1,1,1,2,2,1
273 | 2,1,1,1,3,1,1
274 | 2,1,1,1,3,2,1
275 | 2,1,1,1,4,1,1
276 | 2,1,1,1,4,2,1
277 | 2,1,1,2,1,1,2
278 | 2,1,1,2,1,2,2
279 | 2,1,1,2,2,1,1
280 | 2,1,1,2,2,2,1
281 | 2,1,1,2,3,1,1
282 | 2,1,1,2,3,2,1
283 | 2,1,1,2,4,1,1
284 | 2,1,1,2,4,2,1
285 | 2,1,1,3,1,1,2
286 | 2,1,1,3,1,2,2
287 | 2,1,1,3,2,1,1
288 | 2,1,1,3,2,2,1
289 | 2,1,1,3,3,1,1
290 | 2,1,1,3,3,2,1
291 | 2,1,1,3,4,1,1
292 | 2,1,1,3,4,2,1
293 | 2,1,2,1,1,1,2
294 | 2,1,2,1,1,2,2
295 | 2,1,2,1,2,1,1
296 | 2,1,2,1,2,2,1
297 | 2,1,2,1,3,1,1
298 | 2,1,2,1,3,2,1
299 | 2,1,2,1,4,1,1
300 | 2,1,2,1,4,2,1
301 | 2,1,2,2,1,1,2
302 | 2,1,2,2,1,2,2
303 | 2,1,2,2,2,1,1
304 | 2,1,2,2,2,2,1
305 | 2,1,2,2,3,1,1
306 | 2,1,2,2,3,2,1
307 | 2,1,2,2,4,1,1
308 | 2,1,2,2,4,2,1
309 | 2,1,2,3,1,1,2
310 | 2,1,2,3,1,2,2
311 | 2,1,2,3,2,1,1
312 | 2,1,2,3,2,2,1
313 | 2,1,2,3,3,1,1
314 | 2,1,2,3,3,2,1
315 | 2,1,2,3,4,1,1
316 | 2,1,2,3,4,2,1
317 | 2,2,1,1,1,1,2
318 | 2,2,1,1,1,2,2
319 | 2,2,1,1,2,1,2
320 | 2,2,1,1,2,2,2
321 | 2,2,1,1,3,1,2
322 | 2,2,1,1,3,2,2
323 | 2,2,1,1,4,1,2
324 | 2,2,1,1,4,2,2
325 | 2,2,1,2,1,1,2
326 | 2,2,1,2,1,2,2
327 | 2,2,1,2,2,1,2
328 | 2,2,1,2,2,2,2
329 | 2,2,1,2,3,1,2
330 | 2,2,1,2,3,2,2
331 | 2,2,1,2,4,1,2
332 | 2,2,1,2,4,2,2
333 | 2,2,1,3,1,1,2
334 | 2,2,1,3,1,2,2
335 | 2,2,1,3,2,1,2
336 | 2,2,1,3,2,2,2
337 | 2,2,1,3,3,1,2
338 | 2,2,1,3,3,2,2
339 | 2,2,1,3,4,1,2
340 | 2,2,1,3,4,2,2
341 | 2,2,2,1,1,1,2
342 | 2,2,2,1,1,2,2
343 | 2,2,2,1,2,1,2
344 | 2,2,2,1,2,2,2
345 | 2,2,2,1,3,1,2
346 | 2,2,2,1,3,2,2
347 | 2,2,2,1,4,1,2
348 | 2,2,2,1,4,2,2
349 | 2,2,2,2,1,1,2
350 | 2,2,2,2,1,2,2
351 | 2,2,2,2,2,1,2
352 | 2,2,2,2,2,2,2
353 | 2,2,2,2,3,1,2
354 | 2,2,2,2,3,2,2
355 | 2,2,2,2,4,1,2
356 | 2,2,2,2,4,2,2
357 | 2,2,2,3,1,1,2
358 | 2,2,2,3,1,2,2
359 | 2,2,2,3,2,1,2
360 | 2,2,2,3,2,2,2
361 | 2,2,2,3,3,1,2
362 | 2,2,2,3,3,2,2
363 | 2,2,2,3,4,1,2
364 | 2,2,2,3,4,2,2
365 | 2,3,1,1,1,1,2
366 | 2,3,1,1,1,2,2
367 | 2,3,1,1,2,1,1
368 | 2,3,1,1,2,2,1
369 | 2,3,1,1,3,1,1
370 | 2,3,1,1,3,2,1
371 | 2,3,1,1,4,1,1
372 | 2,3,1,1,4,2,1
373 | 2,3,1,2,1,1,2
374 | 2,3,1,2,1,2,2
375 | 2,3,1,2,2,1,1
376 | 2,3,1,2,2,2,1
377 | 2,3,1,2,3,1,1
378 | 2,3,1,2,3,2,1
379 | 2,3,1,2,4,1,1
380 | 2,3,1,2,4,2,1
381 | 2,3,1,3,1,1,2
382 | 2,3,1,3,1,2,2
383 | 2,3,1,3,2,1,1
384 | 2,3,1,3,2,2,1
385 | 2,3,1,3,3,1,1
386 | 2,3,1,3,3,2,1
387 | 2,3,1,3,4,1,1
388 | 2,3,1,3,4,2,1
389 | 2,3,2,1,1,1,2
390 | 2,3,2,1,1,2,2
391 | 2,3,2,1,2,1,1
392 | 2,3,2,1,2,2,1
393 | 2,3,2,1,3,1,1
394 | 2,3,2,1,3,2,1
395 | 2,3,2,1,4,1,1
396 | 2,3,2,1,4,2,1
397 | 2,3,2,2,1,1,2
398 | 2,3,2,2,1,2,2
399 | 2,3,2,2,2,1,1
400 | 2,3,2,2,2,2,1
401 | 2,3,2,2,3,1,1
402 | 2,3,2,2,3,2,1
403 | 2,3,2,2,4,1,1
404 | 2,3,2,2,4,2,1
405 | 2,3,2,3,1,1,2
406 | 2,3,2,3,1,2,2
407 | 2,3,2,3,2,1,1
408 | 2,3,2,3,2,2,1
409 | 2,3,2,3,3,1,1
410 | 2,3,2,3,3,2,1
411 | 2,3,2,3,4,1,1
412 | 2,3,2,3,4,2,1
413 | 3,1,1,1,1,1,2
414 | 3,1,1,1,1,2,2
415 | 3,1,1,1,2,1,1
416 | 3,1,1,1,2,2,1
417 | 3,1,1,1,3,1,1
418 | 3,1,1,1,3,2,1
419 | 3,1,1,1,4,1,1
420 | 3,1,1,1,4,2,1
421 | 3,1,1,2,1,1,2
422 | 3,1,1,2,1,2,2
423 | 3,1,1,2,2,1,1
424 | 3,1,1,2,2,2,1
425 | 3,1,1,2,3,1,1
426 | 3,1,1,2,3,2,1
427 | 3,1,1,2,4,1,1
428 | 3,1,1,2,4,2,1
429 | 3,1,1,3,1,1,2
430 | 3,1,1,3,1,2,2
431 | 3,1,1,3,2,1,1
432 | 3,1,1,3,2,2,1
433 | 3,1,1,3,3,1,1
434 | 3,1,1,3,3,2,1
435 | 3,1,1,3,4,1,1
436 | 3,1,1,3,4,2,1
437 | 3,1,2,1,1,1,2
438 | 3,1,2,1,1,2,2
439 | 3,1,2,1,2,1,1
440 | 3,1,2,1,2,2,1
441 | 3,1,2,1,3,1,1
442 | 3,1,2,1,3,2,1
443 | 3,1,2,1,4,1,1
444 | 3,1,2,1,4,2,1
445 | 3,1,2,2,1,1,2
446 | 3,1,2,2,1,2,2
447 | 3,1,2,2,2,1,1
448 | 3,1,2,2,2,2,1
449 | 3,1,2,2,3,1,1
450 | 3,1,2,2,3,2,1
451 | 3,1,2,2,4,1,1
452 | 3,1,2,2,4,2,1
453 | 3,1,2,3,1,1,2
454 | 3,1,2,3,1,2,2
455 | 3,1,2,3,2,1,1
456 | 3,1,2,3,2,2,1
457 | 3,1,2,3,3,1,1
458 | 3,1,2,3,3,2,1
459 | 3,1,2,3,4,1,1
460 | 3,1,2,3,4,2,1
461 | 3,2,1,1,1,1,2
462 | 3,2,1,1,1,2,2
463 | 3,2,1,1,2,1,1
464 | 3,2,1,1,2,2,1
465 | 3,2,1,1,3,1,1
466 | 3,2,1,1,3,2,1
467 | 3,2,1,1,4,1,1
468 | 3,2,1,1,4,2,1
469 | 3,2,1,2,1,1,2
470 | 3,2,1,2,1,2,2
471 | 3,2,1,2,2,1,1
472 | 3,2,1,2,2,2,1
473 | 3,2,1,2,3,1,1
474 | 3,2,1,2,3,2,1
475 | 3,2,1,2,4,1,1
476 | 3,2,1,2,4,2,1
477 | 3,2,1,3,1,1,2
478 | 3,2,1,3,1,2,2
479 | 3,2,1,3,2,1,1
480 | 3,2,1,3,2,2,1
481 | 3,2,1,3,3,1,1
482 | 3,2,1,3,3,2,1
483 | 3,2,1,3,4,1,1
484 | 3,2,1,3,4,2,1
485 | 3,2,2,1,1,1,2
486 | 3,2,2,1,1,2,2
487 | 3,2,2,1,2,1,1
488 | 3,2,2,1,2,2,1
489 | 3,2,2,1,3,1,1
490 | 3,2,2,1,3,2,1
491 | 3,2,2,1,4,1,1
492 | 3,2,2,1,4,2,1
493 | 3,2,2,2,1,1,2
494 | 3,2,2,2,1,2,2
495 | 3,2,2,2,2,1,1
496 | 3,2,2,2,2,2,1
497 | 3,2,2,2,3,1,1
498 | 3,2,2,2,3,2,1
499 | 3,2,2,2,4,1,1
500 | 3,2,2,2,4,2,1
501 | 3,2,2,3,1,1,2
502 | 3,2,2,3,1,2,2
503 | 3,2,2,3,2,1,1
504 | 3,2,2,3,2,2,1
505 | 3,2,2,3,3,1,1
506 | 3,2,2,3,3,2,1
507 | 3,2,2,3,4,1,1
508 | 3,2,2,3,4,2,1
509 | 3,3,1,1,1,1,2
510 | 3,3,1,1,1,2,2
511 | 3,3,1,1,2,1,2
512 | 3,3,1,1,2,2,2
513 | 3,3,1,1,3,1,2
514 | 3,3,1,1,3,2,2
515 | 3,3,1,1,4,1,2
516 | 3,3,1,1,4,2,2
517 | 3,3,1,2,1,1,2
518 | 3,3,1,2,1,2,2
519 | 3,3,1,2,2,1,2
520 | 3,3,1,2,2,2,2
521 | 3,3,1,2,3,1,2
522 | 3,3,1,2,3,2,2
523 | 3,3,1,2,4,1,2
524 | 3,3,1,2,4,2,2
525 | 3,3,1,3,1,1,2
526 | 3,3,1,3,1,2,2
527 | 3,3,1,3,2,1,2
528 | 3,3,1,3,2,2,2
529 | 3,3,1,3,3,1,2
530 | 3,3,1,3,3,2,2
531 | 3,3,1,3,4,1,2
532 | 3,3,1,3,4,2,2
533 | 3,3,2,1,1,1,2
534 | 3,3,2,1,1,2,2
535 | 3,3,2,1,2,1,2
536 | 3,3,2,1,2,2,2
537 | 3,3,2,1,3,1,2
538 | 3,3,2,1,3,2,2
539 | 3,3,2,1,4,1,2
540 | 3,3,2,1,4,2,2
541 | 3,3,2,2,1,1,2
542 | 3,3,2,2,1,2,2
543 | 3,3,2,2,2,1,2
544 | 3,3,2,2,2,2,2
545 | 3,3,2,2,3,1,2
546 | 3,3,2,2,3,2,2
547 | 3,3,2,2,4,1,2
548 | 3,3,2,2,4,2,2
549 | 3,3,2,3,1,1,2
550 | 3,3,2,3,1,2,2
551 | 3,3,2,3,2,1,2
552 | 3,3,2,3,2,2,2
553 | 3,3,2,3,3,1,2
554 | 3,3,2,3,3,2,2
555 | 3,3,2,3,4,1,2
556 | 3,3,2,3,4,2,2
557 |
--------------------------------------------------------------------------------
/metacluster/data/heart.csv:
--------------------------------------------------------------------------------
1 | 70,1,4,130,322,0,2,109,0,2.4,2,3,3,0
2 | 67,0,3,115,564,0,2,160,0,1.6,2,0,7,1
3 | 57,1,2,124,261,0,0,141,0,0.3,1,0,7,0
4 | 64,1,4,128,263,0,0,105,1,0.2,2,1,7,1
5 | 74,0,2,120,269,0,2,121,1,0.2,1,1,3,1
6 | 65,1,4,120,177,0,0,140,0,0.4,1,0,7,1
7 | 56,1,3,130,256,1,2,142,1,0.6,2,1,6,0
8 | 59,1,4,110,239,0,2,142,1,1.2,2,1,7,0
9 | 60,1,4,140,293,0,2,170,0,1.2,2,2,7,0
10 | 63,0,4,150,407,0,2,154,0,4,2,3,7,0
11 | 59,1,4,135,234,0,0,161,0,0.5,2,0,7,1
12 | 53,1,4,142,226,0,2,111,1,0,1,0,7,1
13 | 44,1,3,140,235,0,2,180,0,0,1,0,3,1
14 | 61,1,1,134,234,0,0,145,0,2.6,2,2,3,0
15 | 57,0,4,128,303,0,2,159,0,0,1,1,3,1
16 | 71,0,4,112,149,0,0,125,0,1.6,2,0,3,1
17 | 46,1,4,140,311,0,0,120,1,1.8,2,2,7,0
18 | 53,1,4,140,203,1,2,155,1,3.1,3,0,7,0
19 | 64,1,1,110,211,0,2,144,1,1.8,2,0,3,1
20 | 40,1,1,140,199,0,0,178,1,1.4,1,0,7,1
21 | 67,1,4,120,229,0,2,129,1,2.6,2,2,7,0
22 | 48,1,2,130,245,0,2,180,0,0.2,2,0,3,1
23 | 43,1,4,115,303,0,0,181,0,1.2,2,0,3,1
24 | 47,1,4,112,204,0,0,143,0,0.1,1,0,3,1
25 | 54,0,2,132,288,1,2,159,1,0,1,1,3,1
26 | 48,0,3,130,275,0,0,139,0,0.2,1,0,3,1
27 | 46,0,4,138,243,0,2,152,1,0,2,0,3,1
28 | 51,0,3,120,295,0,2,157,0,0.6,1,0,3,1
29 | 58,1,3,112,230,0,2,165,0,2.5,2,1,7,0
30 | 71,0,3,110,265,1,2,130,0,0,1,1,3,1
31 | 57,1,3,128,229,0,2,150,0,0.4,2,1,7,0
32 | 66,1,4,160,228,0,2,138,0,2.3,1,0,6,1
33 | 37,0,3,120,215,0,0,170,0,0,1,0,3,1
34 | 59,1,4,170,326,0,2,140,1,3.4,3,0,7,0
35 | 50,1,4,144,200,0,2,126,1,0.9,2,0,7,0
36 | 48,1,4,130,256,1,2,150,1,0,1,2,7,0
37 | 61,1,4,140,207,0,2,138,1,1.9,1,1,7,0
38 | 59,1,1,160,273,0,2,125,0,0,1,0,3,0
39 | 42,1,3,130,180,0,0,150,0,0,1,0,3,1
40 | 48,1,4,122,222,0,2,186,0,0,1,0,3,1
41 | 40,1,4,152,223,0,0,181,0,0,1,0,7,0
42 | 62,0,4,124,209,0,0,163,0,0,1,0,3,1
43 | 44,1,3,130,233,0,0,179,1,0.4,1,0,3,1
44 | 46,1,2,101,197,1,0,156,0,0,1,0,7,1
45 | 59,1,3,126,218,1,0,134,0,2.2,2,1,6,0
46 | 58,1,3,140,211,1,2,165,0,0,1,0,3,1
47 | 49,1,3,118,149,0,2,126,0,0.8,1,3,3,0
48 | 44,1,4,110,197,0,2,177,0,0,1,1,3,0
49 | 66,1,2,160,246,0,0,120,1,0,2,3,6,0
50 | 65,0,4,150,225,0,2,114,0,1,2,3,7,0
51 | 42,1,4,136,315,0,0,125,1,1.8,2,0,6,0
52 | 52,1,2,128,205,1,0,184,0,0,1,0,3,1
53 | 65,0,3,140,417,1,2,157,0,0.8,1,1,3,1
54 | 63,0,2,140,195,0,0,179,0,0,1,2,3,1
55 | 45,0,2,130,234,0,2,175,0,0.6,2,0,3,1
56 | 41,0,2,105,198,0,0,168,0,0,1,1,3,1
57 | 61,1,4,138,166,0,2,125,1,3.6,2,1,3,0
58 | 60,0,3,120,178,1,0,96,0,0,1,0,3,1
59 | 59,0,4,174,249,0,0,143,1,0,2,0,3,0
60 | 62,1,2,120,281,0,2,103,0,1.4,2,1,7,0
61 | 57,1,3,150,126,1,0,173,0,0.2,1,1,7,1
62 | 51,0,4,130,305,0,0,142,1,1.2,2,0,7,0
63 | 44,1,3,120,226,0,0,169,0,0,1,0,3,1
64 | 60,0,1,150,240,0,0,171,0,0.9,1,0,3,1
65 | 63,1,1,145,233,1,2,150,0,2.3,3,0,6,1
66 | 57,1,4,150,276,0,2,112,1,0.6,2,1,6,0
67 | 51,1,4,140,261,0,2,186,1,0,1,0,3,1
68 | 58,0,2,136,319,1,2,152,0,0,1,2,3,0
69 | 44,0,3,118,242,0,0,149,0,0.3,2,1,3,1
70 | 47,1,3,108,243,0,0,152,0,0,1,0,3,0
71 | 61,1,4,120,260,0,0,140,1,3.6,2,1,7,0
72 | 57,0,4,120,354,0,0,163,1,0.6,1,0,3,1
73 | 70,1,2,156,245,0,2,143,0,0,1,0,3,1
74 | 76,0,3,140,197,0,1,116,0,1.1,2,0,3,1
75 | 67,0,4,106,223,0,0,142,0,0.3,1,2,3,1
76 | 45,1,4,142,309,0,2,147,1,0,2,3,7,0
77 | 45,1,4,104,208,0,2,148,1,3,2,0,3,1
78 | 39,0,3,94,199,0,0,179,0,0,1,0,3,1
79 | 42,0,3,120,209,0,0,173,0,0,2,0,3,1
80 | 56,1,2,120,236,0,0,178,0,0.8,1,0,3,1
81 | 58,1,4,146,218,0,0,105,0,2,2,1,7,0
82 | 35,1,4,120,198,0,0,130,1,1.6,2,0,7,0
83 | 58,1,4,150,270,0,2,111,1,0.8,1,0,7,0
84 | 41,1,3,130,214,0,2,168,0,2,2,0,3,1
85 | 57,1,4,110,201,0,0,126,1,1.5,2,0,6,1
86 | 42,1,1,148,244,0,2,178,0,0.8,1,2,3,1
87 | 62,1,2,128,208,1,2,140,0,0,1,0,3,1
88 | 59,1,1,178,270,0,2,145,0,4.2,3,0,7,1
89 | 41,0,2,126,306,0,0,163,0,0,1,0,3,1
90 | 50,1,4,150,243,0,2,128,0,2.6,2,0,7,0
91 | 59,1,2,140,221,0,0,164,1,0,1,0,3,1
92 | 61,0,4,130,330,0,2,169,0,0,1,0,3,0
93 | 54,1,4,124,266,0,2,109,1,2.2,2,1,7,0
94 | 54,1,4,110,206,0,2,108,1,0,2,1,3,0
95 | 52,1,4,125,212,0,0,168,0,1,1,2,7,0
96 | 47,1,4,110,275,0,2,118,1,1,2,1,3,0
97 | 66,1,4,120,302,0,2,151,0,0.4,2,0,3,1
98 | 58,1,4,100,234,0,0,156,0,0.1,1,1,7,0
99 | 64,0,3,140,313,0,0,133,0,0.2,1,0,7,1
100 | 50,0,2,120,244,0,0,162,0,1.1,1,0,3,1
101 | 44,0,3,108,141,0,0,175,0,0.6,2,0,3,1
102 | 67,1,4,120,237,0,0,71,0,1,2,0,3,0
103 | 49,0,4,130,269,0,0,163,0,0,1,0,3,1
104 | 57,1,4,165,289,1,2,124,0,1,2,3,7,0
105 | 63,1,4,130,254,0,2,147,0,1.4,2,1,7,0
106 | 48,1,4,124,274,0,2,166,0,0.5,2,0,7,0
107 | 51,1,3,100,222,0,0,143,1,1.2,2,0,3,1
108 | 60,0,4,150,258,0,2,157,0,2.6,2,2,7,0
109 | 59,1,4,140,177,0,0,162,1,0,1,1,7,0
110 | 45,0,2,112,160,0,0,138,0,0,2,0,3,1
111 | 55,0,4,180,327,0,1,117,1,3.4,2,0,3,0
112 | 41,1,2,110,235,0,0,153,0,0,1,0,3,1
113 | 60,0,4,158,305,0,2,161,0,0,1,0,3,0
114 | 54,0,3,135,304,1,0,170,0,0,1,0,3,1
115 | 42,1,2,120,295,0,0,162,0,0,1,0,3,1
116 | 49,0,2,134,271,0,0,162,0,0,2,0,3,1
117 | 46,1,4,120,249,0,2,144,0,0.8,1,0,7,0
118 | 56,0,4,200,288,1,2,133,1,4,3,2,7,0
119 | 66,0,1,150,226,0,0,114,0,2.6,3,0,3,1
120 | 56,1,4,130,283,1,2,103,1,1.6,3,0,7,0
121 | 49,1,3,120,188,0,0,139,0,2,2,3,7,0
122 | 54,1,4,122,286,0,2,116,1,3.2,2,2,3,0
123 | 57,1,4,152,274,0,0,88,1,1.2,2,1,7,0
124 | 65,0,3,160,360,0,2,151,0,0.8,1,0,3,1
125 | 54,1,3,125,273,0,2,152,0,0.5,3,1,3,1
126 | 54,0,3,160,201,0,0,163,0,0,1,1,3,1
127 | 62,1,4,120,267,0,0,99,1,1.8,2,2,7,0
128 | 52,0,3,136,196,0,2,169,0,0.1,2,0,3,1
129 | 52,1,2,134,201,0,0,158,0,0.8,1,1,3,1
130 | 60,1,4,117,230,1,0,160,1,1.4,1,2,7,0
131 | 63,0,4,108,269,0,0,169,1,1.8,2,2,3,0
132 | 66,1,4,112,212,0,2,132,1,0.1,1,1,3,0
133 | 42,1,4,140,226,0,0,178,0,0,1,0,3,1
134 | 64,1,4,120,246,0,2,96,1,2.2,3,1,3,0
135 | 54,1,3,150,232,0,2,165,0,1.6,1,0,7,1
136 | 46,0,3,142,177,0,2,160,1,1.4,3,0,3,1
137 | 67,0,3,152,277,0,0,172,0,0,1,1,3,1
138 | 56,1,4,125,249,1,2,144,1,1.2,2,1,3,0
139 | 34,0,2,118,210,0,0,192,0,0.7,1,0,3,1
140 | 57,1,4,132,207,0,0,168,1,0,1,0,7,1
141 | 64,1,4,145,212,0,2,132,0,2,2,2,6,0
142 | 59,1,4,138,271,0,2,182,0,0,1,0,3,1
143 | 50,1,3,140,233,0,0,163,0,0.6,2,1,7,0
144 | 51,1,1,125,213,0,2,125,1,1.4,1,1,3,1
145 | 54,1,2,192,283,0,2,195,0,0,1,1,7,0
146 | 53,1,4,123,282,0,0,95,1,2,2,2,7,0
147 | 52,1,4,112,230,0,0,160,0,0,1,1,3,0
148 | 40,1,4,110,167,0,2,114,1,2,2,0,7,0
149 | 58,1,3,132,224,0,2,173,0,3.2,1,2,7,0
150 | 41,0,3,112,268,0,2,172,1,0,1,0,3,1
151 | 41,1,3,112,250,0,0,179,0,0,1,0,3,1
152 | 50,0,3,120,219,0,0,158,0,1.6,2,0,3,1
153 | 54,0,3,108,267,0,2,167,0,0,1,0,3,1
154 | 64,0,4,130,303,0,0,122,0,2,2,2,3,1
155 | 51,0,3,130,256,0,2,149,0,0.5,1,0,3,1
156 | 46,0,2,105,204,0,0,172,0,0,1,0,3,1
157 | 55,1,4,140,217,0,0,111,1,5.6,3,0,7,0
158 | 45,1,2,128,308,0,2,170,0,0,1,0,3,1
159 | 56,1,1,120,193,0,2,162,0,1.9,2,0,7,1
160 | 66,0,4,178,228,1,0,165,1,1,2,2,7,0
161 | 38,1,1,120,231,0,0,182,1,3.8,2,0,7,0
162 | 62,0,4,150,244,0,0,154,1,1.4,2,0,3,0
163 | 55,1,2,130,262,0,0,155,0,0,1,0,3,1
164 | 58,1,4,128,259,0,2,130,1,3,2,2,7,0
165 | 43,1,4,110,211,0,0,161,0,0,1,0,7,1
166 | 64,0,4,180,325,0,0,154,1,0,1,0,3,1
167 | 50,0,4,110,254,0,2,159,0,0,1,0,3,1
168 | 53,1,3,130,197,1,2,152,0,1.2,3,0,3,1
169 | 45,0,4,138,236,0,2,152,1,0.2,2,0,3,1
170 | 65,1,1,138,282,1,2,174,0,1.4,2,1,3,0
171 | 69,1,1,160,234,1,2,131,0,0.1,2,1,3,1
172 | 69,1,3,140,254,0,2,146,0,2,2,3,7,0
173 | 67,1,4,100,299,0,2,125,1,0.9,2,2,3,0
174 | 68,0,3,120,211,0,2,115,0,1.5,2,0,3,1
175 | 34,1,1,118,182,0,2,174,0,0,1,0,3,1
176 | 62,0,4,138,294,1,0,106,0,1.9,2,3,3,0
177 | 51,1,4,140,298,0,0,122,1,4.2,2,3,7,0
178 | 46,1,3,150,231,0,0,147,0,3.6,2,0,3,0
179 | 67,1,4,125,254,1,0,163,0,0.2,2,2,7,0
180 | 50,1,3,129,196,0,0,163,0,0,1,0,3,1
181 | 42,1,3,120,240,1,0,194,0,0.8,3,0,7,1
182 | 56,0,4,134,409,0,2,150,1,1.9,2,2,7,0
183 | 41,1,4,110,172,0,2,158,0,0,1,0,7,0
184 | 42,0,4,102,265,0,2,122,0,0.6,2,0,3,1
185 | 53,1,3,130,246,1,2,173,0,0,1,3,3,1
186 | 43,1,3,130,315,0,0,162,0,1.9,1,1,3,1
187 | 56,1,4,132,184,0,2,105,1,2.1,2,1,6,0
188 | 52,1,4,108,233,1,0,147,0,0.1,1,3,7,1
189 | 62,0,4,140,394,0,2,157,0,1.2,2,0,3,1
190 | 70,1,3,160,269,0,0,112,1,2.9,2,1,7,0
191 | 54,1,4,140,239,0,0,160,0,1.2,1,0,3,1
192 | 70,1,4,145,174,0,0,125,1,2.6,3,0,7,0
193 | 54,1,2,108,309,0,0,156,0,0,1,0,7,1
194 | 35,1,4,126,282,0,2,156,1,0,1,0,7,0
195 | 48,1,3,124,255,1,0,175,0,0,1,2,3,1
196 | 55,0,2,135,250,0,2,161,0,1.4,2,0,3,1
197 | 58,0,4,100,248,0,2,122,0,1,2,0,3,1
198 | 54,0,3,110,214,0,0,158,0,1.6,2,0,3,1
199 | 69,0,1,140,239,0,0,151,0,1.8,1,2,3,1
200 | 77,1,4,125,304,0,2,162,1,0,1,3,3,0
201 | 68,1,3,118,277,0,0,151,0,1,1,1,7,1
202 | 58,1,4,125,300,0,2,171,0,0,1,2,7,0
203 | 60,1,4,125,258,0,2,141,1,2.8,2,1,7,0
204 | 51,1,4,140,299,0,0,173,1,1.6,1,0,7,0
205 | 55,1,4,160,289,0,2,145,1,0.8,2,1,7,0
206 | 52,1,1,152,298,1,0,178,0,1.2,2,0,7,1
207 | 60,0,3,102,318,0,0,160,0,0,1,1,3,1
208 | 58,1,3,105,240,0,2,154,1,0.6,2,0,7,1
209 | 64,1,3,125,309,0,0,131,1,1.8,2,0,7,0
210 | 37,1,3,130,250,0,0,187,0,3.5,3,0,3,1
211 | 59,1,1,170,288,0,2,159,0,0.2,2,0,7,0
212 | 51,1,3,125,245,1,2,166,0,2.4,2,0,3,1
213 | 43,0,3,122,213,0,0,165,0,0.2,2,0,3,1
214 | 58,1,4,128,216,0,2,131,1,2.2,2,3,7,0
215 | 29,1,2,130,204,0,2,202,0,0,1,0,3,1
216 | 41,0,2,130,204,0,2,172,0,1.4,1,0,3,1
217 | 63,0,3,135,252,0,2,172,0,0,1,0,3,1
218 | 51,1,3,94,227,0,0,154,1,0,1,1,7,1
219 | 54,1,3,120,258,0,2,147,0,0.4,2,0,7,1
220 | 44,1,2,120,220,0,0,170,0,0,1,0,3,1
221 | 54,1,4,110,239,0,0,126,1,2.8,2,1,7,0
222 | 65,1,4,135,254,0,2,127,0,2.8,2,1,7,0
223 | 57,1,3,150,168,0,0,174,0,1.6,1,0,3,1
224 | 63,1,4,130,330,1,2,132,1,1.8,1,3,7,0
225 | 35,0,4,138,183,0,0,182,0,1.4,1,0,3,1
226 | 41,1,2,135,203,0,0,132,0,0,2,0,6,1
227 | 62,0,3,130,263,0,0,97,0,1.2,2,1,7,0
228 | 43,0,4,132,341,1,2,136,1,3,2,0,7,0
229 | 58,0,1,150,283,1,2,162,0,1,1,0,3,1
230 | 52,1,1,118,186,0,2,190,0,0,2,0,6,1
231 | 61,0,4,145,307,0,2,146,1,1,2,0,7,0
232 | 39,1,4,118,219,0,0,140,0,1.2,2,0,7,0
233 | 45,1,4,115,260,0,2,185,0,0,1,0,3,1
234 | 52,1,4,128,255,0,0,161,1,0,1,1,7,0
235 | 62,1,3,130,231,0,0,146,0,1.8,2,3,7,1
236 | 62,0,4,160,164,0,2,145,0,6.2,3,3,7,0
237 | 53,0,4,138,234,0,2,160,0,0,1,0,3,1
238 | 43,1,4,120,177,0,2,120,1,2.5,2,0,7,0
239 | 47,1,3,138,257,0,2,156,0,0,1,0,3,1
240 | 52,1,2,120,325,0,0,172,0,0.2,1,0,3,1
241 | 68,1,3,180,274,1,2,150,1,1.6,2,0,7,0
242 | 39,1,3,140,321,0,2,182,0,0,1,0,3,1
243 | 53,0,4,130,264,0,2,143,0,0.4,2,0,3,1
244 | 62,0,4,140,268,0,2,160,0,3.6,3,2,3,0
245 | 51,0,3,140,308,0,2,142,0,1.5,1,1,3,1
246 | 60,1,4,130,253,0,0,144,1,1.4,1,1,7,0
247 | 65,1,4,110,248,0,2,158,0,0.6,1,2,6,0
248 | 65,0,3,155,269,0,0,148,0,0.8,1,0,3,1
249 | 60,1,3,140,185,0,2,155,0,3,2,0,3,0
250 | 60,1,4,145,282,0,2,142,1,2.8,2,2,7,0
251 | 54,1,4,120,188,0,0,113,0,1.4,2,1,7,0
252 | 44,1,2,130,219,0,2,188,0,0,1,0,3,1
253 | 44,1,4,112,290,0,2,153,0,0,1,1,3,0
254 | 51,1,3,110,175,0,0,123,0,0.6,1,0,3,1
255 | 59,1,3,150,212,1,0,157,0,1.6,1,0,3,1
256 | 71,0,2,160,302,0,0,162,0,0.4,1,2,3,1
257 | 61,1,3,150,243,1,0,137,1,1,2,0,3,1
258 | 55,1,4,132,353,0,0,132,1,1.2,2,1,7,0
259 | 64,1,3,140,335,0,0,158,0,0,1,0,3,0
260 | 43,1,4,150,247,0,0,171,0,1.5,1,0,3,1
261 | 58,0,3,120,340,0,0,172,0,0,1,0,3,1
262 | 60,1,4,130,206,0,2,132,1,2.4,2,2,7,0
263 | 58,1,2,120,284,0,2,160,0,1.8,2,0,3,0
264 | 49,1,2,130,266,0,0,171,0,0.6,1,0,3,1
265 | 48,1,2,110,229,0,0,168,0,1,3,0,7,0
266 | 52,1,3,172,199,1,0,162,0,0.5,1,0,7,1
267 | 44,1,2,120,263,0,0,173,0,0,1,0,7,1
268 | 56,0,2,140,294,0,2,153,0,1.3,2,0,3,1
269 | 57,1,4,140,192,0,0,148,0,0.4,2,0,6,1
270 | 67,1,4,160,286,0,2,108,1,1.5,2,3,3,0
271 |
--------------------------------------------------------------------------------
/metacluster/data/HeartEW.csv:
--------------------------------------------------------------------------------
1 | 34,1,1,118,182,0,2,174,0,0,1,0,3,1
2 | 53,1,4,140,203,1,2,155,1,3.1,3,0,7,2
3 | 61,1,1,134,234,0,0,145,0,2.6,2,2,3,2
4 | 68,1,3,118,277,0,0,151,0,1,1,1,7,1
5 | 54,1,4,122,286,0,2,116,1,3.2,2,2,3,2
6 | 48,1,2,110,229,0,0,168,0,1,3,0,7,2
7 | 55,1,4,132,353,0,0,132,1,1.2,2,1,7,2
8 | 64,1,3,125,309,0,0,131,1,1.8,2,0,7,2
9 | 48,0,3,130,275,0,0,139,0,0.2,1,0,3,1
10 | 54,1,3,120,258,0,2,147,0,0.4,2,0,7,1
11 | 55,0,4,180,327,0,1,117,1,3.4,2,0,3,2
12 | 51,1,3,125,245,1,2,166,0,2.4,2,0,3,1
13 | 62,0,4,140,268,0,2,160,0,3.6,3,2,3,2
14 | 64,0,4,180,325,0,0,154,1,0,1,0,3,1
15 | 65,0,3,155,269,0,0,148,0,0.8,1,0,3,1
16 | 42,1,1,148,244,0,2,178,0,0.8,1,2,3,1
17 | 65,1,4,120,177,0,0,140,0,0.4,1,0,7,1
18 | 43,1,4,120,177,0,2,120,1,2.5,2,0,7,2
19 | 54,1,4,140,239,0,0,160,0,1.2,1,0,3,1
20 | 45,1,2,128,308,0,2,170,0,0,1,0,3,1
21 | 40,1,4,152,223,0,0,181,0,0,1,0,7,2
22 | 58,1,4,100,234,0,0,156,0,0.1,1,1,7,2
23 | 47,1,3,138,257,0,2,156,0,0,1,0,3,1
24 | 41,1,3,130,214,0,2,168,0,2,2,0,3,1
25 | 57,1,4,110,201,0,0,126,1,1.5,2,0,6,1
26 | 56,1,1,120,193,0,2,162,0,1.9,2,0,7,1
27 | 57,0,4,120,354,0,0,163,1,0.6,1,0,3,1
28 | 63,0,2,140,195,0,0,179,0,0,1,2,3,1
29 | 58,1,4,125,300,0,2,171,0,0,1,2,7,2
30 | 29,1,2,130,204,0,2,202,0,0,1,0,3,1
31 | 62,0,3,130,263,0,0,97,0,1.2,2,1,7,2
32 | 61,1,3,150,243,1,0,137,1,1,2,0,3,1
33 | 66,0,4,178,228,1,0,165,1,1,2,2,7,2
34 | 47,1,4,110,275,0,2,118,1,1,2,1,3,2
35 | 52,1,2,120,325,0,0,172,0,0.2,1,0,3,1
36 | 63,0,3,135,252,0,2,172,0,0,1,0,3,1
37 | 43,1,4,110,211,0,0,161,0,0,1,0,7,1
38 | 60,0,4,150,258,0,2,157,0,2.6,2,2,7,2
39 | 71,0,3,110,265,1,2,130,0,0,1,1,3,1
40 | 41,0,3,112,268,0,2,172,1,0,1,0,3,1
41 | 68,1,3,180,274,1,2,150,1,1.6,2,0,7,2
42 | 51,1,1,125,213,0,2,125,1,1.4,1,1,3,1
43 | 46,0,4,138,243,0,2,152,1,0,2,0,3,1
44 | 59,1,3,150,212,1,0,157,0,1.6,1,0,3,1
45 | 42,1,3,130,180,0,0,150,0,0,1,0,3,1
46 | 51,1,3,94,227,0,0,154,1,0,1,1,7,1
47 | 55,1,2,130,262,0,0,155,0,0,1,0,3,1
48 | 37,0,3,120,215,0,0,170,0,0,1,0,3,1
49 | 52,1,4,125,212,0,0,168,0,1,1,2,7,2
50 | 58,1,4,128,216,0,2,131,1,2.2,2,3,7,2
51 | 54,0,3,160,201,0,0,163,0,0,1,1,3,1
52 | 41,1,3,112,250,0,0,179,0,0,1,0,3,1
53 | 41,0,2,130,204,0,2,172,0,1.4,1,0,3,1
54 | 62,0,4,160,164,0,2,145,0,6.2,3,3,7,2
55 | 39,1,4,118,219,0,0,140,0,1.2,2,0,7,2
56 | 42,1,4,136,315,0,0,125,1,1.8,2,0,6,2
57 | 52,1,1,152,298,1,0,178,0,1.2,2,0,7,1
58 | 46,0,3,142,177,0,2,160,1,1.4,3,0,3,1
59 | 58,1,4,150,270,0,2,111,1,0.8,1,0,7,2
60 | 59,1,4,110,239,0,2,142,1,1.2,2,1,7,2
61 | 56,0,4,134,409,0,2,150,1,1.9,2,2,7,2
62 | 65,0,3,140,417,1,2,157,0,0.8,1,1,3,1
63 | 70,1,4,130,322,0,2,109,0,2.4,2,3,3,2
64 | 60,0,4,158,305,0,2,161,0,0,1,0,3,2
65 | 50,1,4,144,200,0,2,126,1,0.9,2,0,7,2
66 | 62,0,4,140,394,0,2,157,0,1.2,2,0,3,1
67 | 56,1,3,130,256,1,2,142,1,0.6,2,1,6,2
68 | 63,1,1,145,233,1,2,150,0,2.3,3,0,6,1
69 | 45,0,2,112,160,0,0,138,0,0,2,0,3,1
70 | 47,1,3,108,243,0,0,152,0,0,1,0,3,2
71 | 61,1,4,140,207,0,2,138,1,1.9,1,1,7,2
72 | 54,1,3,125,273,0,2,152,0,0.5,3,1,3,1
73 | 61,0,4,145,307,0,2,146,1,1,2,0,7,2
74 | 54,1,4,110,239,0,0,126,1,2.8,2,1,7,2
75 | 48,1,4,122,222,0,2,186,0,0,1,0,3,1
76 | 57,1,2,124,261,0,0,141,0,0.3,1,0,7,2
77 | 58,0,3,120,340,0,0,172,0,0,1,0,3,1
78 | 43,1,4,150,247,0,0,171,0,1.5,1,0,3,1
79 | 61,1,4,120,260,0,0,140,1,3.6,2,1,7,2
80 | 46,0,2,105,204,0,0,172,0,0,1,0,3,1
81 | 43,1,4,115,303,0,0,181,0,1.2,2,0,3,1
82 | 34,0,2,118,210,0,0,192,0,0.7,1,0,3,1
83 | 57,1,4,152,274,0,0,88,1,1.2,2,1,7,2
84 | 60,1,4,130,206,0,2,132,1,2.4,2,2,7,2
85 | 57,1,3,128,229,0,2,150,0,0.4,2,1,7,2
86 | 59,1,4,135,234,0,0,161,0,0.5,2,0,7,1
87 | 51,1,3,100,222,0,0,143,1,1.2,2,0,3,1
88 | 57,1,4,140,192,0,0,148,0,0.4,2,0,6,1
89 | 62,1,2,120,281,0,2,103,0,1.4,2,1,7,2
90 | 49,0,2,134,271,0,0,162,0,0,2,0,3,1
91 | 59,1,1,170,288,0,2,159,0,0.2,2,0,7,2
92 | 41,0,2,126,306,0,0,163,0,0,1,0,3,1
93 | 54,1,3,150,232,0,2,165,0,1.6,1,0,7,1
94 | 71,0,4,112,149,0,0,125,0,1.6,2,0,3,1
95 | 60,1,4,117,230,1,0,160,1,1.4,1,2,7,2
96 | 49,0,4,130,269,0,0,163,0,0,1,0,3,1
97 | 56,0,4,200,288,1,2,133,1,4,3,2,7,2
98 | 66,1,4,112,212,0,2,132,1,0.1,1,1,3,2
99 | 71,0,2,160,302,0,0,162,0,0.4,1,2,3,1
100 | 62,1,2,128,208,1,2,140,0,0,1,0,3,1
101 | 50,0,3,120,219,0,0,158,0,1.6,2,0,3,1
102 | 51,0,4,130,305,0,0,142,1,1.2,2,0,7,2
103 | 62,1,3,130,231,0,0,146,0,1.8,2,3,7,1
104 | 65,0,3,160,360,0,2,151,0,0.8,1,0,3,1
105 | 40,1,1,140,199,0,0,178,1,1.4,1,0,7,1
106 | 59,1,3,126,218,1,0,134,0,2.2,2,1,6,2
107 | 52,0,3,136,196,0,2,169,0,0.1,2,0,3,1
108 | 64,1,4,145,212,0,2,132,0,2,2,2,6,2
109 | 49,1,3,120,188,0,0,139,0,2,2,3,7,2
110 | 54,0,3,135,304,1,0,170,0,0,1,0,3,1
111 | 41,1,2,135,203,0,0,132,0,0,2,0,6,1
112 | 67,1,4,160,286,0,2,108,1,1.5,2,3,3,2
113 | 59,1,1,178,270,0,2,145,0,4.2,3,0,7,1
114 | 41,0,2,105,198,0,0,168,0,0,1,1,3,1
115 | 61,1,4,138,166,0,2,125,1,3.6,2,1,3,2
116 | 67,1,4,120,229,0,2,129,1,2.6,2,2,7,2
117 | 64,1,1,110,211,0,2,144,1,1.8,2,0,3,1
118 | 58,1,2,120,284,0,2,160,0,1.8,2,0,3,2
119 | 54,0,2,132,288,1,2,159,1,0,1,1,3,1
120 | 58,1,3,112,230,0,2,165,0,2.5,2,1,7,2
121 | 67,0,4,106,223,0,0,142,0,0.3,1,2,3,1
122 | 50,1,3,129,196,0,0,163,0,0,1,0,3,1
123 | 51,1,4,140,299,0,0,173,1,1.6,1,0,7,2
124 | 67,1,4,120,237,0,0,71,0,1,2,0,3,2
125 | 43,0,3,122,213,0,0,165,0,0.2,2,0,3,1
126 | 43,0,4,132,341,1,2,136,1,3,2,0,7,2
127 | 58,1,4,128,259,0,2,130,1,3,2,2,7,2
128 | 62,0,4,150,244,0,0,154,1,1.4,2,0,3,2
129 | 41,1,4,110,172,0,2,158,0,0,1,0,7,2
130 | 63,1,4,130,330,1,2,132,1,1.8,1,3,7,2
131 | 51,1,3,110,175,0,0,123,0,0.6,1,0,3,1
132 | 52,1,4,112,230,0,0,160,0,0,1,1,3,2
133 | 48,1,4,124,274,0,2,166,0,0.5,2,0,7,2
134 | 58,1,4,146,218,0,0,105,0,2,2,1,7,2
135 | 64,0,3,140,313,0,0,133,0,0.2,1,0,7,1
136 | 67,1,4,125,254,1,0,163,0,0.2,2,2,7,2
137 | 50,0,2,120,244,0,0,162,0,1.1,1,0,3,1
138 | 55,1,4,140,217,0,0,111,1,5.6,3,0,7,2
139 | 35,0,4,138,183,0,0,182,0,1.4,1,0,3,1
140 | 65,1,4,135,254,0,2,127,0,2.8,2,1,7,2
141 | 58,0,2,136,319,1,2,152,0,0,1,2,3,2
142 | 42,0,3,120,209,0,0,173,0,0,2,0,3,1
143 | 45,1,4,104,208,0,2,148,1,3,2,0,3,1
144 | 46,1,2,101,197,1,0,156,0,0,1,0,7,1
145 | 60,0,3,102,318,0,0,160,0,0,1,1,3,1
146 | 44,1,2,120,263,0,0,173,0,0,1,0,7,1
147 | 53,1,3,130,197,1,2,152,0,1.2,3,0,3,1
148 | 45,1,4,115,260,0,2,185,0,0,1,0,3,1
149 | 53,0,4,130,264,0,2,143,0,0.4,2,0,3,1
150 | 53,1,3,130,246,1,2,173,0,0,1,3,3,1
151 | 62,0,4,124,209,0,0,163,0,0,1,0,3,1
152 | 77,1,4,125,304,0,2,162,1,0,1,3,3,2
153 | 54,0,3,110,214,0,0,158,0,1.6,2,0,3,1
154 | 51,1,4,140,261,0,2,186,1,0,1,0,3,1
155 | 69,1,1,160,234,1,2,131,0,0.1,2,1,3,1
156 | 52,1,4,108,233,1,0,147,0,0.1,1,3,7,1
157 | 59,1,4,140,177,0,0,162,1,0,1,1,7,2
158 | 58,1,3,132,224,0,2,173,0,3.2,1,2,7,2
159 | 57,0,4,128,303,0,2,159,0,0,1,1,3,1
160 | 45,0,2,130,234,0,2,175,0,0.6,2,0,3,1
161 | 66,1,4,160,228,0,2,138,0,2.3,1,0,6,1
162 | 51,1,4,140,298,0,0,122,1,4.2,2,3,7,2
163 | 57,1,4,150,276,0,2,112,1,0.6,2,1,6,2
164 | 37,1,3,130,250,0,0,187,0,3.5,3,0,3,1
165 | 39,1,3,140,321,0,2,182,0,0,1,0,3,1
166 | 70,1,4,145,174,0,0,125,1,2.6,3,0,7,2
167 | 59,1,2,140,221,0,0,164,1,0,1,0,3,1
168 | 58,1,3,105,240,0,2,154,1,0.6,2,0,7,1
169 | 44,1,2,120,220,0,0,170,0,0,1,0,3,1
170 | 66,1,2,160,246,0,0,120,1,0,2,3,6,2
171 | 45,1,4,142,309,0,2,147,1,0,2,3,7,2
172 | 52,1,4,128,255,0,0,161,1,0,1,1,7,2
173 | 58,1,3,140,211,1,2,165,0,0,1,0,3,1
174 | 76,0,3,140,197,0,1,116,0,1.1,2,0,3,1
175 | 39,0,3,94,199,0,0,179,0,0,1,0,3,1
176 | 49,1,3,118,149,0,2,126,0,0.8,1,3,3,2
177 | 49,1,2,130,266,0,0,171,0,0.6,1,0,3,1
178 | 54,1,2,108,309,0,0,156,0,0,1,0,7,1
179 | 60,0,3,120,178,1,0,96,0,0,1,0,3,1
180 | 57,1,3,150,126,1,0,173,0,0.2,1,1,7,1
181 | 70,1,3,160,269,0,0,112,1,2.9,2,1,7,2
182 | 60,1,4,140,293,0,2,170,0,1.2,2,2,7,2
183 | 57,1,4,132,207,0,0,168,1,0,1,0,7,1
184 | 57,1,4,165,289,1,2,124,0,1,2,3,7,2
185 | 64,1,4,128,263,0,0,105,1,0.2,2,1,7,1
186 | 44,1,3,140,235,0,2,180,0,0,1,0,3,1
187 | 67,0,3,115,564,0,2,160,0,1.6,2,0,7,1
188 | 38,1,1,120,231,0,0,182,1,3.8,2,0,7,2
189 | 44,1,4,110,197,0,2,177,0,0,1,1,3,2
190 | 44,1,3,130,233,0,0,179,1,0.4,1,0,3,1
191 | 66,0,1,150,226,0,0,114,0,2.6,3,0,3,1
192 | 42,1,2,120,295,0,0,162,0,0,1,0,3,1
193 | 74,0,2,120,269,0,2,121,1,0.2,1,1,3,1
194 | 57,1,3,150,168,0,0,174,0,1.6,1,0,3,1
195 | 66,1,4,120,302,0,2,151,0,0.4,2,0,3,1
196 | 65,1,1,138,282,1,2,174,0,1.4,2,1,3,2
197 | 56,1,4,130,283,1,2,103,1,1.6,3,0,7,2
198 | 52,1,2,128,205,1,0,184,0,0,1,0,3,1
199 | 56,1,2,120,236,0,0,178,0,0.8,1,0,3,1
200 | 51,0,3,120,295,0,2,157,0,0.6,1,0,3,1
201 | 60,1,4,130,253,0,0,144,1,1.4,1,1,7,2
202 | 35,1,4,126,282,0,2,156,1,0,1,0,7,2
203 | 46,1,4,120,249,0,2,144,0,0.8,1,0,7,2
204 | 65,1,4,110,248,0,2,158,0,0.6,1,2,6,2
205 | 59,1,4,170,326,0,2,140,1,3.4,3,0,7,2
206 | 56,1,4,132,184,0,2,105,1,2.1,2,1,6,2
207 | 63,1,4,130,254,0,2,147,0,1.4,2,1,7,2
208 | 54,0,3,108,267,0,2,167,0,0,1,0,3,1
209 | 41,1,2,110,235,0,0,153,0,0,1,0,3,1
210 | 64,1,4,120,246,0,2,96,1,2.2,3,1,3,2
211 | 50,1,4,150,243,0,2,128,0,2.6,2,0,7,2
212 | 59,1,1,160,273,0,2,125,0,0,1,0,3,2
213 | 50,1,3,140,233,0,0,163,0,0.6,2,1,7,2
214 | 58,0,4,100,248,0,2,122,0,1,2,0,3,1
215 | 63,0,4,108,269,0,0,169,1,1.8,2,2,3,2
216 | 48,1,2,130,245,0,2,180,0,0.2,2,0,3,1
217 | 59,0,4,174,249,0,0,143,1,0,2,0,3,2
218 | 61,0,4,130,330,0,2,169,0,0,1,0,3,2
219 | 53,1,4,142,226,0,2,111,1,0,1,0,7,1
220 | 47,1,4,112,204,0,0,143,0,0.1,1,0,3,1
221 | 44,1,4,112,290,0,2,153,0,0,1,1,3,2
222 | 51,0,3,130,256,0,2,149,0,0.5,1,0,3,1
223 | 67,0,3,152,277,0,0,172,0,0,1,1,3,1
224 | 42,1,3,120,240,1,0,194,0,0.8,3,0,7,1
225 | 42,1,4,140,226,0,0,178,0,0,1,0,3,1
226 | 69,1,3,140,254,0,2,146,0,2,2,3,7,2
227 | 62,1,4,120,267,0,0,99,1,1.8,2,2,7,2
228 | 64,0,4,130,303,0,0,122,0,2,2,2,3,1
229 | 60,0,1,150,240,0,0,171,0,0.9,1,0,3,1
230 | 59,1,4,138,271,0,2,182,0,0,1,0,3,1
231 | 44,1,3,120,226,0,0,169,0,0,1,0,3,1
232 | 52,1,3,172,199,1,0,162,0,0.5,1,0,7,1
233 | 63,0,4,150,407,0,2,154,0,4,2,3,7,2
234 | 55,1,4,160,289,0,2,145,1,0.8,2,1,7,2
235 | 43,1,3,130,315,0,0,162,0,1.9,1,1,3,1
236 | 50,0,4,110,254,0,2,159,0,0,1,0,3,1
237 | 58,0,1,150,283,1,2,162,0,1,1,0,3,1
238 | 56,0,2,140,294,0,2,153,0,1.3,2,0,3,1
239 | 44,1,2,130,219,0,2,188,0,0,1,0,3,1
240 | 60,1,3,140,185,0,2,155,0,3,2,0,3,2
241 | 44,0,3,108,141,0,0,175,0,0.6,2,0,3,1
242 | 54,1,2,192,283,0,2,195,0,0,1,1,7,2
243 | 60,1,4,125,258,0,2,141,1,2.8,2,1,7,2
244 | 62,0,4,138,294,1,0,106,0,1.9,2,3,3,2
245 | 44,0,3,118,242,0,0,149,0,0.3,2,1,3,1
246 | 52,1,1,118,186,0,2,190,0,0,2,0,6,1
247 | 67,1,4,100,299,0,2,125,1,0.9,2,2,3,2
248 | 40,1,4,110,167,0,2,114,1,2,2,0,7,2
249 | 52,1,2,134,201,0,0,158,0,0.8,1,1,3,1
250 | 48,1,3,124,255,1,0,175,0,0,1,2,3,1
251 | 42,0,4,102,265,0,2,122,0,0.6,2,0,3,1
252 | 46,1,4,140,311,0,0,120,1,1.8,2,2,7,2
253 | 48,1,4,130,256,1,2,150,1,0,1,2,7,2
254 | 69,0,1,140,239,0,0,151,0,1.8,1,2,3,1
255 | 45,0,4,138,236,0,2,152,1,0.2,2,0,3,1
256 | 51,0,3,140,308,0,2,142,0,1.5,1,1,3,1
257 | 54,1,4,124,266,0,2,109,1,2.2,2,1,7,2
258 | 60,1,4,145,282,0,2,142,1,2.8,2,2,7,2
259 | 64,1,3,140,335,0,0,158,0,0,1,0,3,2
260 | 70,1,2,156,245,0,2,143,0,0,1,0,3,1
261 | 65,0,4,150,225,0,2,114,0,1,2,3,7,2
262 | 53,0,4,138,234,0,2,160,0,0,1,0,3,1
263 | 56,1,4,125,249,1,2,144,1,1.2,2,1,3,2
264 | 54,1,4,110,206,0,2,108,1,0,2,1,3,2
265 | 55,0,2,135,250,0,2,161,0,1.4,2,0,3,1
266 | 54,1,4,120,188,0,0,113,0,1.4,2,1,7,2
267 | 68,0,3,120,211,0,2,115,0,1.5,2,0,3,1
268 | 53,1,4,123,282,0,0,95,1,2,2,2,7,2
269 | 46,1,3,150,231,0,0,147,0,3.6,2,0,3,2
270 | 35,1,4,120,198,0,0,130,1,1.6,2,0,7,2
271 |
--------------------------------------------------------------------------------
/metacluster/data/Glass.csv:
--------------------------------------------------------------------------------
1 | 1,1.521,13.64,4.49,1.1,71.78,0.06,8.75,0,0,1
2 | 2,1.5176,13.89,3.6,1.36,72.73,0.48,7.83,0,0,1
3 | 3,1.5162,13.53,3.55,1.54,72.99,0.39,7.78,0,0,1
4 | 4,1.5177,13.21,3.69,1.29,72.61,0.57,8.22,0,0,1
5 | 5,1.5174,13.27,3.62,1.24,73.08,0.55,8.07,0,0,1
6 | 6,1.516,12.79,3.61,1.62,72.97,0.64,8.07,0,0.26,1
7 | 7,1.5174,13.3,3.6,1.14,73.09,0.58,8.17,0,0,1
8 | 8,1.5176,13.15,3.61,1.05,73.24,0.57,8.24,0,0,1
9 | 9,1.5192,14.04,3.58,1.37,72.08,0.56,8.3,0,0,1
10 | 10,1.5176,13,3.6,1.36,72.99,0.57,8.4,0,0.11,1
11 | 11,1.5157,12.72,3.46,1.56,73.2,0.67,8.09,0,0.24,1
12 | 12,1.5176,12.8,3.66,1.27,73.01,0.6,8.56,0,0,1
13 | 13,1.5159,12.88,3.43,1.4,73.28,0.69,8.05,0,0.24,1
14 | 14,1.5175,12.86,3.56,1.27,73.21,0.54,8.38,0,0.17,1
15 | 15,1.5176,12.61,3.59,1.31,73.29,0.58,8.5,0,0,1
16 | 16,1.5176,12.81,3.54,1.23,73.24,0.58,8.39,0,0,1
17 | 17,1.5178,12.68,3.67,1.16,73.11,0.61,8.7,0,0,1
18 | 18,1.522,14.36,3.85,0.89,71.36,0.15,9.15,0,0,1
19 | 19,1.5191,13.9,3.73,1.18,72.12,0.06,8.89,0,0,1
20 | 20,1.5174,13.02,3.54,1.69,72.73,0.54,8.44,0,0.07,1
21 | 21,1.5175,12.82,3.55,1.49,72.75,0.54,8.52,0,0.19,1
22 | 22,1.5197,14.77,3.75,0.29,72.02,0.03,9,0,0,1
23 | 23,1.5174,12.78,3.62,1.29,72.79,0.59,8.7,0,0,1
24 | 24,1.5175,12.81,3.57,1.35,73.02,0.62,8.59,0,0,1
25 | 25,1.5172,13.38,3.5,1.15,72.85,0.5,8.43,0,0,1
26 | 26,1.5176,12.98,3.54,1.21,73,0.65,8.53,0,0,1
27 | 27,1.5179,13.21,3.48,1.41,72.64,0.59,8.43,0,0,1
28 | 28,1.5172,12.87,3.48,1.33,73.04,0.56,8.43,0,0,1
29 | 29,1.5177,12.56,3.52,1.43,73.15,0.57,8.54,0,0,1
30 | 30,1.5178,13.08,3.49,1.28,72.86,0.6,8.49,0,0,1
31 | 31,1.5177,12.65,3.56,1.3,73.08,0.61,8.69,0,0.14,1
32 | 32,1.5175,12.84,3.5,1.14,73.27,0.56,8.55,0,0,1
33 | 33,1.5177,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1
34 | 34,1.5175,12.57,3.47,1.38,73.39,0.6,8.55,0,0.06,1
35 | 35,1.5178,12.69,3.54,1.34,72.95,0.57,8.75,0,0,1
36 | 36,1.5157,13.29,3.45,1.21,72.74,0.56,8.57,0,0,1
37 | 37,1.5191,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0,1
38 | 38,1.518,12.74,3.48,1.35,72.96,0.64,8.68,0,0,1
39 | 39,1.5221,14.21,3.82,0.47,71.77,0.11,9.57,0,0,1
40 | 40,1.5221,14.21,3.82,0.47,71.77,0.11,9.57,0,0,1
41 | 41,1.5179,12.79,3.5,1.12,73.03,0.64,8.77,0,0,1
42 | 42,1.5176,12.71,3.42,1.2,73.2,0.59,8.64,0,0,1
43 | 43,1.5178,13.21,3.39,1.33,72.76,0.59,8.59,0,0,1
44 | 44,1.5221,13.73,3.84,0.72,71.76,0.17,9.74,0,0,1
45 | 45,1.5179,12.73,3.43,1.19,72.95,0.62,8.76,0,0.3,1
46 | 46,1.519,13.49,3.48,1.35,71.95,0.55,9,0,0,1
47 | 47,1.5187,13.19,3.37,1.18,72.72,0.57,8.83,0,0.16,1
48 | 48,1.5267,13.99,3.7,0.71,71.57,0.02,9.82,0,0.1,1
49 | 49,1.5222,13.21,3.77,0.79,71.99,0.13,10.02,0,0,1
50 | 50,1.519,13.58,3.35,1.23,72.08,0.59,8.91,0,0,1
51 | 71,1.5157,14.86,3.67,1.74,71.87,0.16,7.36,0,0.12,2
52 | 72,1.5185,13.64,3.87,1.27,71.96,0.54,8.32,0,0.32,2
53 | 73,1.5159,13.09,3.59,1.52,73.1,0.67,7.83,0,0,2
54 | 74,1.5163,13.34,3.57,1.57,72.87,0.61,7.89,0,0,2
55 | 75,1.516,13.02,3.56,1.54,73.11,0.72,7.9,0,0,2
56 | 76,1.5159,13.02,3.58,1.51,73.12,0.69,7.96,0,0,2
57 | 77,1.5165,13.44,3.61,1.54,72.39,0.66,8.03,0,0,2
58 | 78,1.5163,13,3.58,1.54,72.83,0.61,8.04,0,0,2
59 | 79,1.5161,13.92,3.52,1.25,72.88,0.37,7.94,0,0.14,2
60 | 80,1.5159,12.82,3.52,1.9,72.86,0.69,7.97,0,0,2
61 | 81,1.5159,12.86,3.52,2.12,72.66,0.69,7.97,0,0,2
62 | 82,1.5159,13.25,3.45,1.43,73.17,0.61,7.86,0,0,2
63 | 83,1.5165,13.41,3.55,1.25,72.81,0.68,8.1,0,0,2
64 | 84,1.5159,13.09,3.52,1.55,72.87,0.68,8.05,0,0.09,2
65 | 85,1.5141,14.25,3.09,2.08,72.28,1.1,7.08,0,0,2
66 | 86,1.5163,13.36,3.58,1.49,72.72,0.45,8.21,0,0,2
67 | 87,1.5157,13.24,3.49,1.47,73.25,0.38,8.03,0,0,2
68 | 88,1.5165,13.4,3.49,1.52,72.65,0.67,8.08,0,0.1,2
69 | 89,1.5162,13.01,3.5,1.48,72.89,0.6,8.12,0,0,2
70 | 90,1.5164,12.55,3.48,1.87,73.23,0.63,8.08,0,0.09,2
71 | 91,1.5184,12.93,3.74,1.11,72.28,0.64,8.96,0,0.22,2
72 | 92,1.516,12.9,3.44,1.45,73.06,0.44,8.27,0,0,2
73 | 93,1.5159,13.12,3.41,1.58,73.26,0.07,8.39,0,0.19,2
74 | 94,1.5159,13.24,3.34,1.47,73.1,0.39,8.22,0,0,2
75 | 95,1.5163,12.71,3.33,1.49,73.28,0.67,8.24,0,0,2
76 | 96,1.5186,13.36,3.43,1.43,72.26,0.51,8.6,0,0,2
77 | 97,1.5184,13.02,3.62,1.06,72.34,0.64,9.13,0,0.15,2
78 | 98,1.5174,12.2,3.25,1.16,73.55,0.62,8.9,0,0.24,2
79 | 99,1.5169,12.67,2.88,1.71,73.21,0.73,8.54,0,0,2
80 | 100,1.5181,12.96,2.96,1.43,72.92,0.6,8.79,0.14,0,2
81 | 101,1.5166,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2
82 | 102,1.5173,12.35,2.72,1.63,72.87,0.7,9.23,0,0,2
83 | 103,1.5182,12.62,2.76,0.83,73.81,0.35,9.42,0,0.2,2
84 | 104,1.5273,13.8,3.15,0.66,70.57,0.08,11.64,0,0,2
85 | 105,1.5241,13.83,2.9,1.17,71.15,0.08,10.79,0,0,2
86 | 106,1.5248,11.45,0,1.88,72.19,0.81,13.24,0,0.34,2
87 | 107,1.5313,10.73,0,2.1,69.81,0.58,13.3,3.15,0.28,2
88 | 108,1.5339,12.3,0,1,70.16,0.12,16.19,0,0.24,2
89 | 109,1.5222,14.43,0,1,72.67,0.1,11.52,0,0.08,2
90 | 110,1.5182,13.72,0,0.56,74.45,0,10.99,0,0,2
91 | 111,1.5266,11.23,0,0.77,73.21,0,14.68,0,0,2
92 | 112,1.5274,11.02,0,0.75,73.08,0,14.96,0,0,2
93 | 113,1.5278,12.64,0,0.67,72.02,0.06,14.4,0,0,2
94 | 114,1.5189,13.46,3.83,1.26,72.55,0.57,8.21,0,0.14,2
95 | 115,1.5185,13.1,3.97,1.19,72.44,0.6,8.43,0,0,2
96 | 147,1.5177,13.65,3.66,1.11,72.77,0.11,8.6,0,0,3
97 | 148,1.5161,13.33,3.53,1.34,72.67,0.56,8.33,0,0,3
98 | 149,1.5167,13.24,3.57,1.38,72.7,0.56,8.44,0,0.1,3
99 | 150,1.5164,12.16,3.52,1.35,72.89,0.57,8.53,0,0,3
100 | 151,1.5167,13.14,3.45,1.76,72.48,0.6,8.38,0,0.17,3
101 | 152,1.5213,14.32,3.9,0.83,71.5,0,9.49,0,0,3
102 | 153,1.5178,13.64,3.65,0.65,73,0.06,8.93,0,0,3
103 | 154,1.5161,13.42,3.4,1.22,72.69,0.59,8.32,0,0,3
104 | 155,1.5169,12.86,3.58,1.31,72.61,0.61,8.79,0,0,3
105 | 156,1.5165,13.04,3.4,1.26,73.01,0.52,8.58,0,0,3
106 | 157,1.5166,13.41,3.39,1.28,72.64,0.52,8.65,0,0,3
107 | 158,1.5212,14.03,3.76,0.58,71.79,0.11,9.65,0,0,3
108 | 159,1.5178,13.53,3.41,1.52,72.04,0.58,8.79,0,0,3
109 | 160,1.518,13.5,3.36,1.63,71.94,0.57,8.81,0,0.09,3
110 | 164,1.5151,14.01,2.68,3.5,69.89,1.68,5.87,2.2,0,5
111 | 165,1.5192,12.73,1.85,1.86,72.69,0.6,10.09,0,0,5
112 | 166,1.5217,11.56,1.88,1.56,72.86,0.47,11.41,0,0,5
113 | 167,1.5215,11.03,1.71,1.56,73.44,0.58,11.62,0,0,5
114 | 168,1.5197,12.64,0,1.65,73.75,0.38,11.53,0,0,5
115 | 169,1.5167,12.86,0,1.83,73.88,0.97,10.17,0,0,5
116 | 170,1.5199,13.27,0,1.76,73.03,0.47,11.32,0,0,5
117 | 171,1.5237,13.44,0,1.58,72.22,0.32,12.24,0,0,5
118 | 172,1.5132,13.02,0,3.04,70.48,6.21,6.96,0,0,5
119 | 177,1.5191,14,2.39,1.56,72.37,0,9.57,0,0,6
120 | 178,1.5194,13.79,2.41,1.19,72.76,0,9.77,0,0,6
121 | 179,1.5183,14.46,2.24,1.62,72.38,0,9.26,0,0,6
122 | 180,1.5185,14.09,2.19,1.66,72.67,0,9.32,0,0,6
123 | 181,1.513,14.4,1.74,1.54,74.55,0,7.59,0,0,6
124 | 182,1.5189,14.99,0.78,1.74,72.5,0,9.95,0,0,6
125 | 186,1.5113,13.69,3.2,1.81,72.81,1.76,5.43,1.19,0,7
126 | 187,1.5184,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0,7
127 | 188,1.5232,13.44,3.34,1.23,72.38,0.6,8.83,0,0,7
128 | 189,1.5225,14.86,2.2,2.06,70.26,0.76,9.76,0,0,7
129 | 190,1.5236,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0,7
130 | 191,1.5161,13.88,1.78,1.79,73.1,0,8.67,0.76,0,7
131 | 192,1.516,14.85,0,2.38,73.28,0,8.76,0.64,0.09,7
132 | 193,1.5162,14.2,0,2.79,73.46,0.04,9.04,0.4,0.09,7
133 | 194,1.5172,14.75,0,2,73.02,0,8.53,1.59,0.08,7
134 | 195,1.5168,14.56,0,1.98,73.29,0,8.52,1.57,0.07,7
135 | 196,1.5155,14.14,0,2.68,73.39,0.08,9.07,0.61,0.05,7
136 | 197,1.5156,13.87,0,2.54,73.23,0.14,9.41,0.81,0.01,7
137 | 198,1.5173,14.7,0,2.34,73.28,0,8.95,0.66,0,7
138 | 199,1.5153,14.38,0,2.66,73.1,0.04,9.08,0.64,0,7
139 | 200,1.5161,15.01,0,2.51,73.05,0.05,8.83,0.53,0,7
140 | 201,1.5151,15.15,0,2.25,73.5,0,8.34,0.63,0,7
141 | 202,1.5165,11.95,0,1.19,75.18,2.7,8.93,0,0,7
142 | 203,1.5151,14.85,0,2.42,73.72,0,8.39,0.56,0,7
143 | 204,1.5166,14.8,0,1.99,73.11,0,8.28,1.71,0,7
144 | 205,1.5162,14.95,0,2.27,73.3,0,8.71,0.67,0,7
145 | 51,1.5232,13.72,3.72,0.51,71.75,0.09,10.06,0,0.16,1
146 | 52,1.5193,13.2,3.33,1.28,72.36,0.6,9.14,0,0.11,1
147 | 53,1.5181,13.43,2.87,1.19,72.84,0.55,9.03,0,0,1
148 | 54,1.5184,13.14,2.84,1.28,72.85,0.55,9.07,0,0,1
149 | 55,1.5178,13.21,2.81,1.29,72.98,0.51,9.02,0,0.09,1
150 | 56,1.5177,12.45,2.71,1.29,73.7,0.56,9.06,0,0.24,1
151 | 57,1.5122,12.99,3.47,1.12,72.98,0.62,8.35,0,0.31,1
152 | 58,1.5182,12.87,3.48,1.29,72.95,0.6,8.43,0,0,1
153 | 59,1.5175,13.48,3.74,1.17,72.99,0.59,8.03,0,0,1
154 | 60,1.5175,13.39,3.66,1.19,72.79,0.57,8.27,0,0.11,1
155 | 61,1.5191,13.6,3.62,1.11,72.64,0.14,8.76,0,0,1
156 | 62,1.5198,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0,1
157 | 63,1.5217,13.51,3.86,0.88,71.79,0.23,9.54,0,0.11,1
158 | 64,1.5223,14.17,3.81,0.78,71.35,0,9.69,0,0,1
159 | 65,1.5217,13.48,3.74,0.9,72.01,0.18,9.61,0,0.07,1
160 | 66,1.521,13.69,3.59,1.12,71.96,0.09,9.4,0,0,1
161 | 67,1.5215,13.05,3.65,0.87,72.22,0.19,9.85,0,0.17,1
162 | 68,1.5215,13.05,3.65,0.87,72.32,0.19,9.85,0,0.17,1
163 | 69,1.5215,13.12,3.58,0.9,72.2,0.23,9.82,0,0.16,1
164 | 70,1.523,13.31,3.58,0.82,71.99,0.12,10.17,0,0.03,1
165 | 116,1.5185,13.41,3.89,1.33,72.38,0.51,8.28,0,0,2
166 | 117,1.5183,13.24,3.9,1.41,72.33,0.55,8.31,0,0.1,2
167 | 118,1.5171,13.72,3.68,1.81,72.06,0.64,7.88,0,0,2
168 | 119,1.5167,13.3,3.64,1.53,72.53,0.65,8.03,0,0.29,2
169 | 120,1.5165,13.56,3.57,1.47,72.45,0.64,7.96,0,0,2
170 | 121,1.5184,13.25,3.76,1.32,72.4,0.58,8.42,0,0,2
171 | 122,1.5166,12.93,3.54,1.62,72.96,0.64,8.03,0,0.21,2
172 | 123,1.5169,13.23,3.54,1.48,72.84,0.56,8.1,0,0,2
173 | 124,1.5171,13.48,3.48,1.71,72.52,0.62,7.99,0,0,2
174 | 125,1.5218,13.2,3.68,1.15,72.75,0.54,8.52,0,0,2
175 | 126,1.5187,12.93,3.66,1.56,72.51,0.58,8.55,0,0.12,2
176 | 127,1.5167,12.94,3.61,1.26,72.75,0.56,8.6,0,0,2
177 | 128,1.5208,13.78,2.28,1.43,71.99,0.49,9.85,0,0.17,2
178 | 129,1.5207,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2
179 | 130,1.5202,13.98,1.35,1.63,71.76,0.39,10.56,0,0.18,2
180 | 131,1.5218,13.75,1.01,1.36,72.19,0.33,11.14,0,0,2
181 | 132,1.5261,13.7,0,1.36,71.24,0.19,13.44,0,0.1,2
182 | 133,1.5181,13.43,3.98,1.18,72.49,0.58,8.15,0,0,2
183 | 134,1.518,13.71,3.93,1.54,71.81,0.54,8.21,0,0.15,2
184 | 135,1.5181,13.33,3.85,1.25,72.78,0.52,8.12,0,0,2
185 | 136,1.5179,13.19,3.9,1.3,72.33,0.55,8.44,0,0.28,2
186 | 137,1.5181,13,3.8,1.08,73.07,0.56,8.38,0,0.12,2
187 | 138,1.5171,12.89,3.62,1.57,72.96,0.61,8.11,0,0,2
188 | 139,1.5167,12.79,3.52,1.54,73.36,0.66,7.9,0,0,2
189 | 140,1.5167,12.87,3.56,1.64,73.14,0.65,7.99,0,0,2
190 | 141,1.5169,13.33,3.54,1.61,72.54,0.68,8.11,0,0,2
191 | 142,1.5185,13.2,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2
192 | 143,1.5166,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2
193 | 144,1.5171,13,3.47,1.79,72.72,0.66,8.18,0,0,2
194 | 145,1.5166,12.99,3.18,1.23,72.97,0.58,8.81,0,0.24,2
195 | 146,1.5184,12.85,3.67,1.24,72.57,0.62,8.68,0,0.35,2
196 | 161,1.5183,13.33,3.34,1.54,72.14,0.56,8.99,0,0,3
197 | 162,1.5193,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3
198 | 163,1.5221,14.19,3.78,0.91,71.36,0.23,9.14,0,0.37,3
199 | 173,1.5132,13,0,3.02,70.7,6.21,6.93,0,0,5
200 | 174,1.5204,13.38,0,1.4,72.25,0.33,12.5,0,0,5
201 | 175,1.5206,12.85,1.61,2.17,72.18,0.76,9.7,0.24,0.51,5
202 | 176,1.5212,12.97,0.33,1.51,73.39,0.13,11.27,0,0.28,5
203 | 183,1.5192,14.15,0,2.09,72.74,0,10.88,0,0,6
204 | 184,1.5197,14.56,0,0.56,73.48,0,11.22,0,0,6
205 | 185,1.5112,17.38,0,0.34,75.41,0,6.65,0,0,6
206 | 206,1.5173,14.95,0,1.8,72.99,0,8.61,1.55,0,7
207 | 207,1.5165,14.94,0,1.87,73.11,0,8.67,1.38,0,7
208 | 208,1.5183,14.39,0,1.82,72.86,1.41,6.47,2.88,0,7
209 | 209,1.5164,14.37,0,2.74,72.85,0,9.45,0.54,0,7
210 | 210,1.5162,14.14,0,2.88,72.61,0.08,9.18,1.06,0,7
211 | 211,1.5169,14.92,0,1.99,73.06,0,8.4,1.59,0,7
212 | 212,1.5207,14.36,0,2.02,73.42,0,8.44,1.64,0,7
213 | 213,1.5165,14.38,0,1.94,73.61,0,8.48,1.57,0,7
214 | 214,1.5171,14.23,0,2.08,73.36,0,8.62,1.67,0,7
215 |
--------------------------------------------------------------------------------
/metacluster/utils/cluster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Created by "Thieu" at 17:39, 30/07/2023 ----------%
3 | # Email: nguyenthieu2102@gmail.com %
4 | # Github: https://github.com/thieu1995 %
5 | # --------------------------------------------------%
6 |
7 | import numpy as np
8 | from sklearn import metrics
9 | from sklearn.cluster import KMeans
10 | from sklearn.mixture import GaussianMixture
11 | from permetrics import ClusteringMetric
12 |
13 | DEFAULT_LIST_CLUSTERS = list(range(2, 11))
14 |
15 |
16 | def get_all_clustering_metrics():
17 | dict_results = {}
18 | for key, value in ClusteringMetric.SUPPORT.items():
19 | if value["type"] in ("min", "max"):
20 | dict_results[key] = value["type"]
21 | return dict_results
22 |
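# --- Illustrative usage sketch (not part of the original cluster.py) -----------
# get_all_clustering_metrics() builds a dict that maps each metric name supported
# by permetrics' ClusteringMetric to "min" or "max", i.e. the direction in which
# that metric should be optimized. A minimal sketch:
#
#   metric_dict = get_all_clustering_metrics()
#   print(metric_dict)   # metric names and directions come from permetrics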
23 |
24 | ### ELBOW
25 | def get_clusters_by_elbow(X, list_clusters=None, **kwargs):
26 | """
27 | 1. First, apply K-means clustering to the dataset for a range of different values of K, where K is the number of clusters. For example, you might try K=1,2,3,...,10.
28 | 2. For each value of K, compute the Sum of Squared Errors (SSE), which is the sum of the squared distances between each data point and its assigned centroid. The SSE can be obtained from the KMeans object's `inertia_` attribute.
29 | 3. Plot the SSE for each value of K. You should see that the SSE decreases as K increases, because as K increases, the centroids are closer to the data points. However, at some point, increasing K further will not improve the SSE as much. The idea of the elbow method is to identify the value of K at which the SSE starts to level off or decrease less rapidly, forming an "elbow" in the plot. This value of K is considered the optimal number of clusters.
30 | """
31 | if type(list_clusters) in (list, tuple, np.ndarray):
32 | list_clusters = [item for item in list_clusters]
33 | else:
34 | list_clusters = DEFAULT_LIST_CLUSTERS
35 | wcss = []
36 | for n_c in list_clusters:
37 | kmeans = KMeans(n_clusters=n_c, n_init="auto")
38 | kmeans.fit(X=X)
39 | wcss.append(kmeans.inertia_)
40 |     x1, y1 = list_clusters[0], wcss[0]
41 | x2, y2 = list_clusters[-1], wcss[-1]
42 | distances = []
43 | for i in range(len(wcss)):
44 |         x0 = list_clusters[i]
45 | y0 = wcss[i]
46 | numerator = abs((y2 - y1) * x0 - (x2 - x1) * y0 + x2 * y1 - y2 * x1)
47 | denominator = np.sqrt((y2 - y1) ** 2 + (x2 - x1) ** 2)
48 | distances.append(numerator / denominator)
49 |     return list_clusters[distances.index(max(distances))]
50 | ### END ELBOW
51 |
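# --- Illustrative usage sketch (not part of the original cluster.py) -----------
# A minimal, hedged example of how get_clusters_by_elbow could be called, assuming
# a synthetic dataset generated with sklearn's make_blobs:
#
#   from sklearn.datasets import make_blobs
#   X, _ = make_blobs(n_samples=300, centers=4, random_state=42)
#   k_elbow = get_clusters_by_elbow(X, list_clusters=list(range(2, 11)))
#   print(k_elbow)   # expected to be close to 4 for well-separated blobs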
52 |
53 | def compute_gap_statistic(X, refs=None, B=10, list_K=None, N_init=10):
54 | """
55 |     This function first generates B reference samples; each sample has the same size as the original dataset,
56 |     and the values of each reference sample follow a uniform distribution over the range of each feature of the original dataset.
57 |     It uses the simplified formula to compute the D of each cluster and then the Wk; list_K should be an increasing list, and 1-10 is usually enough.
58 |     The B value is the number of replicated reference samples used to run the gap statistic;
59 |     a value of 10 is recommended, and it should not be decreased to a smaller value.
60 |
61 | Parameters
62 | ----------
63 |     X : np.ndarray, the original data;
64 |     refs : np.ndarray or None, the replicated reference data to compare with if such data already exists; if not, just pass None and the function will automatically generate them;
65 |     B : int, the number of replicated reference samples used to run the gap statistic; a value of 10 is recommended, and it should not be decreased to a smaller value;
66 |     list_K : list, the range of K values to test;
67 |     N_init : int, the number of initializations for each KMeans run under sklearn, in order to get a stable clustering result each time;
68 |
69 | Returns
70 | -------
71 |     gaps : np.ndarray, containing all the gap-statistic values;
72 |     sk : np.ndarray, the standard-error terms to subtract from the gaps; see the reference paper for the detailed meaning;
73 |     list_clusters : list, containing all the tested K values;
74 | """
75 | shape = X.shape
76 |     if refs is None:
77 | tops = X.max(axis=0)
78 | bots = X.min(axis=0)
79 | dists = np.matrix(np.diag(tops - bots))
80 | rands = np.random.random_sample(size=(shape[0], shape[1], B))
81 | for i in range(B):
82 | rands[:, :, i] = rands[:, :, i] * dists + bots
83 | else:
84 | rands = refs
85 |
86 | if type(list_K) in (list, tuple, np.ndarray):
87 | list_clusters = [item for item in list_K]
88 | else:
89 | list_clusters = DEFAULT_LIST_CLUSTERS
90 |
91 | gaps = np.zeros(len(list_clusters))
92 | Wks = np.zeros(len(list_clusters))
93 | Wkbs = np.zeros((len(list_clusters), B))
94 |
95 | for indk, k in enumerate(list_clusters):
96 | # n_init is the number of times K-means is run for each K value, in order to obtain a stable clustering result
97 | k_means = KMeans(n_clusters=k, init='k-means++', n_init=N_init, max_iter=300, tol=0.0001, verbose=0, random_state=None, copy_x=True)
98 | k_means.fit(X)
99 | classification_result = k_means.labels_
100 | # compute the Wk for the classification result
101 | Wks[indk] = compute_Wk(X, classification_result)
102 |
103 | # clustering on B reference datasets for each 'k'
104 | for i in range(B):
105 | Xb = rands[:, :, i]
106 | k_means.fit(Xb)
107 | classification_result_b = k_means.labels_
108 | Wkbs[indk, i] = compute_Wk(Xb, classification_result_b)
109 |
110 | # compute gaps and sk
111 | gaps = (np.log(Wkbs)).mean(axis=1) - np.log(Wks)
112 | sd_ks = np.std(np.log(Wkbs), axis=1)
113 | sk = sd_ks * np.sqrt(1 + 1.0 / B)
114 | return gaps, sk, list_clusters
115 |
116 |
117 | ### gap statistics
118 | def get_clusters_by_gap_statistic(X, list_clusters=None, B=10, N_init=10, **kwargs):
119 | gaps, s_k, K = compute_gap_statistic(X, refs=None, B=B, list_K=list_clusters, N_init=N_init)
120 | gaps_thres = gaps - s_k
121 | below_or_above = (gaps[0:-1] >= gaps_thres[1:])
122 | if below_or_above.any():
123 | optimal_k = K[below_or_above.argmax()]
124 | else:
125 | optimal_k = K[-1]
126 | return optimal_k
127 |
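# A hedged usage sketch for the gap-statistic helpers above (not part of the library API).
# The selection rule implemented in get_clusters_by_gap_statistic is the classic criterion of
# Tibshirani, Walther & Hastie (2001): pick the smallest K with gap(K) >= gap(K+1) - s(K+1).
# The make_blobs data below is an assumption used only for illustration:
#
#     from sklearn.datasets import make_blobs
#     X_demo, _ = make_blobs(n_samples=300, centers=3, random_state=0)
#     k_gap = get_clusters_by_gap_statistic(X_demo, list_clusters=list(range(1, 11)), B=10, N_init=10)
#     print(k_gap)   # expected to be close to 3 for well-separated blobs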
128 |
129 | def compute_Wk(data: np.ndarray, classification_result: np.ndarray):
130 | """
131 | This function computes Wk, the pooled within-cluster dispersion, after each clustering run (see the note after this function).
132 |
133 | Parameters
134 | ----------
135 | data : np.ndarray, containing all the data points
136 | classification_result : np.ndarray, the cluster label assigned to each data point
137 |
138 | Returns
139 | -------
140 | Wk : float, the pooled within-cluster dispersion
141 | """
142 | Wk = 0
143 | label_set = set(classification_result)
144 | for label in label_set:
145 | each_cluster = data[classification_result == label, :]
146 | mu = each_cluster.mean(axis=0)
147 | D = np.sum((each_cluster - mu) ** 2) * 2.0 * each_cluster.shape[0]
148 | Wk = Wk + D / (2.0 * each_cluster.shape[0])
149 | return Wk
150 |
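# Note on the simplified formula in compute_Wk: for a cluster C_r with n_r points and
# centroid mu_r, the sum of pairwise squared distances satisfies
#     D_r = sum_{i,j in C_r} ||x_i - x_j||^2 = 2 * n_r * sum_{i in C_r} ||x_i - mu_r||^2,
# so Wk = sum_r D_r / (2 * n_r) reduces to the pooled within-cluster sum of squares.
# The function above builds D_r explicitly and then divides by 2 * n_r, which is equivalent.
# A small numeric check of the identity (illustration only, not part of the module):
#
#     pts = np.array([[0.0, 0.0], [2.0, 0.0], [0.0, 2.0]])
#     mu = pts.mean(axis=0)
#     pairwise = sum(np.sum((a - b) ** 2) for a in pts for b in pts)
#     assert np.isclose(pairwise, 2 * len(pts) * np.sum((pts - mu) ** 2))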
151 |
152 | ### silhouette score
153 | def get_clusters_by_silhouette_score(X, list_clusters=None, **kwargs):
154 | if type(list_clusters) in (list, tuple, np.ndarray):
155 | list_clusters = [item for item in list_clusters]
156 | else:
157 | list_clusters = DEFAULT_LIST_CLUSTERS
158 | sil_max = -np.inf
159 | sil_max_clusters = 2
160 | for n_clusters in list_clusters:
161 | model = KMeans(n_clusters=n_clusters, n_init="auto")
162 | labels = model.fit_predict(X)
163 | sil_score = metrics.silhouette_score(X, labels)
164 | if sil_score > sil_max:
165 | sil_max = sil_score
166 | sil_max_clusters = n_clusters
167 | return sil_max_clusters
168 | ### END silhouette score
169 |
170 |
171 | ### DB score
172 | def get_clusters_by_davies_bouldin(X, list_clusters=None, **kwargs):
173 | if type(list_clusters) in (list, tuple, np.ndarray):
174 | list_clusters = [item for item in list_clusters]
175 | else:
176 | list_clusters = DEFAULT_LIST_CLUSTERS
177 | list_dbs = []
178 | for n_clusters in list_clusters:
179 | model = KMeans(n_clusters=n_clusters, n_init="auto")
180 | labels = model.fit_predict(X)
181 | db_score = metrics.davies_bouldin_score(X, labels)
182 | list_dbs.append(db_score)
183 | return list_clusters[np.argmin(list_dbs)]
184 | ### END DB score
185 |
186 |
187 | ### Calinski-Harabasz Index
188 | def get_clusters_by_calinski_harabasz(X, list_clusters=None, **kwargs):
189 | if type(list_clusters) in (list, tuple, np.ndarray):
190 | list_clusters = [item for item in list_clusters]
191 | else:
192 | list_clusters = DEFAULT_LIST_CLUSTERS
193 | list_chs = []
194 | for n_clusters in list_clusters:
195 | model = KMeans(n_clusters=n_clusters, n_init="auto")
196 | labels = model.fit_predict(X)
197 | ch_score = metrics.calinski_harabasz_score(X, labels)
198 | list_chs.append(ch_score)
199 | return list_clusters[np.argmax(list_chs)]
200 | ### END Calinski-Harabasz Index
201 |
202 |
203 | ### Bayesian Information Criterion
204 | def get_clusters_by_bic(X, list_clusters=None, **kwargs):
205 | if type(list_clusters) in (list, tuple, np.ndarray):
206 | list_clusters = [item for item in list_clusters]
207 | else:
208 | list_clusters = DEFAULT_LIST_CLUSTERS
209 | bic_max = -np.inf
210 | bic_max_clusters = 2
211 | for n_clusters in list_clusters:
212 | gm = GaussianMixture(n_components=n_clusters, n_init=10, tol=1e-3, max_iter=1000).fit(X)
213 | bic_score = -gm.bic(X)
214 | if bic_score > bic_max:
215 | bic_max = bic_score
216 | bic_max_clusters = n_clusters
217 | return bic_max_clusters
218 | ### END Bayesian Information Criterion
219 |
220 |
221 | def compute_all_methods(X, list_clusters=None, **kwargs):
222 | k1 = get_clusters_by_elbow(X, list_clusters, **kwargs)
223 | k2 = get_clusters_by_gap_statistic(X, list_clusters, **kwargs)
224 | k3 = get_clusters_by_silhouette_score(X, list_clusters, **kwargs)
225 | k4 = get_clusters_by_davies_bouldin(X, list_clusters, **kwargs)
226 | k5 = get_clusters_by_calinski_harabasz(X, list_clusters, **kwargs)
227 | k6 = get_clusters_by_bic(X, list_clusters, **kwargs)
228 | return [k1, k2, k3, k4, k5, k6]
229 |
230 |
231 | def get_clusters_all_min(X, list_clusters=None, **kwargs):
232 | k_list = compute_all_methods(X, list_clusters, **kwargs)
233 | return min(k_list)
234 |
235 |
236 | def get_clusters_all_max(X, list_clusters=None, **kwargs):
237 | k_list = compute_all_methods(X, list_clusters, **kwargs)
238 | return max(k_list)
239 |
240 |
241 | def get_clusters_all_mean(X, list_clusters=None, **kwargs):
242 | k_list = compute_all_methods(X, list_clusters, **kwargs)
243 | return int(np.mean(k_list))
244 |
245 |
246 | def get_clusters_all_majority(X, list_clusters=None, **kwargs):
247 | k_list = compute_all_methods(X, list_clusters, **kwargs)
248 | return max(set(k_list), key=k_list.count)
249 |
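# A hedged end-to-end sketch of the selectors in this module (not part of the library API).
# The make_blobs data and the range of K values are assumptions used only for illustration:
#
#     from sklearn.datasets import make_blobs
#     X_demo, _ = make_blobs(n_samples=500, centers=4, cluster_std=1.0, random_state=1)
#     ks = list(range(2, 11))
#     print(compute_all_methods(X_demo, list_clusters=ks))        # [elbow, gap, silhouette, DB, CH, BIC]
#     print(get_clusters_all_majority(X_demo, list_clusters=ks))  # the most frequently chosen K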
--------------------------------------------------------------------------------