├── .circleci
│   └── config.yml
├── .github
│   └── workflows
│       ├── codeql.yml
│       └── python-package
├── LICENSE
├── PyPi Package
│   ├── .vscode
│   │   └── settings.json
│   ├── LICENSE
│   ├── MANIFEST.in
│   ├── README.md
│   ├── pyproject.toml
│   ├── setup.cfg
│   ├── setup.py
│   └── src
│       ├── denmune.egg-info
│       │   ├── PKG-INFO
│       │   ├── SOURCES.txt
│       │   ├── dependency_links.txt
│       │   ├── requires.txt
│       │   └── top_level.txt
│       └── denmune
│           ├── .idea
│           │   ├── .gitignore
│           │   ├── .name
│           │   ├── denmune.iml
│           │   ├── inspectionProfiles
│           │   │   └── profiles_settings.xml
│           │   ├── misc.xml
│           │   └── modules.xml
│           ├── __init__.py
│           └── denmune.py
├── README.md
├── codecov.yml
├── colab
│   ├── 2D_shapes_datasets.ipynb
│   ├── Get_97_by_training_MNIST_dataset.ipynb
│   ├── MNIST_dataset.ipynb
│   ├── chameleon_datasets.ipynb
│   ├── clustering_propagation.ipynb
│   ├── clustering_propagation_snapshots.ipynb
│   ├── how_to_use_it.ipynb
│   ├── iris_dataset.ipynb
│   ├── k_nearest_evolution.ipynb
│   ├── noise_detection.ipynb
│   ├── scalability_and_speed.ipynb
│   ├── stability_vs_knn.ipynb
│   ├── training_MNIST.ipynb
│   └── validation.ipynb
├── images
│   ├── denmune-illustration.png
│   └── denmune_propagation.png
├── kaggle
│   ├── beauty-of-propagation-part3.ipynb
│   ├── detecting-non-groundtruth-datasets.ipynb
│   ├── detection-of-2d-shape-datasets.ipynb
│   ├── get-97-using-simple-yet-one-parameter-algorithm.ipynb
│   ├── iris-dataset.ipynb
│   ├── k-nearest-evolution.ipynb
│   ├── noise-detection.ipynb
│   ├── scalability-vs-speed.ipynb
│   ├── stability-vs-number-of-nearest-neighbor.ipynb
│   ├── the-beauty-of-clusters-propagation.ipynb
│   ├── the-beauty-of-propagation-part2.ipynb
│   ├── training-MNIST-dataset-to-get-97.ipynb
│   ├── training-pendigits-dataset-to-get-97.ipynb
│   ├── validation.ipynb
│   └── when-simple-means-powerful.ipynb
├── requirements.txt
└── src
    ├── __init__.py
    ├── denmune.py
    └── tests
        └── test_denmune.py
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # Use the latest 2.1 version of CircleCI pipeline process engine.
2 | # See: https://circleci.com/docs/2.0/configuration-reference
3 | version: 2.1
4 |
5 | # Orbs are reusable packages of CircleCI configuration that you may share across projects, enabling you to create encapsulated, parameterized commands, jobs, and executors that can be used across multiple projects.
6 | # See: https://circleci.com/docs/2.0/orb-intro/
7 | orbs:
8 | # The python orb contains a set of prepackaged CircleCI configuration you can use repeatedly in your configuration files
9 | # Orb commands and jobs help you with common scripting around a language/tool
10 | # so you don't have to copy and paste it everywhere.
11 | # See the orb documentation here: https://circleci.com/developer/orbs/orb/circleci/python
12 | codecov: codecov/codecov@3.0.0
13 | slack: circleci/slack@4.4.4
14 | python: circleci/python@2.1.1
15 |
16 |
17 | # Define a job to be invoked later in a workflow.
18 | # See: https://circleci.com/docs/2.0/configuration-reference/#jobs
19 | jobs:
20 | build-and-test: # This is the name of the job, feel free to change it to better match what you're trying to do!
21 | # These next lines define a Docker executor: https://circleci.com/docs/2.0/executor-types/
22 | # You can specify an image from Dockerhub or use one of the convenience images from CircleCI's Developer Hub
23 | # A list of available CircleCI Docker convenience images is available here: https://circleci.com/developer/images/image/cimg/python
24 | # The executor is the environment in which the steps below will be executed - below will use a python 3.8 container
25 | # Change the version below to your required version of python
26 | docker:
27 | - image: cimg/python:3.10
28 |
29 |
30 | # Checkout the code as the first step. This is a dedicated CircleCI step.
31 | # The python orb's install-packages step will install the dependencies from a Pipfile via Pipenv by default.
32 | # Here we're making sure we just use the system-wide pip. By default it uses the project root's requirements.txt.
33 | # Then run your tests!
34 | # CircleCI will report the results back to your VCS provider.
35 | steps:
36 | - checkout
37 | - python/install-packages:
38 | pkg-manager: pip
39 | # app-dir: ~/project/package-directory/ # If your requirements.txt isn't in the root directory.
40 | # pip-dependency-file: test-requirements.txt # if you have a different name for your requirements file, maybe one that combines your runtime and test requirements.
41 |
42 | - run:
43 | name: Treon Test
44 | command: |
45 | cd colab
46 | # git clone https://github.com/egy1st/datasets
47 | # treon --threads=2
48 |
49 | - run:
50 | name: CodeCov pyTest
51 | command: |
52 | coverage run -m pytest
53 | coverage report
54 | coverage html
55 | coverage xml
56 | cp coverage.xml htmlcov/coverage.xml
57 |
58 | - codecov/upload
59 |
60 | - store_artifacts:
61 | path: htmlcov
62 |
63 | - slack/notify:
64 | template: basic_success_1
65 | channel: C0326UK1VFY
66 | # Invoke jobs via workflows
67 | # See: https://circleci.com/docs/2.0/configuration-reference/#workflows
68 | workflows:
69 | Python-3.10: # This is the name of the workflow, feel free to change it to better match your workflow.
70 | # Inside the workflow, you define the jobs you want to run.
71 | jobs:
72 | - build-and-test:
73 | context: Slack
74 |
75 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "main" ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ "main" ]
20 | schedule:
21 | - cron: '45 0 * * 6'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: [ 'python' ]
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
38 |
39 | steps:
40 | - name: Checkout repository
41 | uses: actions/checkout@v3
42 |
43 | # Initializes the CodeQL tools for scanning.
44 | - name: Initialize CodeQL
45 | uses: github/codeql-action/init@v2
46 | with:
47 | languages: ${{ matrix.language }}
48 | # If you wish to specify custom queries, you can do so here or in a config file.
49 | # By default, queries listed here will override any specified in a config file.
50 | # Prefix the list here with "+" to use these queries and those in the config file.
51 |
52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
53 | # queries: security-extended,security-and-quality
54 |
55 |
56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
57 | # If this step fails, then you should remove it and run the build manually (see below)
58 | - name: Autobuild
59 | uses: github/codeql-action/autobuild@v2
60 |
61 | # ℹ️ Command-line programs to run using the OS shell.
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
63 |
64 | # If the Autobuild fails above, remove it and uncomment the following three lines.
65 | # modify them (or add more) to build your code if your project requires it; please refer to the EXAMPLE below for guidance.
66 |
67 | # - run: |
68 | # echo "Run, Build Application using script"
69 | # ./location_of_script_within_repo/buildscript.sh
70 |
71 | - name: Perform CodeQL Analysis
72 | uses: github/codeql-action/analyze@v2
73 | with:
74 | category: "/language:${{matrix.language}}"
75 |
--------------------------------------------------------------------------------
/.github/workflows/python-package:
--------------------------------------------------------------------------------
1 | name: workflow for codecov
2 | on: [push]
3 | jobs:
4 | run:
5 | runs-on: ${{ matrix.os }}
6 | strategy:
7 | matrix:
8 | os: [ubuntu-latest]
9 | python: ['3.6', '3.7', '3.8', '3.9']
10 | env:
11 | OS: ${{ matrix.os }}
12 | PYTHON: ${{ matrix.python }}
13 | steps:
14 | - uses: actions/checkout@master
15 | - name: Setup Python
16 | uses: actions/setup-python@master
17 | with:
18 | python-version: ${{ matrix.python }}
19 | - name: Generate coverage report
20 | run: |
21 | pip install pytest
22 | pip install pytest-cov
23 | pip install numpy
24 | pip install -U scikit-learn
25 | pip install denmune
26 | pytest --cov=./ --cov-report=xml
27 | - name: Upload coverage to Codecov
28 | uses: codecov/codecov-action@v2
29 | with:
30 | token: 'fce1be95-36c5-4c80-83c1-fe9fa8539dae'
31 | files: ./coverage.xml
32 | env_vars: OS,PYTHON
33 | fail_ci_if_error: true
34 | flags: unittests
35 | name: codecov-umbrella
36 | verbose: true
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2021, Mohamed Ali Abbas
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/PyPi Package/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "restructuredtext.confPath": ""
3 | }
--------------------------------------------------------------------------------
/PyPi Package/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2021, Mohamed Ali Abbas
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
5 |
6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
7 |
8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
9 |
10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
11 |
12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/PyPi Package/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include data *.ipynb *.py *.txt *.csv
--------------------------------------------------------------------------------
/PyPi Package/README.md:
--------------------------------------------------------------------------------
1 | DenMune: A density-peak clustering algorithm
2 | =============================================
3 |
4 | DenMune is a clustering algorithm that can find clusters of arbitrary size, shape, and density in two dimensions. Higher-dimensional data is first reduced to 2-D using t-SNE. The algorithm relies on a single parameter K (the number of nearest neighbors). The results show the superiority of the algorithm. Enjoy the simplicity and the power of DenMune.
5 |
6 |
7 | []( https://pypi.org/project/denmune/)
8 | [](https://mybinder.org/v2/gh/egy1st/denmune-clustering-algorithm/HEAD)
9 | [](https://denmune.readthedocs.io/en/latest/?badge=latest)
10 | [](#colab)
11 | [](https://www.kaggle.com/egyfirst/denmune-clustering-iris-dataset?scriptVersionId=84775816)
12 | [](https://www.sciencedirect.com/science/article/abs/pii/S0031320320303927)
13 | [](https://data.mendeley.com/datasets/b73cw5n43r/4)
14 | [](https://choosealicense.com/licenses/bsd-3-clause/)
15 | [](https://circleci.com/gh/egy1st/denmune-clustering-algorithm/tree/main)
16 | [](https://codecov.io/gh/egy1st/denmune-clustering-algorithm)
17 |
18 | Based on the paper
19 | -------------------
20 |
21 | |Paper|Journal|
22 | |-------------------------------------------------------------------------------------------|-----------------------------|
23 | |Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry, [](https://www.scimagojr.com/journalsearch.php?q=24823&tip=sid&clean=0)
24 | |*DenMune: Density peak based clustering using mutual nearest neighbors*
25 | |In: Journal of Pattern Recognition, Elsevier,
26 | |volume 109, number 107589, January 2021
27 | |DOI: https://doi.org/10.1016/j.patcog.2020.107589
28 |
29 | Documentation:
30 | ---------------
31 | Documentation, including tutorials, is available at https://denmune.readthedocs.io
32 |
33 | [](https://denmune.readthedocs.io/en/latest/?badge=latest)
34 |
35 |
36 | Watch it in action
37 | -------------------
38 | These 30 seconds will show you how DenMune, a density-based algorithm, propagates:
39 |
40 | [](https://colab.research.google.com/drive/1o-tP3uvDGjxBOGYkir1lnbr74sZ06e0U?usp=sharing)
41 |
42 | []()
43 |
44 |
45 |
46 | When less means more
47 | --------------------
48 | Most classic clustering algorithms fail to detect complex clusters whose size, shape, and density differ, or that exist in noisy data.
49 | Recently, a density-based algorithm named DenMune showed great ability in detecting complex shapes even in noisy data. It detects the number of clusters automatically, and it automatically detects and removes both pre-identified and post-identified noise.
50 |
51 | It can achieve accuracy reaching 100% on most classic pattern problems and 97% on the MNIST dataset. A great advantage of this algorithm is that it is a single-parameter algorithm: all you need to do is set the number of k-nearest neighbors and the algorithm takes care of the rest. Being insensitive to changes in k makes it robust and stable.
52 |
53 | Keep in mind that the algorithm initially reduces any N-D dataset to a 2-D dataset, so a nice benefit is that you can always plot and explore your data, which makes this algorithm a good candidate for data exploration. Finally, the algorithm comes with a neat package for visualizing the data, validating it, and analyzing the whole clustering process.
54 |
55 | How to install DenMune
56 | ------------------------
57 | Simply install the DenMune clustering algorithm using the pip command from the official Python repository
58 |
59 | []( https://pypi.org/project/denmune/)
60 |
61 | From the shell run the command
62 |
63 | ```shell
64 | pip install denmune
65 | ```
66 |
67 | From jupyter notebook cell run the command
68 |
69 | ```ipython3
70 | !pip install denmune
71 | ```
72 |
73 | How to use DenMune
74 | --------------------
75 | Once DenMune is installed, you just need to import it
76 |
77 | ```python
78 | from denmune import DenMune
79 | ```
80 | ###### Please note that the first denmune (the package) is in lowercase letters, while the other one (the class itself) has D and M in capital case.
81 |
82 |
83 | Read data
84 | -----------
85 |
86 | There are four possible cases of data:
87 | - only train data without labels
88 | - only labeled train data
89 | - labeled train data in addition to test data without labels
90 | - labeled train data in addition to labeled test data (a sketch for this fourth case follows the examples below)
91 |
92 |
93 | ```python
94 | #=============================================
95 | # First scenario: train data without labels
96 | # ============================================
97 |
98 | data_path = 'datasets/denmune/chameleon/'
99 | dataset = "t7.10k.csv"
100 | data_file = data_path + dataset
101 |
102 | # train data without labels
103 | X_train = pd.read_csv(data_file, sep=',', header=None)
104 |
105 | knn = 39 # k-nearest neighbor, the only parameter required by the algorithm
106 |
107 | dm = DenMune(train_data=X_train, k_nearest=knn)
108 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
109 |
110 | ```
111 | This is an intuitive dataset for which no groundtruth is provided
112 |
113 | 
114 |
115 | ```python
116 | #=============================================
117 | # Second scenario: train data with labels
118 | # ============================================
119 |
120 | data_path = 'datasets/denmune/shapes/'
121 | dataset = "aggregation.csv"
122 | data_file = data_path + dataset
123 |
124 | # train data with labels
125 | X_train = pd.read_csv(data_file, sep=',', header=None)
126 | y_train = X_train.iloc[:, -1]
127 | X_train = X_train.drop(X_train.columns[-1], axis=1)
128 |
129 | knn = 6 # k-nearest neighbor, the only parameter required by the algorithm
130 |
131 | dm = DenMune(train_data=X_train, train_truth= y_train, k_nearest=knn)
132 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
133 | ```
134 | Dataset groundtruth
135 |
136 | 
137 |
138 | Dataset as detected by DenMune at k=6
139 |
140 | 
141 |
142 |
143 | ```python
144 | #=================================================================
145 | # Third scenario: train data with labels in addition to test data
146 | # ================================================================
147 |
148 | data_path = 'datasets/denmune/pendigits/'
149 | file_2d = data_path + 'pendigits-2d.csv'
150 |
151 | # train data with labels
152 | X_train = pd.read_csv(data_path + 'train.csv', sep=',', header=None)
153 | y_train = X_train.iloc[:, -1]
154 | X_train = X_train.drop(X_train.columns[-1], axis=1)
155 |
156 | # test data without labels
157 | X_test = pd.read_csv(data_path + 'test.csv', sep=',', header=None)
158 | X_test = X_test.drop(X_test.columns[-1], axis=1)
159 |
160 | knn = 50 # k-nearest neighbor, the only parameter required by the algorithm
161 |
162 | dm = DenMune(train_data=X_train, train_truth= y_train,
163 | test_data= X_test,
164 | k_nearest=knn)
165 | labels, validity = dm.fit_predict(show_analyzer=True, show_noise=True)
166 | ```
167 | dataset groundtruth
168 |
169 | 
170 |
171 |
172 | dataset as detected by DenMune at k=50
173 |
174 | 
175 |
176 | test data as predicted by DenMune on training the dataset at k=50
177 |
178 | 
179 |
180 |
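The fourth case, labeled train data plus labeled test data, follows the same pattern. Here is a minimal sketch under the same assumptions as the third scenario (the pendigits file names and paths are placeholders, and the last column is assumed to hold the labels); passing test_truth lets the built-in validation also score the test split:

```python
#==================================================================
# Fourth scenario (sketch): labeled train data and labeled test data
# =================================================================

import pandas as pd
from denmune import DenMune

data_path = 'datasets/denmune/pendigits/'  # placeholder path

# train data with labels in the last column
X_train = pd.read_csv(data_path + 'train.csv', sep=',', header=None)
y_train = X_train.iloc[:, -1]
X_train = X_train.drop(X_train.columns[-1], axis=1)

# test data with labels in the last column
X_test = pd.read_csv(data_path + 'test.csv', sep=',', header=None)
y_test = X_test.iloc[:, -1]
X_test = X_test.drop(X_test.columns[-1], axis=1)

knn = 50  # k-nearest neighbors, the only parameter required by the algorithm

dm = DenMune(train_data=X_train, train_truth=y_train,
             test_data=X_test, test_truth=y_test,
             k_nearest=knn)
labels, validity = dm.fit_predict(show_analyzer=True, show_noise=True)
```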
181 | Algorithm's Parameters
182 | -----------------------
183 | 1. Parameters used within the initialization of the DenMune class
184 |
185 | ```python
186 | def __init__ (self,
187 | train_data=None, test_data=None,
188 | train_truth=None, test_truth=None,
189 | file_2d =None, k_nearest=None,
190 | rgn_tsne=False, prop_step=0,
191 | ):
192 | ```
193 |
194 | - train_data:
195 | - data used for training the algorithm
196 | - default: None. It should be provided by the user, otherwise an error will be raised.
197 |
198 | - train_truth:
199 | - labels of training data
200 | - default: None
201 |
202 | - test_data:
203 | - data used for testing the algorithm
204 |
205 | - test_truth:
206 | - labels of testing data
207 | - default: None
208 |
209 | - k_nearest:
210 | - number of nearest neighbor
211 | - default: 0. The default is invalid; k_nearest should be at least 1.
212 |
213 | - rgn_tsne:
214 | - when set to True: it will regenerate the reduced 2-D version of the N-D dataset each time the algorithm runs.
215 | - when set to False: it will generate the reduced 2-D version of the N-D dataset the first time only, then reuse the saved file
216 | - default: True
217 |
218 | - file_2d: name (including location) of the file used to save/load the reduced 2-D version
219 | - if empty: the algorithm will create a temporary file named '_temp_2d'
220 | - default: None
221 |
222 | - prop_step:
223 | - size of increment used in showing the clustering propagation.
224 | - leave this parameter set to 0, the default value, unless you intentionally want to enter the propagation mode.
225 | - default: 0
226 |
227 |
228 | 2. Parameters used within the fit_predict function (a combined usage sketch follows this list):
229 |
230 | ```python
231 | def fit_predict(self,
232 | validate=True,
233 | show_plots=True,
234 | show_noise=True,
235 | show_analyzer=True
236 | ):
237 | ```
238 |
239 | - validate:
240 | - turn data validation on/off according to five measures integrated with DenMune (Accuracy, F1-score, NMI index, AMI index, ARI index)
241 | - default: True
242 |
243 | - show_plots:
244 | - show/hide plotting of data
245 | - default: True
246 |
247 | - show_noise:
248 | - show/hide noise and outliers
249 | - default: True
250 |
251 | - show_analyzer:
252 | - show/hide the analyzer
253 | - default: True
254 |
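As a quick illustration of the two parameter groups above, here is a minimal sketch that reuses the aggregation example from the second scenario; the file_2d name is only a placeholder:

```python
import pandas as pd
from denmune import DenMune

# labels are assumed to be in the last column, as in the second scenario above
X_train = pd.read_csv('datasets/denmune/shapes/aggregation.csv', sep=',', header=None)
y_train = X_train.iloc[:, -1]
X_train = X_train.drop(X_train.columns[-1], axis=1)

dm = DenMune(train_data=X_train,        # mandatory
             train_truth=y_train,       # optional ground truth
             k_nearest=6,               # the single algorithm parameter
             file_2d='aggregation-2d',  # where the reduced 2-D version is saved/loaded (placeholder name)
             rgn_tsne=False,            # reuse the saved 2-D file on later runs
             prop_step=0)               # 0 = normal mode, no propagation snapshots

labels, validity = dm.fit_predict(validate=True,         # compute the five validity measures
                                  show_plots=True,       # plot the clustering result
                                  show_noise=True,       # include detected noise in the plot
                                  show_analyzer=True)    # show the analyzer report
```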
255 | The Analyzer
256 | -------------
257 |
258 | The algorithm provides an intuitive tool called the analyzer; once called, it gives you an in-depth analysis of how your clustering results perform.
259 |
260 | 
261 |
262 | Noise Detection
263 | ----------------
264 |
265 | DenMune detects noise and outliers automatically; no further work is needed on your side.
266 |
267 | - It plots pre-identified noise in black
268 | - It plots post-identified noise in light grey
269 |
270 | You can hide them by setting the show_noise parameter to False.
271 |
272 |
273 | ```python
274 |
275 | # let us show noise
276 |
277 | dm = DenMune(train_data=X_train, k_nearest=knn)
278 | labels, validity = dm.fit_predict(show_noise=True)
279 | ```
280 |
281 | ```python
282 |
283 | # let us show clean data by removing noise
284 |
285 | dm = DenMune(train_data=X_train, k_nearest=knn)
286 | labels, validity = dm.fit_predict(show_noise=False)
287 | ```
288 |
289 | | noisy data | clean data |
290 | ----------| ---------------------------------------------------------------------------------------------------|
291 | |  |  |
292 |
293 |
294 | Validation
295 | --------------
296 | You can get your validation results using three methods (a short sketch follows this section):
297 |
298 | - by showing the Analyzer
299 | - by extracting values from the validity list returned by the fit_predict function
300 | - by extracting values from the Analyzer dictionary
301 |
302 | There are five validity measures built into the algorithm, which are:
303 |
304 | - ACC, Accuracy
305 | - F1 score
306 | - NMI index (Normalized Mutual Information)
307 | - AMI index (Adjusted Mutual Information)
308 | - ARI index (Adjusted Rand Index)
309 |
310 | 
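For the second and third methods, a minimal sketch of how one might read those values follows; the exact ordering of the returned list and the analyzer attribute name are assumptions and may differ between versions:

```python
# assumes dm comes from the examples above, with ground truth supplied
labels, validity = dm.fit_predict(validate=True, show_analyzer=True)

# method 2 (assumption): the `validity` value returned by fit_predict holds the five measures
print("validity returned by fit_predict:", validity)

# method 3 (assumption): if the analyzer dictionary is exposed as an attribute,
# it can be inspected directly; the attribute name may vary between versions
analyzer = getattr(dm, 'analyzer', None)
if analyzer is not None:
    for key, value in analyzer.items():
        print(key, value)
```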
311 |
312 | K-nearest Evolution
313 | -------------------
314 | The following chart shows the evolution of pre- and post-identified noise as the number of nearest neighbors (k) increases. The detected number of clusters is also analyzed in the same chart in relation to both types of identified noise.
315 |
316 | 
317 |
318 |
319 | The Scalability
320 | ----------------
321 | | data size | time (seconds) |
322 | |-----------|----------------|
323 | | 5000 | 2.3139 |
324 | | 10000 | 5.8752 |
325 | | 15000 | 12.4535 |
326 | | 20000 | 18.8466 |
327 | | 25000 | 28.992 |
328 | | 30000 | 39.3166 |
329 | | 35000 | 39.4842 |
330 | | 40000 | 63.7649 |
331 | | 45000 | 73.6828 |
332 | | 50000 | 86.9194 |
333 | | 55000 | 90.1077 |
334 | | 60000 | 125.0228 |
335 | | 65000 | 149.1858 |
336 | | 70000 | 177.4184 |
337 | | 75000 | 204.0712 |
338 | | 80000 | 220.502 |
339 | | 85000 | 251.7625 |
340 | | 100000 | 257.563 |
341 |
342 |
343 |
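These timings depend heavily on hardware and on k. A rough sketch of how one might reproduce this kind of measurement on synthetic data (the sizes, the blob generator, and k are assumptions, so absolute numbers will differ):

```python
import time

import pandas as pd
from sklearn.datasets import make_blobs
from denmune import DenMune

knn = 39  # placeholder k; any reasonable value works for a timing run

for size in (5000, 10000, 20000):  # assumed sizes, just for illustration
    X, _ = make_blobs(n_samples=size, centers=10, n_features=2, random_state=0)
    X = pd.DataFrame(X)

    start = time.perf_counter()
    dm = DenMune(train_data=X, k_nearest=knn)
    dm.fit_predict(validate=False, show_plots=False,
                   show_noise=False, show_analyzer=False)
    elapsed = time.perf_counter() - start
    print(f"data size: {size} | time: {elapsed:.4f} seconds")
```
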
344 | The Stability
345 | --------------
346 |
347 | The algorithm takes only a single parameter; even better, it is not sensitive to changes in that parameter, k. You can see that for yourself in the following chart. This is of great benefit to you as a data exploration analyst: you can simply explore the dataset using an arbitrary k. Being insensitive to changes in k makes it robust and stable.
348 |
349 | 
350 |
351 |
352 | Reveal the propagation
353 | -----------------------
354 |
355 | One of the most appealing features of this algorithm is that it lets you watch how your clusters propagate to construct the final output clusters.
356 | Just use the parameter 'prop_step' as in the following example:
357 |
358 | ```python
359 | dataset = "t7.10k" #
360 | data_path = 'datasets/denmune/chameleon/'
361 |
362 | # train file
363 | data_file = data_path + dataset +'.csv'
364 | X_train = pd.read_csv(data_file, sep=',', header=None)
365 |
366 |
367 | from itertools import chain
368 |
369 | # DenMune's parameters
370 | knn = 39 # number of k-nearest neighbors, the only parameter required by the algorithm
371 |
372 | # create a list of different snapshots of the propagation
373 | snapshots = chain(range(2,5), range(5,50,10), range(50, 100, 25), range(100,500,100), range(500,2000, 250), range(1000,5500, 500))
374 |
375 | from IPython.display import clear_output
376 | for snapshot in snapshots:
377 |     print("iteration", snapshot)
378 |     clear_output(wait=True)
379 |     dm = DenMune(train_data=X_train, k_nearest=knn, rgn_tsne=False, prop_step=snapshot)
380 |     labels, validity = dm.fit_predict(show_analyzer=False, show_noise=False)
381 | ```
382 |
383 | []()
384 |
385 | Interact with the algorithm
386 | ---------------------------
387 | [](https://colab.research.google.com/drive/1EUROd6TRwxW3A_XD3KTxL8miL2ias4Ue?usp=sharing)
388 |
389 | This notebook allows you to interact with the algorithm in many aspects:
390 | - you can choose which dataset to cluster (among 4 chameleon datasets)
391 | - you can decide which number of k-nearest neighbors to use
392 | - show noise on/off; thus you can investigate noise detected by the algorithm
393 | - show analyzer on/off
394 |
395 | How to run and test
396 | --------------------
397 |
398 | 1. Launch Examples in Repo2Docker Binder
399 |
400 | Simply use our repo2docker environment offered by mybinder.org, which encapsulates the algorithm and all required data in one virtual machine instance. All Jupyter notebook examples found in this repository will also be available to you, in action, to practice in this repo2docker. Thanks, mybinder.org, you made it possible!
401 |
402 | [](https://mybinder.org/v2/gh/egy1st/denmune-clustering-algorithm/HEAD)
403 |
404 | 2. Launch each Example in Kaggle workspace
405 |
406 | If you are a Kaggler like me, then Kaggle, the best workspace where data scientists meet, is a great place to test the algorithm.
407 |
408 | | Dataset | Kaggle URL |
409 | ----------| ---------------------------------------------------------------------------------------------------|
410 | |When less means more - kaggle |[]( https://www.kaggle.com/egyfirst/when-less-means-more) |
411 | |Non-groundtruth datasets - kaggle|[](https://www.kaggle.com/egyfirst/detecting-non-groundtruth-datasets) |
412 | |2D Shape datasets - kaggle|[](https://www.kaggle.com/egyfirst/detection-of-2d-shape-datasets) |
413 | |MNIST dataset kaggle|[](https://www.kaggle.com/egyfirst/get-97-using-simple-yet-one-parameter-algorithm) |
414 | |Iris dataset kaggle| [](https://www.kaggle.com/egyfirst/denmune-clustering-iris-dataset) |
415 | |Training MNIST to get 97%| []( https://www.kaggle.com/egyfirst/training-mnist-dataset-to-get-97) |
416 | |Noise detection - kaggle| []( https://www.kaggle.com/egyfirst/noise-detection) |
417 | |Validation - kaggle| [](https://www.kaggle.com/egyfirst/validate-in-5-built-in-validity-insexes) |
418 | |The beauty of propagation - kaggle| [](https://www.kaggle.com/egyfirst/the-beauty-of-clusters-propagation) |
419 | |The beauty of propagation part2 - kaggle | [](https://www.kaggle.com/egyfirst/the-beauty-of-propagation-part2) |
420 | |Snapshots of propagation -kaggle| [](https://www.kaggle.com/egyfirst/beauty-of-propagation-part3) |
421 | |Scalability kaggle| [](https://www.kaggle.com/egyfirst/scalability-vs-speed) |
422 | |Stability - kaggle| [](https://www.kaggle.com/egyfirst/stability-vs-number-of-nearest-neighbor) |
423 | |k-nearest-evolution - kaggle| [](https://www.kaggle.com/egyfirst/k-nearest-evolution) |
424 |
425 | 3. Launch each Example in Google Research, CoLab
426 |
427 | Need to test the examples one by one? Then here is another option: use Colab, offered by Google Research, to test each example individually.
428 |
429 |
430 |
431 | Here is a list of Google CoLab URLs to use the algorithm interactively
432 | ----------------------------------------------------------------------
433 |
434 |
435 | | Dataset | CoLab URL |
436 | ----------| ---------------------------------------------------------------------------------------------------|
437 | |How to use it - colab|[]( https://colab.research.google.com/drive/1J_uKdhZ3z1KeY0-wJ7Ruw2PZSY1orKQm)|
438 | |Chameleon datasets - colab|[](https://colab.research.google.com/drive/1EUROd6TRwxW3A_XD3KTxL8miL2ias4Ue?usp=sharing) |
439 | |2D Shape datasets - colab|[]( https://colab.research.google.com/drive/1EaqTPCRHSuTKB-qEbnWHpGKFj6XytMIk?usp=sharing) |
440 | |MNIST dataset - colab|[](https://colab.research.google.com/drive/1a9FGHRA6IPc5jhLOV46iEbpUeQXptSJp?usp=sharing) |
441 | |iris dataset - colab|[](https://colab.research.google.com/drive/1nKql57Xh7xVVu6NpTbg3vRdRg42R7hjm?usp=sharing) |
442 | |Get 97% by training MNIST dataset - colab|[]( https://colab.research.google.com/drive/1NeOtXEQY94oD98Ufbh3IhTHnnYwIA659) |
443 | |Non-groundtruth datasets - colab|[]( https://colab.research.google.com/drive/1d17ejQ83aUy0CZIeQ7bHTugSC9AjJ2mU?usp=sharing) |
444 | |Noise detection - colab|[]( https://colab.research.google.com/drive/1Bp3c-cJfjLWxupmrBJ_6Q4-nqIfZcII4) |
445 | |Validation - colab|[]( https://colab.research.google.com/drive/13_EVaQOv_QiNmQiMWJAcFFHPJHGCrQLe) |
446 | |How it propagates - colab|[](https://colab.research.google.com/drive/1o-tP3uvDGjxBOGYkir1lnbr74sZ06e0U?usp=sharing)|
447 | |Snapshots of propagation - colab|[](https://colab.research.google.com/drive/1vPXNKa8Rf3TnqDHSD3YSWl3g1iNSqjl2?usp=sharing)|
448 | |Scalability - colab|[](https://colab.research.google.com/drive/1d55wkBndLLapO7Yx1ePHhE8mL61j9-TH?usp=sharing)|
449 | |Stability vs number of nearest neighbors - colab|[](https://colab.research.google.com/drive/17VgVRMFBWvkSIH1yA3tMl6UQ7Eu68K2l?usp=sharing)|
450 | |k-nearest-evolution - colab|[]( https://colab.research.google.com/drive/1DZ-CQPV3WwJSiaV3-rjwPwmXw4RUh8Qj)|
451 |
452 |
453 |
454 | How to cite
455 | =====
456 | If you have used this codebase in a scientific publication and wish to cite it, please use the [Journal of Pattern Recognition article](https://www.sciencedirect.com/science/article/abs/pii/S0031320320303927)
457 |
458 | Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry, *DenMune: Density peak based clustering using mutual nearest neighbors*
459 | In: Journal of Pattern Recognition, Elsevier, volume 109, number 107589.
460 | January 2021
461 |
462 |
463 | ```bib
464 | @article{ABBAS2021107589,
465 | title = {DenMune: Density peak based clustering using mutual nearest neighbors},
466 | journal = {Pattern Recognition},
467 | volume = {109},
468 | pages = {107589},
469 | year = {2021},
470 | issn = {0031-3203},
471 | doi = {https://doi.org/10.1016/j.patcog.2020.107589},
472 | url = {https://www.sciencedirect.com/science/article/pii/S0031320320303927},
473 | author = {Mohamed Abbas and Adel El-Zoghabi and Amin Shoukry},
474 | keywords = {Clustering, Mutual neighbors, Dimensionality reduction, Arbitrary shapes, Pattern recognition, Nearest neighbors, Density peak},
475 | abstract = {Many clustering algorithms fail when clusters are of arbitrary shapes, of varying densities, or the data classes are unbalanced and close to each other, even in two dimensions. A novel clustering algorithm “DenMune” is presented to meet this challenge. It is based on identifying dense regions using mutual nearest neighborhoods of size K, where K is the only parameter required from the user, besides obeying the mutual nearest neighbor consistency principle. The algorithm is stable for a wide range of values of K. Moreover, it is able to automatically detect and remove noise from the clustering process as well as detecting the target clusters. It produces robust results on various low and high dimensional datasets relative to several known state of the art clustering algorithms.}
476 | }
477 | ```
478 |
479 | Licensing
480 | ------------
481 |
482 | The DenMune algorithm is 3-clause BSD licensed. Enjoy.
483 |
484 | [](https://choosealicense.com/licenses/bsd-3-clause/)
485 |
486 |
487 | Task List
488 | ------------
489 |
490 | - [x] Update GitHub with the DenMune source code
491 | - [x] create repo2docker repository
492 | - [x] Create pip Package
493 | - [x] create CoLab shared examples
494 | - [x] create documentation
495 | - [x] create Kaggle shared examples
496 | - [x] PEP8 compliant
497 | - [x] Continuous integration
498 | - [x] scikit-learn compatible
499 | - [X] Unit tests (coverage: 100%)
500 | - [ ] create conda package
501 |
502 |
--------------------------------------------------------------------------------
/PyPi Package/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel"
5 | ]
6 | build-backend = "setuptools.build_meta"
--------------------------------------------------------------------------------
/PyPi Package/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = denmune
3 | version = 0.0.96
4 | author = Mohamed Ali Abbas
5 | author_email = mohamed.alyabbas@outlook.com
6 | description = This is the package for DenMune Clustering Algorithm published in paper https://doi.org/10.1016/j.patcog.2020.107589
7 | long_description = file: README.md
8 | long_description_content_type = text/markdown
9 | url = https://github.com/egy1st/denmune-clustering-algorithm
10 | project_urls =
11 | Bug Tracker = https://github.com/pypa/sampleproject/issues
12 | classifiers =
13 | Programming Language :: Python :: 3
14 | License :: OSI Approved :: BSD License
15 | Operating System :: OS Independent
16 |
17 | [options]
18 | package_dir =
19 | = src
20 | packages = find:
21 | python_requires = >=3.6
22 |
23 | [options.packages.find]
24 | where = src
25 |
--------------------------------------------------------------------------------
/PyPi Package/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup(
4 | install_requires=[
5 |
6 | 'numpy==1.23.5',
7 | 'pandas==1.5.3',
8 | 'matplotlib==3.7.2',
9 | 'scikit-learn==1.2.2',
10 | 'seaborn==0.12.2',
11 | 'ngt==2.0.4',
12 | 'anytree==2.8',
13 | 'treelib==1.6.1',
14 | ]
15 |
16 | )
17 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: denmune
3 | Version: 0.0.96
4 | Summary: This is the package for DenMune Clustering Algorithm published in paper https://doi.org/10.1016/j.patcog.2020.107589
5 | Home-page: https://github.com/egy1st/denmune-clustering-algorithm
6 | Author: Mohamed Ali Abbas
7 | Author-email: mohamed.alyabbas@outlook.com
8 | License: UNKNOWN
9 | Project-URL: Bug Tracker, https://github.com/pypa/sampleproject/issues
10 | Platform: UNKNOWN
11 | Classifier: Programming Language :: Python :: 3
12 | Classifier: License :: OSI Approved :: BSD License
13 | Classifier: Operating System :: OS Independent
14 | Requires-Python: >=3.6
15 | Description-Content-Type: text/markdown
16 | License-File: LICENSE
17 |
18 | DenMune: A density-peak clustering algorithm
19 | =============================================
20 |
21 | DenMune is a clustering algorithm that can find clusters of arbitrary size, shape, and density in two dimensions. Higher-dimensional data is first reduced to 2-D using t-SNE. The algorithm relies on a single parameter K (the number of nearest neighbors). The results show the superiority of the algorithm. Enjoy the simplicity and the power of DenMune.
22 |
23 |
24 | []( https://pypi.org/project/denmune/)
25 | [](https://mybinder.org/v2/gh/egy1st/denmune-clustering-algorithm/HEAD)
26 | [](https://denmune.readthedocs.io/en/latest/?badge=latest)
27 | [](#colab)
28 | [](https://www.kaggle.com/egyfirst/denmune-clustering-iris-dataset?scriptVersionId=84775816)
29 | [](https://www.sciencedirect.com/science/article/abs/pii/S0031320320303927)
30 | [](https://data.mendeley.com/datasets/b73cw5n43r/4)
31 | [](https://choosealicense.com/licenses/bsd-3-clause/)
32 | [](https://circleci.com/gh/egy1st/denmune-clustering-algorithm/tree/main)
33 | [](https://codecov.io/gh/egy1st/denmune-clustering-algorithm)
34 | [](https://github.com/egy1st/denmune-clustering-algorithm/actions/workflows/python-package.yml)
35 |
36 | Based on the paper
37 | -------------------
38 |
39 | |Paper|Journal|
40 | |-------------------------------------------------------------------------------------------|-----------------------------|
41 | |Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry, [](https://www.scimagojr.com/journalsearch.php?q=24823&tip=sid&clean=0)
42 | |*DenMune: Density peak based clustering using mutual nearest neighbors*
43 | |In: Journal of Pattern Recognition, Elsevier,
44 | |volume 109, number 107589, January 2021
45 | |DOI: https://doi.org/10.1016/j.patcog.2020.107589
46 |
47 | Documentation:
48 | ---------------
49 | Documentation, including tutorials, is available at https://denmune.readthedocs.io
50 |
51 | [](https://denmune.readthedocs.io/en/latest/?badge=latest)
52 |
53 |
54 | Watch it in action
55 | -------------------
56 | These 30 seconds will show you how DenMune, a density-based algorithm, propagates:
57 |
58 | [](https://colab.research.google.com/drive/1o-tP3uvDGjxBOGYkir1lnbr74sZ06e0U?usp=sharing)
59 |
60 | []()
61 |
62 |
63 |
64 | When less means more
65 | --------------------
66 | Most classic clustering algorithms fail to detect complex clusters whose size, shape, and density differ, or that exist in noisy data.
67 | Recently, a density-based algorithm named DenMune showed great ability in detecting complex shapes even in noisy data. It detects the number of clusters automatically, and it automatically detects and removes both pre-identified and post-identified noise.
68 |
69 | It can achieve accuracy reaching 100% on most classic pattern problems and 97% on the MNIST dataset. A great advantage of this algorithm is that it is a single-parameter algorithm: all you need to do is set the number of k-nearest neighbors and the algorithm takes care of the rest. Being insensitive to changes in k makes it robust and stable.
70 |
71 | Keep in mind that the algorithm initially reduces any N-D dataset to a 2-D dataset, so a nice benefit is that you can always plot and explore your data, which makes this algorithm a good candidate for data exploration. Finally, the algorithm comes with a neat package for visualizing the data, validating it, and analyzing the whole clustering process.
72 |
73 | How to install DenMune
74 | ------------------------
75 | Simply install the DenMune clustering algorithm using the pip command from the official Python repository
76 |
77 | []( https://pypi.org/project/denmune/)
78 |
79 | From the shell run the command
80 |
81 | ```shell
82 | pip install denmune
83 | ```
84 |
85 | From jupyter notebook cell run the command
86 |
87 | ```ipython3
88 | !pip install denmune
89 | ```
90 |
91 | How to use DenMune
92 | --------------------
93 | Once DenMune is installed, you just need to import it
94 |
95 | ```python
96 | from denmune import DenMune
97 | ```
98 | ###### Please note that the first denmune (the package) is in lowercase letters, while the other one (the class itself) has D and M in capital case.
99 |
100 |
101 | Read data
102 | -----------
103 |
104 | There are four possible cases of data:
105 | - only train data without labels
106 | - only labeled train data
107 | - labeled train data in addition to test data without labels
108 | - labeled train data in addition to labeled test data
109 |
110 |
111 | ```python
112 | #=============================================
113 | # First scenario: train data without labels
114 | # ============================================
115 |
116 | data_path = 'datasets/denmune/chameleon/'
117 | dataset = "t7.10k.csv"
118 | data_file = data_path + dataset
119 |
120 | # train data without labels
121 | X_train = pd.read_csv(data_file, sep=',', header=None)
122 |
123 | knn = 39 # k-nearest neighbor, the only parameter required by the algorithm
124 |
125 | dm = DenMune(train_data=X_train, k_nearest=knn)
126 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
127 |
128 | ```
129 | This is an intuitive dataset for which no groundtruth is provided
130 |
131 | 
132 |
133 | ```python
134 | #=============================================
135 | # Second scenario: train data with labels
136 | # ============================================
137 |
138 | data_path = 'datasets/denmune/shapes/'
139 | dataset = "aggregation.csv"
140 | data_file = data_path + dataset
141 |
142 | # train data with labels
143 | X_train = pd.read_csv(data_file, sep=',', header=None)
144 | y_train = X_train.iloc[:, -1]
145 | X_train = X_train.drop(X_train.columns[-1], axis=1)
146 |
147 | knn = 6 # k-nearest neighbor, the only parameter required by the algorithm
148 |
149 | dm = DenMune(train_data=X_train, train_truth= y_train, k_nearest=knn)
150 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
151 | ```
152 | Dataset groundtruth
153 |
154 | 
155 |
156 | Dataset as detected by DenMune at k=6
157 |
158 | 
159 |
160 |
161 | ```python
162 | #=================================================================
163 | # Third scenario: train data with labels in addition to test data
164 | # ================================================================
165 |
166 | data_path = 'datasets/denmune/pendigits/'
167 | file_2d = data_path + 'pendigits-2d.csv'
168 |
169 | # train data with labels
170 | X_train = pd.read_csv(data_path + 'train.csv', sep=',', header=None)
171 | y_train = X_train.iloc[:, -1]
172 | X_train = X_train.drop(X_train.columns[-1], axis=1)
173 |
174 | # test data without labels
175 | X_test = pd.read_csv(data_path + 'test.csv', sep=',', header=None)
176 | X_test = X_test.drop(X_test.columns[-1], axis=1)
177 |
178 | knn = 50 # k-nearest neighbor, the only parameter required by the algorithm
179 |
180 | dm = DenMune(train_data=X_train, train_truth= y_train,
181 | test_data= X_test,
182 | k_nearest=knn)
183 | labels, validity = dm.fit_predict(show_analyzer=True, show_noise=True)
184 | ```
185 | dataset groundtruth
186 |
187 | 
188 |
189 |
190 | dataset as detected by DenMune at k=50
191 |
192 | 
193 |
194 | test data as predicted by DenMune on training the dataset at k=50
195 |
196 | 
197 |
198 |
199 | Algorithm's Parameters
200 | -----------------------
201 | 1. Parameters used within the initialization of the DenMune class
202 |
203 | ```python
204 | def __init__ (self,
205 | train_data=None, test_data=None,
206 | train_truth=None, test_truth=None,
207 | file_2d ='_temp_2d', k_nearest=10,
208 | rgn_tsne=False, prop_step=0,
209 | ):
210 | ```
211 |
212 | - train_data:
213 | - data used for training the algorithm
214 | - default: None. It should be provided by the user, otherwise an error will be raised.
215 |
216 | - train_truth:
217 | - labels of training data
218 | - default: None
219 |
220 | - test_data:
221 | - data used for testing the algorithm
222 |
223 | - test_truth:
224 | - labels of testing data
225 | - default: None
226 |
227 | - k_nearest:
228 | - number of nearest neighbor
229 | - default: 10. It should be provided by the user.
230 |
231 | - rgn_tsne:
232 | - when set to True: it will regenerate the reduced 2-D version of the N-D dataset each time the algorithm runs.
233 | - when set to False: it will generate the reduced 2-D version of the N-D dataset the first time only, then reuse the saved file
234 | - default: True
235 |
236 | - file_2d: name (including location) of the file used to save/load the reduced 2-D version
237 | - if empty: the algorithm will create a temporary file named '_temp_2d'
238 | - default: _temp_2d
239 |
240 | - prop_step:
241 | - size of increment used in showing the clustering propagation.
242 | - leave this parameter set to 0, the default value, unless you intentionally want to enter the propagation mode.
243 | - default: 0
244 |
245 |
246 | 2. Parameters used within the fit_predict function:
247 |
248 | ```python
249 | def fit_predict(self,
250 | validate=True,
251 | show_plots=True,
252 | show_noise=True,
253 | show_analyzer=True
254 | ):
255 | ```
256 |
257 | - validate:
258 | - turn data validation on/off according to five measures integrated with DenMune (Accuracy, F1-score, NMI index, AMI index, ARI index)
259 | - default: True
260 |
261 | - show_plots:
262 | - show/hide plotting of data
263 | - default: True
264 |
265 | - show_noise:
266 | - show/hide noise and outliers
267 | - default: True
268 |
269 | - show_analyzer:
270 | - show/hide the analyzer
271 | - default: True
272 |
273 | The Analyzer
274 | -------------
275 |
276 | The algorithm provides an intuitive tool called the analyzer; once called, it gives you an in-depth analysis of how your clustering results perform.
277 |
278 | 
279 |
280 | Noise Detection
281 | ----------------
282 |
283 | DenMune detects noise and outliers automatically; no further work is needed on your side.
284 |
285 | - It plots pre-identified noise in black
286 | - It plots post-identified noise in light grey
287 |
288 | You can hide them by setting the show_noise parameter to False.
289 |
290 |
291 | ```python
292 |
293 | # let us show noise
294 |
295 | dm = DenMune(train_data=X_train, k_nearest=knn)
296 | labels, validity = dm.fit_predict(show_noise=True)
297 | ```
298 |
299 | ```python
300 |
301 | # let us show clean data by removing noise
302 |
303 | dm = DenMune(train_data=X_train, k_nearest=knn)
304 | labels, validity = dm.fit_predict(show_noise=False)
305 | ```
306 |
307 | | noisy data | clean data |
308 | ----------| ---------------------------------------------------------------------------------------------------|
309 | |  |  |
310 |
311 |
312 | Validation
313 | --------------
314 | You can get your validation results using three methods:
315 |
316 | - by showing the Analyzer
317 | - by extracting values from the validity list returned by the fit_predict function
318 | - by extracting values from the Analyzer dictionary
319 |
320 | There are five validity measures built into the algorithm, which are:
321 |
322 | - ACC, Accuracy
323 | - F1 score
324 | - NMI index (Normalized Mutual Information)
325 | - AMI index (Adjusted Mutual Information)
326 | - ARI index (Adjusted Rand Index)
327 |
328 | 
329 |
330 | K-nearest Evolution
331 | -------------------
332 | The following chart shows the evolution of pre- and post-identified noise as the number of nearest neighbors (k) increases. The detected number of clusters is also analyzed in the same chart in relation to both types of identified noise.
333 |
334 | 
335 |
336 |
337 | The Scalability
338 | ----------------
339 | | data size | time (seconds) |
340 | |-----------|----------------|
341 | | 5000 | 2.3139 |
342 | | 10000 | 5.8752 |
343 | | 15000 | 12.4535 |
344 | | 20000 | 18.8466 |
345 | | 25000 | 28.992 |
346 | | 30000 | 39.3166 |
347 | | 35000 | 39.4842 |
348 | | 40000 | 63.7649 |
349 | | 45000 | 73.6828 |
350 | | 50000 | 86.9194 |
351 | | 55000 | 90.1077 |
352 | | 60000 | 125.0228 |
353 | | 65000 | 149.1858 |
354 | | 70000 | 177.4184 |
355 | | 75000 | 204.0712 |
356 | | 80000 | 220.502 |
357 | | 85000 | 251.7625 |
358 | | 100000 | 257.563 |
359 |
360 |
361 |
362 | The Stability
363 | --------------
364 |
365 | The algorithm takes only a single parameter; even better, it is not sensitive to changes in that parameter, k. You can see that for yourself in the following chart. This is of great benefit to you as a data exploration analyst: you can simply explore the dataset using an arbitrary k. Being insensitive to changes in k makes it robust and stable.
366 |
367 | 
368 |
369 |
370 | Reveal the propagation
371 | -----------------------
372 |
373 | One of the most appealing features of this algorithm is that it lets you watch how your clusters propagate to construct the final output clusters.
374 | Just use the parameter 'prop_step' as in the following example:
375 |
376 | ```python
377 | dataset = "t7.10k" #
378 | data_path = 'datasets/denmune/chameleon/'
379 |
380 | # train file
381 | data_file = data_path + dataset +'.csv'
382 | X_train = pd.read_csv(data_file, sep=',', header=None)
383 |
384 |
385 | from itertools import chain
386 |
387 | # DenMune's parameters
388 | knn = 39 # number of k-nearest neighbors, the only parameter required by the algorithm
389 |
390 | # create a list of different snapshots of the propagation
391 | snapshots = chain(range(2,5), range(5,50,10), range(50, 100, 25), range(100,500,100), range(500,2000, 250), range(1000,5500, 500))
392 |
393 | from IPython.display import clear_output
394 | for snapshot in snapshots:
395 |     print("iteration", snapshot)
396 |     clear_output(wait=True)
397 |     dm = DenMune(train_data=X_train, k_nearest=knn, rgn_tsne=False, prop_step=snapshot)
398 |     labels, validity = dm.fit_predict(show_analyzer=False, show_noise=False)
399 | ```
400 |
401 | []()
402 |
403 | Interact with the algorithm
404 | ---------------------------
405 | [](https://colab.research.google.com/drive/1EUROd6TRwxW3A_XD3KTxL8miL2ias4Ue?usp=sharing)
406 |
407 | This notebook allows you to interact with the algorithm in many aspects:
408 | - you can choose which dataset to cluster (among 4 chameleon datasets)
409 | - you can decide which number of k-nearest neighbors to use
410 | - show noise on/off; thus you can investigate noise detected by the algorithm
411 | - show analyzer on/off
412 |
413 | How to run and test
414 | --------------------
415 |
416 | 1. Launch Examples in Repo2Docker Binder
417 |
418 | Simply use our repo2docker environment offered by mybinder.org, which encapsulates the algorithm and all required data in one virtual machine instance. All Jupyter notebook examples found in this repository will also be available to you, in action, to practice in this repo2docker. Thanks, mybinder.org, you made it possible!
419 |
420 | [](https://mybinder.org/v2/gh/egy1st/denmune-clustering-algorithm/HEAD)
421 |
422 | 2. Launch each Example in Kaggle workspace
423 |
424 | If you are a Kaggler like me, then Kaggle, the best workspace where data scientists meet, is a great place to test the algorithm.
425 |
426 | | Dataset | Kaggle URL |
427 | ----------| ---------------------------------------------------------------------------------------------------|
428 | |When less means more - kaggle |[]( https://www.kaggle.com/egyfirst/when-less-means-more) |
429 | |Non-groundtruth datasets - kaggle|[](https://www.kaggle.com/egyfirst/detecting-non-groundtruth-datasets) |
430 | |2D Shape datasets - kaggle|[](https://www.kaggle.com/egyfirst/detection-of-2d-shape-datasets) |
431 | |MNIST dataset kaggle|[](https://www.kaggle.com/egyfirst/get-97-using-simple-yet-one-parameter-algorithm) |
432 | |Iris dataset kaggle| [](https://www.kaggle.com/egyfirst/denmune-clustering-iris-dataset) |
433 | |Training MNIST to get 97%| []( https://www.kaggle.com/egyfirst/training-mnist-dataset-to-get-97) |
434 | |Noise detection - kaggle| []( https://www.kaggle.com/egyfirst/noise-detection) |
435 | |Validation - kaggle| [](https://www.kaggle.com/egyfirst/validate-in-5-built-in-validity-insexes) |
436 | |The beauty of propagation - kaggle| [](https://www.kaggle.com/egyfirst/the-beauty-of-clusters-propagation) |
437 | |The beauty of propagation part2 - kaggle | [](https://www.kaggle.com/egyfirst/the-beauty-of-propagation-part2) |
438 | |Snapshots of propagation -kaggle| [](https://www.kaggle.com/egyfirst/beauty-of-propagation-part3) |
439 | |Scalability kaggle| [](https://www.kaggle.com/egyfirst/scalability-vs-speed) |
440 | |Stability - kaggle| [](https://www.kaggle.com/egyfirst/stability-vs-number-of-nearest-neighbor) |
441 | |k-nearest-evolution - kaggle| [](https://www.kaggle.com/egyfirst/k-nearest-evolution) |
442 |
443 | 3. Launch each Example in Google Research, CoLab
444 |
445 | Need to test the examples one by one? Then here is another option: use Colab, offered by Google Research, to test each example individually.
446 |
447 |
448 |
449 | Here is a list of Google CoLab URLs to use the algorithm interactively
450 | ----------------------------------------------------------------------
451 |
452 |
453 | | Dataset | CoLab URL |
454 | ----------| ---------------------------------------------------------------------------------------------------|
455 | |How to use it - colab|[]( https://colab.research.google.com/drive/1J_uKdhZ3z1KeY0-wJ7Ruw2PZSY1orKQm)|
456 | |Chameleon datasets - colab|[](https://colab.research.google.com/drive/1EUROd6TRwxW3A_XD3KTxL8miL2ias4Ue?usp=sharing) |
457 | |2D Shape datasets - colab|[]( https://colab.research.google.com/drive/1EaqTPCRHSuTKB-qEbnWHpGKFj6XytMIk?usp=sharing) |
458 | |MNIST dataset - colab|[](https://colab.research.google.com/drive/1a9FGHRA6IPc5jhLOV46iEbpUeQXptSJp?usp=sharing) |
459 | |iris dataset - colab|[](https://colab.research.google.com/drive/1nKql57Xh7xVVu6NpTbg3vRdRg42R7hjm?usp=sharing) |
460 | |Get 97% by training MNIST dataset - colab|[]( https://colab.research.google.com/drive/1NeOtXEQY94oD98Ufbh3IhTHnnYwIA659) |
461 | |Non-groundtruth datasets - colab|[]( https://colab.research.google.com/drive/1d17ejQ83aUy0CZIeQ7bHTugSC9AjJ2mU?usp=sharing) |
462 | |Noise detection - colab|[]( https://colab.research.google.com/drive/1Bp3c-cJfjLWxupmrBJ_6Q4-nqIfZcII4) |
463 | |Validation - colab|[]( https://colab.research.google.com/drive/13_EVaQOv_QiNmQiMWJAcFFHPJHGCrQLe) |
464 | |How it propagates - colab|[](https://colab.research.google.com/drive/1o-tP3uvDGjxBOGYkir1lnbr74sZ06e0U?usp=sharing)|
465 | |Snapshots of propagation - colab|[](https://colab.research.google.com/drive/1vPXNKa8Rf3TnqDHSD3YSWl3g1iNSqjl2?usp=sharing)|
466 | |Scalability - colab|[](https://colab.research.google.com/drive/1d55wkBndLLapO7Yx1ePHhE8mL61j9-TH?usp=sharing)|
467 | |Stability vs number of nearest neighbors - colab|[](https://colab.research.google.com/drive/17VgVRMFBWvkSIH1yA3tMl6UQ7Eu68K2l?usp=sharing)|
468 | |k-nearest-evolution - colab|[]( https://colab.research.google.com/drive/1DZ-CQPV3WwJSiaV3-rjwPwmXw4RUh8Qj)|
469 |
470 |
471 |
472 | How to cite
473 | =====
474 | If you have used this codebase in a scientific publication and wish to cite it, please use the [Pattern Recognition journal article](https://www.sciencedirect.com/science/article/abs/pii/S0031320320303927)
475 |
476 | Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry, *DenMune: Density peak based clustering using mutual nearest neighbors*
477 | In: Pattern Recognition, Elsevier, volume 109, article number 107589,
478 | January 2021
479 |
480 |
481 | ```bib
482 | @article{ABBAS2021107589,
483 | title = {DenMune: Density peak based clustering using mutual nearest neighbors},
484 | journal = {Pattern Recognition},
485 | volume = {109},
486 | pages = {107589},
487 | year = {2021},
488 | issn = {0031-3203},
489 | doi = {https://doi.org/10.1016/j.patcog.2020.107589},
490 | url = {https://www.sciencedirect.com/science/article/pii/S0031320320303927},
491 | author = {Mohamed Abbas and Adel El-Zoghabi and Amin Shoukry},
492 | keywords = {Clustering, Mutual neighbors, Dimensionality reduction, Arbitrary shapes, Pattern recognition, Nearest neighbors, Density peak},
493 | abstract = {Many clustering algorithms fail when clusters are of arbitrary shapes, of varying densities, or the data classes are unbalanced and close to each other, even in two dimensions. A novel clustering algorithm “DenMune” is presented to meet this challenge. It is based on identifying dense regions using mutual nearest neighborhoods of size K, where K is the only parameter required from the user, besides obeying the mutual nearest neighbor consistency principle. The algorithm is stable for a wide range of values of K. Moreover, it is able to automatically detect and remove noise from the clustering process as well as detecting the target clusters. It produces robust results on various low and high dimensional datasets relative to several known state of the art clustering algorithms.}
494 | }
495 | ```
496 |
497 | Licensing
498 | ------------
499 |
500 | The DenMune algorithm is 3-clause BSD licensed. Enjoy.
501 |
502 | [](https://choosealicense.com/licenses/bsd-3-clause/)
503 |
504 |
505 | Task List
506 | ------------
507 |
508 | - [x] Update GitHub with the DenMune source code
509 | - [x] Create repo2docker repository
510 | - [x] Create pip package
511 | - [x] Create CoLab shared examples
512 | - [x] Create documentation
513 | - [x] Create Kaggle shared examples
514 | - [x] PEP8 compliant
515 | - [x] Continuous integration
516 | - [x] scikit-learn compatible
517 | - [x] Unit tests (coverage: 97%)
518 | - [ ] Create conda package
519 |
520 |
521 |
522 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | LICENSE
2 | MANIFEST.in
3 | README.md
4 | pyproject.toml
5 | setup.cfg
6 | setup.py
7 | src/denmune/__init__.py
8 | src/denmune/denmune.py
9 | src/denmune.egg-info/PKG-INFO
10 | src/denmune.egg-info/SOURCES.txt
11 | src/denmune.egg-info/dependency_links.txt
12 | src/denmune.egg-info/requires.txt
13 | src/denmune.egg-info/top_level.txt
--------------------------------------------------------------------------------
/PyPi Package/src/denmune.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune.egg-info/requires.txt:
--------------------------------------------------------------------------------
1 | numpy>=1.18.5
2 | pandas>=1.0.3
3 | matplotlib>=3.2.1
4 | scikit-learn>=0.22.1
5 | seaborn>=0.10.1
6 | ngt>=1.11.6
7 | anytree>=2.8.0
8 | treelib>=1.6.1
9 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | denmune
2 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/.name:
--------------------------------------------------------------------------------
1 | denmune.py
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/denmune.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | from .denmune import DenMune
--------------------------------------------------------------------------------
/PyPi Package/src/denmune/denmune.py:
--------------------------------------------------------------------------------
1 | # ====================================================================================================================
2 | # About the source code and the associated published paper
3 | # ====================================================================================================================
4 | # This is the source code of DenMune Clustering Algorithm accompanied with the experimental work
5 | # which is published in Elsevier Pattern Recognition, Volume 109, January 2021
6 | # The paper (article no. 107589) can be accessed at https://doi.org/10.1016/j.patcog.2020.107589
7 | # The source code and several examples of using it can be accessed from
8 | # GitHub's repository at https://github.com/egy1st/denmune-clustering-algorithm
9 | # Authors: Mohamed Abbas, Adel El-Zoghabi, and Amin Shoukry
10 | # Edition 0.0.2.3 Released 29-12-2021
11 | # PyPi package installation from https://pypi.org/project/denmune/
12 | # ====================================================================================================================
13 |
14 |
15 | # ====================================================================================================================
16 | # About the DenMune Algorithm
17 | # ====================================================================================================================
18 | # DenMune Clustering Algorithm's Highlights
19 | # DenMune is a clustering algorithm that can find clusters of arbitrary size, shapes and densities in two dimensions.
20 | # Higher dimensions are first reduced to 2-D using t-SNE.
21 | # The algorithm relies on a single parameter K (the number of nearest neighbors).
22 | # The results show the superiority of DenMune.
23 | # =====================================================================================================================
24 |
25 |
26 | # =====================================================================================================================
27 | # About me
28 | # =====================================================================================================================
29 | # Name: Mohamed Ali Abbas
30 | # Egypt - Alexandria - Smouha
31 | # Cell-phone: +20-01007500290
32 | # Personal E-mail: mohamed.alyabbas@outlook.com
33 | # Business E-mail: 01@zerobytes.one
34 | # website: https://zerobytes.one
35 | # LinkedIn: https://www.linkedin.com/in/mohabbas/
36 | # Github: https://github.com/egy1st
37 | # Kaggle: https://www.kaggle.com/egyfirst
38 | # Udemy: https://www.udemy.com/user/mohammad-ali-abbas/
39 | # Facebook: https://www.facebook.com/ZeroBytes.One
40 | # =====================================================================================================================
41 |
42 | import operator
43 | import os.path
44 | import time
45 |
46 | import matplotlib.pyplot as plt
47 | import ngtpy
48 | import numpy as np
49 | import pandas as pd
50 | import seaborn as sns
51 | from anytree import Node
52 | from numpy import genfromtxt
53 | from sklearn.manifold import TSNE
54 | from treelib import Tree as tr
55 |
56 | sns.set_context('poster')
57 | sns.set_color_codes()
58 | plot_kwds = {'alpha': 0.99, 's': 80, 'linewidths': 0}
59 |
60 |
61 | # import for possible needs
62 | # from sklearn.metrics import confusion_matrix
63 | # from sklearn import metrics
64 | # import sklearn.cluster as cluster
65 |
66 |
67 | class DataPoint():
68 |
69 | def __init__(self, id):
70 | self.point_id = id
71 | self.class_id = 0 # 0 not clustered but -1 means a noise
72 | self.refer_to = []
73 | self.referred_by = []
74 | self.reference = []
75 | self.visited = False
76 | self.homogeneity = 0
77 |
78 |
79 | class DenMune():
80 |
81 | def __init__(self,
82 | train_data=None, test_data=None,
83 | train_truth=None, test_truth=None,
84 | file_2d=None, k_nearest=0,
85 | rgn_tsne=False, prop_step=0,
86 | ):
87 |
88 | if train_data is None:
89 | raise Exception("No data is provided. At least train data should be provided. Set the train_data argument properly.")
90 | else:
91 | self.data_indicator = 1
92 |
93 | if train_truth is not None:
94 | self.data_indicator += 2
95 |
96 | if test_data is not None:
97 | self.data_indicator += 4
98 |
99 | if test_truth is not None:
100 | self.data_indicator += 8
101 |
102 | if train_data is not None and train_truth is None and test_truth is not None:
103 | raise Exception("You should provide labels for your training data to be allowed to work with test data. Set the train_truth argument properly.")
104 | if train_data is not None and train_truth is None and test_data is not None :
105 | raise Exception("You should provide labels for your training data to be allowed to work with test data. Set the train_truth argument properly.")
106 | if train_data is not None and train_truth is not None and test_truth is not None and test_data is None:
107 | raise Exception("Although labels for the test data are provided, the test data itself is not. Set the test_data argument properly.")
108 |
109 | self.analyzer = {}
110 | self.analyzer['n_points'] = {}
111 | if isinstance(train_data, pd.DataFrame):
112 | train_data = train_data.to_numpy()
113 | train_data = train_data.copy(order='C')
114 | if isinstance(test_data, pd.DataFrame):
115 | test_data = test_data.to_numpy()
116 | test_data = test_data.copy(order='C')
117 | if isinstance(train_truth, pd.Series):
118 | train_truth = train_truth.to_numpy()
119 | train_truth = train_truth.copy(order='C')
120 | if isinstance(test_truth, pd.Series):
121 | test_truth = test_truth.to_numpy()
122 | test_truth = test_truth.copy(order='C')
123 |
124 | self.train_sz = len(train_data)
125 |
126 | if test_data is not None:
127 | data = np.append(train_data, test_data, axis=0)
128 | self.test_sz = len(test_data)
129 | else:
130 | self.test_sz = 0
131 | data = train_data
132 |
133 | if test_truth is not None:
134 | self.labels_truth = np.append(train_truth, test_truth, axis=0)
135 | else:
136 | self.labels_truth = train_truth
137 |
138 | self.analyzer["n_points"]["size"] = len(
139 | data) # data.shape[0] # this will be changed in preplot_Clusters when we plot only train or test data
140 |
141 | self.analyzer['exec_time'] = {}
142 | self.analyzer["exec_time"]["t_SNE"] = 0
143 | self.analyzer['n_points']["noise"] = {}
144 | self.analyzer["n_points"]["noise"]["type-1"] = 0
145 | self.analyzer["n_points"]["noise"]["type-2"] = 0
146 | self.analyzer['n_points']["weak"] = {}
147 | self.analyzer["n_points"]["weak"]["all"] = 0
148 | self.analyzer["n_points"]["weak"]["succeeded to merge"] = 0
149 | self.analyzer["n_points"]["weak"]["failed to merge"] = 0
150 | self.analyzer["n_points"]["dim"] = data.shape[1]
151 | self.analyzer["n_clusters"] = {}
152 | self.analyzer["n_clusters"]["actual"] = 0
153 | self.analyzer["n_clusters"]["detected"] = 0
154 | # self.delimiter = delimiter
155 | self.debuger = {}
156 |
157 | if k_nearest == 0:
158 | raise Exception("k-nearest neighbor should be at least 1")
159 |
160 | if file_2d is None:
161 | file_2d = '_temp_2d'
162 |
163 | if data.shape[1] != 2 and file_2d == '_temp_2d':
164 | # raise Exception("Sorry, this is N-D dataset, file-2d parameter should not be empty")
165 | start = time.time()
166 | self.generate_tsne(data, 2, file_2d='_temp_2d')
167 | end = time.time()
168 | self.analyzer["exec_time"]["t_SNE"] = end - start
169 | data = genfromtxt(file_2d, delimiter=',')
170 | elif data.shape[1] != 2 and file_2d != '_temp_2d':
171 | if not os.path.isfile(file_2d) or rgn_tsne == True:
172 | start = time.time()
173 | self.generate_tsne(data, 2, file_2d)
174 | end = time.time()
175 | self.analyzer["exec_time"]["t_SNE"] = end - start
176 | data = genfromtxt(file_2d, delimiter=',')
177 |
178 | start_time = time.time()
179 |
180 | self.alg_name = 'denmune'
181 | self.prop_step = prop_step
182 | self.data = data
183 | self.train_data = train_data
184 | self.test_data = test_data
185 | self.dp_count = self.data.shape[0]
186 | self.dp_dim = self.data.shape[1]
187 | self.k_nearest = k_nearest
188 | self.dp_dis = []
189 | self.train_truth = train_truth
190 | self.test_truth = test_truth
191 |
192 | self.DataPoints = []
193 | self.ClassPoints = {}
194 | self.KernelPoints = []
195 |
196 | self.init_DataPoints()
197 | self.kd_NGT()
198 | self.load_DataPoints() # load_DataPoints must come after kd_NGT()
199 | self.compute_Links()
200 | # self.semi_init_DataPoints #it is useful with csharp and CNune only
201 | self.find_Noise()
202 | self.sort_DataPoints()
203 | self.prepare_Clusters()
204 | self.attach_Points()
205 |
206 | end_time = time.time()
207 | self.analyzer["exec_time"]["DenMune"] = end_time - start_time
208 |
209 | return None # __init__ should return None
210 |
211 | def kd_NGT(self):
212 |
213 | if len(self.dp_dis) == 0:
214 |
215 | ngtpy.create(b"tmp", self.dp_dim)
216 | index = ngtpy.Index(b"tmp")
217 | index.batch_insert(self.data)
218 | index.save()
219 |
220 | k = self.k_nearest
221 | start = time.time()
222 | self.dp_dis = []
223 | for i in range(self.dp_count):
224 | query = self.data[i]
225 | result = index.search(query, k + 1)[1:] # we skip first distance from a point to itself
226 | self.dp_dis.append(result)
227 |
228 | end = time.time()
229 | self.analyzer["exec_time"]["NGT"] = end - start
230 |
231 | def getValue(self, dic, what, who, other=False):
232 |
233 | if what == 'max' and who == 'value' and other == True:
234 | val = max(dic.items(), key=operator.itemgetter(1))[0] # max value==>key
235 | # these cases will never be used here but keep them for future use.
236 | """"
237 | elif what == 'max' and who == 'key' and other == False:
238 | val = max(dic.items(), key=operator.itemgetter(0))[0] # max key
239 | elif what == 'max' and who == 'key' and other == True:
240 | val = max(dic.items(), key=operator.itemgetter(0))[1] # max key==>Value
241 | elif what == 'max' and who == 'value' and other == False:
242 | val = max(dic.items(), key=operator.itemgetter(1))[1] # max value
243 | """
244 | return val
245 |
246 | def init_DataPoints(self):
247 |
248 | self.DataPoints = []
249 | self.KernelPoints = []
250 |
251 | for i in range(self.dp_count):
252 | dp = DataPoint(i)
253 | # no need since datapoint is initialised with these values
254 | """
255 | dp.refer_to = []
256 | dp.referred_by = []
257 | dp.reference = []
258 | dp.class_id = 0
259 | dp.visited = False
260 | dp.homogeneity = 0.0
261 | """
262 | self.DataPoints.append(dp)
263 | return 0
264 |
265 | """
266 | this function is useful with csharp and CNune only
267 |
268 | def semi_init_DataPoints(self):
269 |
270 | for dp in self.DataPoints:
271 | dp.visited = False
272 | dp.class_id = 0
273 | dp.homogeneity = 0
274 | return 0
275 | """
276 |
277 | def find_Noise(self):
278 |
279 | self.ClassPoints[-1] = Node(-1, parent=None)
280 | self.ClassPoints[0] = Node(0, parent=None)
281 |
282 | for i in range(self.dp_count):
283 | dp = self.DataPoints[i]
284 | if len(dp.reference) == 0:
285 | dp.class_id = -1
286 | self.ClassPoints[i] = self.ClassPoints[-1] # Node(-1, parent=None) # this is a noise point
287 | else: # at least one point
288 | dp.class_id = 0 # this is already set initially
289 | self.ClassPoints[i] = self.ClassPoints[0] # Node(0, parent=None) # this is a non-clustered point
290 | # where -1 is noise and 0 is non-clustered
291 | return 0
292 |
293 | def sort_DataPoints(self):
294 |
295 | for dp in self.DataPoints:
296 | if len(dp.reference) != 0:
297 | self.KernelPoints.append([dp.point_id, dp.homogeneity])
298 |
299 | self.KernelPoints = self.sort_Tuple(self.KernelPoints, reverse=True)
300 |
301 | return 0
302 |
303 | def compute_Links(self):
304 | start = time.time()
305 |
306 | for i in range(self.dp_count):
307 | for pos in self.DataPoints[i].refer_to:
308 |
309 | for pos2 in self.DataPoints[i].referred_by:
310 | if pos[0] == pos2[0]:
311 | self.DataPoints[i].reference.append(pos)
312 | break
313 |
314 | self.analyzer["n_points"]["strong"] = 0
315 | for i in range(self.dp_count):
316 | self.DataPoints[i].referred_by = self.sort_Tuple(self.DataPoints[i].referred_by, reverse=False)
317 | if len(self.DataPoints[i].referred_by) >= self.k_nearest:
318 | self.analyzer["n_points"]["strong"] += 1
319 | else:
320 | self.analyzer["n_points"]["weak"]["all"] += 1
321 |
322 | self.DataPoints[i].reference = self.sort_Tuple(self.DataPoints[i].reference, reverse=False)
323 | homogeneity = (100 * len(self.DataPoints[i].referred_by)) + len(self.DataPoints[i].reference)
324 | self.DataPoints[i].homogeneity = homogeneity
325 |
326 | end = time.time()
327 |
328 | return 0
329 |
330 | def sort_Tuple(self, li, reverse=False):
331 |
332 | # reverse = False (sorts in ascending order)
333 | # the key sorts using the second element of each
334 | # sublist; a lambda is used
335 | li.sort(key=lambda x: x[1], reverse=reverse)
336 | return li
337 |
338 | def load_DataPoints(self):
339 |
340 | # initialize datapoints to its default values
341 | self.init_DataPoints()
342 |
343 | for i in range(self.dp_count):
344 | result = self.dp_dis[i]
345 | for k, o in enumerate(result):
346 | # no need for this condition, it won't happen
347 | #if k >= self.k_nearest:
348 | # break
349 |
350 | # if k != 0:
351 | _dis = round(o[1], 6)
352 | _point = o[0]
353 |
354 | self.DataPoints[i].refer_to.append([_point, _dis])
355 | self.DataPoints[_point].referred_by.append([i, _dis])
356 |
357 | return 0
358 |
359 | def prepare_Clusters(self):
360 | start = time.time()
361 | class_id = 0
362 |
363 | itr = 0
364 | for dp_kern in self.KernelPoints:
365 | itr += 1
366 | if self.prop_step and self.prop_step <= itr:
367 | continue
368 |
369 | dp_core = self.DataPoints[dp_kern[0]]
370 |
371 | # remember: no strong points & weak points in Tirann
372 | # all points with at least one reference are considered (noise is ignored)
373 | if len(dp_core.reference) > 0 and len(dp_core.referred_by) >= len(dp_core.refer_to):
374 |
375 | class_id += 1
376 | dp_core.visited = True
377 | dp_core.class_id = class_id
378 | self.ClassPoints[class_id] = Node(class_id, parent=None)
379 | max_class = -1
380 | weight_map = {}
381 | # Class_Points[class_id] = new TreeCls::Node(class_id)
382 |
383 | for pos2 in dp_core.reference:
384 | # if DataPoints[*pos2].visited: checking 'visited' was tested and found not to affect the results, so you can omit it
385 | if self.DataPoints[pos2[0]].class_id > 0 and len(self.DataPoints[pos2[0]].referred_by) >= len(
386 | self.DataPoints[pos2[0]].refer_to):
387 |
388 | # this condition is a must, as some points may be visited but not classified yet
389 | # we may neglect noise as long as it is among our reference points
390 |
391 | _cls = self.DataPoints[pos2[0]].class_id
392 | _class_id = self.ClassPoints[_cls].root.name
393 | # _class_id = _cls
394 |
395 | if _class_id not in weight_map.keys():
396 | weight_map[_class_id] = 1
397 | else:
398 | weight_map[_class_id] += 1
399 |
400 |
401 | elif self.DataPoints[pos2[0]].visited == False:
402 | self.DataPoints[pos2[0]].visited = True # this point is visited but not classified yet
403 |
404 | while len(weight_map) > 0:
405 | # weight_no = self.getValue(dic=weight_map, what='max', who='value') # no need to it in DenMune
406 | max_class = self.getValue(dic=weight_map, what='max', who='value', other=True)
407 |
408 | if max_class != -1 and max_class != class_id:
409 | self.ClassPoints[max_class].parent = self.ClassPoints[class_id]
410 |
411 | del weight_map[max_class]
412 |
413 | for i in range(self.dp_count):
414 | clsid = self.DataPoints[i].class_id
415 | clsroot = self.ClassPoints[clsid].root.name
416 | self.DataPoints[i].class_id = clsroot
417 |
418 | if self.prop_step:
419 | # let us update class 0 to be -2
420 | for dp in self.DataPoints:
421 | if dp.class_id == 0:
422 | dp.class_id = -2
423 |
424 | end = time.time()
425 |
426 | return 0
427 |
428 | def attach_Points(self):
429 |
430 | start = time.time()
431 | olditr = 0
432 | newitr = -1
433 | while olditr != newitr:
434 | newitr = olditr
435 | olditr = 0
436 |
437 | for pos in self.KernelPoints:
438 | if self.DataPoints[pos[0]].class_id == 0:
439 | self.DataPoints[pos[0]].class_id = self.attach_StrongPoint(pos[0])
440 | olditr += 1
441 |
442 | olditr = 0
443 | newitr = -1
444 | while olditr != newitr:
445 | newitr = olditr
446 | olditr = 0
447 |
448 | for pos in self.KernelPoints:
449 | if self.DataPoints[pos[0]].class_id == 0:
450 | self.DataPoints[pos[0]].class_id = self.attach_WeakPoint(pos[0])
451 | olditr += 1
452 |
453 | end = time.time()
454 |
455 | # let us update class 0 to be -2
456 | for dp in self.DataPoints:
457 | if dp.class_id == 0:
458 | dp.class_id = -2
459 |
460 | def attach_StrongPoint(self, point_id):
461 | weight_map = {}
462 | max_class = 0 # max_class in attach_StrongPoint = 0, thus if a point failed to merge with any cluster, it gets one more chance
463 | # to merge in attach_WeakPoint
464 | dp_core = self.DataPoints[point_id]
465 | if len(dp_core.reference) != 0:
466 | dp_core.visited = True
467 |
468 | for pos2 in dp_core.reference:
469 |
470 | if self.DataPoints[pos2[0]].visited == True and len(self.DataPoints[pos2[0]].referred_by) >= len(
471 | self.DataPoints[pos2[0]].refer_to):
472 |
473 | clsid = self.DataPoints[pos2[0]].class_id
474 | clsroot = self.ClassPoints[clsid].root.name
475 | self.DataPoints[pos2[0]].class_id = clsroot
476 |
477 | if clsroot not in weight_map.keys():
478 | weight_map[clsroot] = 1
479 | else:
480 | weight_map[clsroot] += 1
481 |
482 | if len(weight_map) != 0:
483 | weight_map = dict(sorted(weight_map.items()))
484 | max_class = self.getValue(dic=weight_map, what='max', who='value', other=True)
485 |
486 | return max_class # this will return get_Root(max_class) as we computed earlier _class_id = get_Root(_cls)
487 |
488 | def attach_WeakPoint(self, point_id):
489 |
490 | weight_map = {}
491 | max_class = -1 # max_class in attach_WeakPoint = -1, thus if a point failed to merge with any cluster, it is considered noise
492 |
493 | dp_core = self.DataPoints[point_id]
494 | if len(dp_core.reference) != 0:
495 | dp_core.visited = True
496 |
497 | for pos2 in dp_core.reference:
498 |
499 | if self.DataPoints[pos2[0]].visited == True:
500 |
501 | clsid = self.DataPoints[pos2[0]].class_id
502 | clsroot = self.ClassPoints[clsid].root.name
503 | self.DataPoints[pos2[0]].class_id = clsroot
504 |
505 | if clsroot not in weight_map.keys():
506 | weight_map[clsroot] = 1
507 | else:
508 | weight_map[clsroot] += 1
509 |
510 | if len(weight_map) != 0:
511 | weight_map = dict(sorted(weight_map.items()))
512 | max_class = self.getValue(dic=weight_map, what='max', who='value', other=True)
513 |
514 | return max_class # this will return get_Root(max_class) as we computed earlier _class_id = get_Root(_cls)
515 |
516 | def fit_predict(self,
517 | validate=True,
518 | show_plots=True,
519 | show_noise=True,
520 | show_analyzer=True
521 | ):
522 | data_type = None
523 | validity_scores = []
524 | solution_file = 'solution.txt'
525 |
526 | if os.path.isfile(solution_file):
527 | os.remove(solution_file)
528 |
529 | pred_list = []
530 | for dp in self.DataPoints:
531 | pred_list.append(dp.class_id)
532 |
533 | with open(solution_file, 'w') as f:
534 | f.writelines("%s\n" % pred for pred in pred_list)
535 |
536 | labels_dic = {}
537 | self.train_pred = pred_list[:self.train_sz]
538 | self.test_pred = pred_list[self.train_sz:]
539 |
540 | if self.test_data is not None:
541 | self.labels_pred = np.append(self.train_pred, self.test_pred, axis=0)
542 | else:
543 | self.labels_pred = self.train_pred
544 |
545 | if self.prop_step > 0:
546 | print("Propagation at iteration:", self.prop_step)
547 | self.plot_clusters(show_plots=show_plots, show_noise=show_noise, data_type='train')
548 |
549 | if show_analyzer:
550 | self.show_Analyzer()
551 |
552 | return None, None
553 |
554 | else:
555 | if self.data_indicator >= 3:
556 |
557 | if show_analyzer:
558 | print("Plotting dataset Groundtruth")
559 | self.plot_clusters(show_plots=show_plots, show_noise=show_noise, data_type='ground')
560 |
561 | if validate and self.data_indicator >= 1:
562 |
563 | if self.data_indicator >= 3:
564 | self.analyzer["validity"] = {}
565 | self.analyzer["validity"]['train'] = {}
566 | validity_scores = self.validate_Clusters(data_type='train')
567 |
568 | if show_analyzer:
569 | print('Plotting train data')
570 | self.plot_clusters(show_plots=show_plots, show_noise=show_noise, data_type='train')
571 | if show_analyzer:
572 | self.show_Analyzer(root='Validating train data')
573 |
574 | if self.data_indicator == 15:
575 | validity_scores = self.validate_Clusters(data_type='test')
576 | if show_analyzer:
577 | # self.analyzer["validity"]['test'] = {}
578 | self.show_Analyzer(self.analyzer['validity']['test'], root='Validating test data')
579 |
580 | if self.data_indicator > 3:
581 | if show_analyzer:
582 | print('Plotting test data')
583 | self.plot_clusters(show_plots=show_plots, show_noise=show_noise, data_type='test')
584 |
585 | """"
586 | if self.data_indicator == 15:
587 | validity_scores = self.validate_Clusters(data_type='augmented')
588 | if show_analyzer:
589 | self.analyzer["validity"]['augmented'] = {}
590 | self.show_Analyzer(self.analyzer['validity']['augmented'], root='Validating augmented data (train & test)')
591 | if self.data_indicator > 3:
592 | if show_analyzer:
593 | print ('Plotting augmented data (train & test)')
594 | self.plot_clusters(show_plots=show_plots, show_noise=show_noise, data_type='augmented')
595 | """
596 |
597 | labels_dic['train'] = self.train_pred
598 | labels_dic['test'] = self.test_pred
599 |
600 | if self.data_indicator == 1:
601 | return labels_dic, None
602 | elif validate == False:
603 | return labels_dic, None
604 | elif self.data_indicator >= 3 and validate == True:
605 | return labels_dic, self.analyzer['validity']
606 |
607 | def match_Labels(self):
608 |
609 | labels_true = self.labels_truth
610 |
611 | """"
612 | if isinstance(self.labels_pred, np.ndarray):
613 | # labels_pred = np.array(self.labels_pred, dtype=np.int64)
614 | labels_pred = self.labels_pred.tolist()
615 | else:
616 | labels_pred = self.labels_pred
617 | """
618 |
619 | labels_pred = self.labels_pred
620 | pred_set = set(labels_pred)
621 | index = []
622 | x = 1
623 | old_item = labels_true[0]
624 | old_x = 0
625 |
626 | for item in labels_true:
627 |
628 | if item != old_item:
629 | count = x - old_x
630 | index.append([old_x, old_item, count])
631 | old_item = item
632 | old_x = x
633 | x += 1
634 |
635 | ln = len(labels_true)
636 | count = x - old_x
637 | index.append([old_x, old_item, count])
638 | index[0][2] = index[0][2] - 1
639 |
640 | index.sort(key=lambda x: x[2], reverse=True)
641 |
642 | labeled = []
643 | for n in range(len(index)):
644 | newval = index[n][1]
645 | max_class = max(set(labels_pred), key=labels_pred[index[n][0]:index[n][0] + index[n][2] - 1].count)
646 | if max_class not in labeled:
647 | labels_pred = [newval if x == max_class else x for x in labels_pred]
648 | labeled.append(newval)
649 |
650 | labels_pred = np.array(labels_pred, dtype=np.int64)
651 | self.labels_pred = labels_pred
652 | self.train_pred = labels_pred[:self.train_sz]
653 | self.test_pred = labels_pred[self.train_sz:]
654 |
655 | return labels_pred
656 |
657 | def validate_Clusters(self, data_type=None):
658 |
659 | labels_true = self.labels_truth
660 | if data_type == 'train':
661 | labels_true = labels_true[:self.train_sz]
662 | elif data_type == 'test':
663 | labels_true = labels_true[self.train_sz:]
664 | #elif data_type == 'augmented':
665 | # keep it as it
666 |
667 | if isinstance(self.labels_pred, np.ndarray):
668 | # labels_pred = np.array(self.labels_pred, dtype=np.int64)
669 | self.labels_pred = self.labels_pred.tolist()
670 |
671 | labels_pred = self.labels_pred
672 | if self.prop_step == 0: # do not match labels if you are in propagation mode
673 | labels_pred = self.match_Labels()
674 |
675 |
676 | if data_type == 'train':
677 | labels_pred = labels_pred[:self.train_sz]
678 | elif data_type == 'test':
679 | labels_pred = labels_pred[self.train_sz:]
680 | #elif data_type == 'augmented':
681 | # keep it as it
682 |
683 | self.analyzer["n_clusters"]["actual"] = len(np.unique(labels_true))
684 |
685 | # Score the clustering
686 | from sklearn.metrics.cluster import adjusted_mutual_info_score # 2010
687 | from sklearn.metrics.cluster import adjusted_rand_score # 1985
688 |
689 | # from sklearn.metrics import davies_bouldin_score
690 | # #1975 - 2001 ## no ground truth ##Values closer to zero indicate a better partition.
691 |
692 | ## also known as the Variance Ratio Criterion - can be used to evaluate the model,
693 | ## where a higher Calinski-Harabasz score relates to a model with better defined clusters.
694 |
695 | from sklearn import metrics # for homogeneity, completeness, fowlkes
696 | ## homogeneity: each cluster contains only members of a single class.
697 | ## completeness: all members of a given class are assigned to the same cluster.
698 | # v-measure the harmonic mean of homogeneity and completeness called V-measure 2007
699 |
700 | acc = metrics.accuracy_score(labels_true, labels_pred, normalize=False)
701 |
702 | # mi = metrics.mutual_info_score(labels_true, labels_pred)
703 | # print("mutual_info_score: %f." % mi)
704 |
705 | nmi = metrics.normalized_mutual_info_score(labels_true, labels_pred, average_method='arithmetic')
706 | # print("normalized_mutual_info_score: %f." % nmi)
707 |
708 | ami = adjusted_mutual_info_score(labels_true, labels_pred, average_method='arithmetic')
709 | # print("Adjusted_mutual_info_score: %f." % adj_nmi)
710 |
711 | homogeneity = metrics.homogeneity_score(labels_true, labels_pred)
712 | # print("homogeneity_score: %f." % homogeneity_score)
713 |
714 | completeness = metrics.completeness_score(labels_true, labels_pred)
715 | # print("completeness_score: %f." % completeness_score)
716 |
717 | f1_weight = metrics.f1_score(labels_true, labels_pred, average='weighted')
718 | # f1_micro = metrics.f1_score(labels_true, labels_pred, average='micro')
719 | # f1_macro = metrics.f1_score(labels_true, labels_pred, average='macro')
720 | # print("f1_score: %f." % f1_score)
721 |
722 | ari = adjusted_rand_score(labels_true, labels_pred)
723 | # print("adjusted_rand_score: %f." % adj_rand)
724 |
725 | f1 = f1_weight
726 |
727 | validity = {"ACC": acc,
728 | "F1": f1,
729 | "NMI": nmi,
730 | "AMI": ami,
731 | "ARI": ari,
732 | "homogeneity": homogeneity,
733 | "completeness": completeness
734 | }
735 |
736 | # val = [acc, f1, nmi, ami, ari, homogeneity, completeness]
737 | self.analyzer["validity"][data_type] = validity
738 | # self.analyzer["validity"] = validity
739 |
740 | return self.analyzer["validity"][data_type]
741 |
742 | def preplot_Clusters(self, data_type=None):
743 |
744 | self.analyzer["n_points"]["size"] = self.dp_count
745 | if data_type == 'test':
746 | self.analyzer["n_points"]["plot_size"] = self.test_sz
747 | elif data_type == 'train':
748 | self.analyzer["n_points"]["plot_size"] = self.train_sz
749 | #elif data_type == 'augmented':
750 | # self.analyzer["n_points"]["plot_size"] = self.dp_count
751 |
752 | if data_type == 'ground':
753 | labels = self.labels_truth
754 | labels = np.array(labels, dtype=np.int64)
755 | else:
756 | labels = self.labels_pred
757 | if data_type == 'train':
758 | labels = labels[:self.train_sz]
759 | elif data_type == 'test':
760 | labels = labels[self.train_sz:]
761 | #elif data_type == 'augmented':
762 | # nothing to do
763 |
764 | noise_1 = list(labels).count(-1)
765 | self.analyzer["n_points"]["noise"]["type-1"] = noise_1
766 |
767 | noise_2 = list(labels).count(-2)
768 | self.analyzer["n_points"]["noise"]["type-2"] = noise_2
769 |
770 | self.analyzer["n_points"]["weak"]["succeeded to merge"] = self.analyzer["n_points"]["weak"]["all"] - \
771 | self.analyzer["n_points"]["noise"]["type-2"]
772 | self.analyzer["n_points"]["weak"]["failed to merge"] = self.analyzer["n_points"]["noise"]["type-2"]
773 |
774 | unique_labels = np.unique(labels)
775 | num_of_clusters = len(unique_labels)
776 |
777 | fake_clusters = 0 # outlier = -1 and weak points that fail to merge (noise) = 0
778 |
779 | i = 0
780 | for n in (unique_labels):
781 |
782 | if n >= 0: # num_of_clusters:
783 | labels = np.where(labels == n, i, labels)
784 | i += 1
785 | else:
786 | fake_clusters += 1
787 |
788 | self.analyzer["n_clusters"]["detected"] = num_of_clusters - fake_clusters
789 |
790 | return labels
791 |
792 | def plot_clusters(self, data_type=None, show_noise=False, show_plots=True):
793 | data2 = []
794 | colors2 = []
795 |
796 | labels = self.preplot_Clusters(data_type=data_type)
797 |
798 | if show_plots:
799 |
800 | palette = sns.color_palette('deep',
801 | np.unique(labels).max() + 2) # deep, dark, bright, muted, pastel, colorblind
802 |
803 | if self.prop_step:
804 | colors = [palette[x] if x >= 0 else ((0.0, 0.0, 0.0) if x == -1 else (0.0, 0.0, 0.0)) for x in labels]
805 | v = 0
806 | for c in colors:
807 | if (c[0] + c[1] + c[2]) > 0.0: # keep outliers away; note that both -1 and -2 outliers became black, color (0.0, 0.0, 0.0), in the previous step
808 | colors2.append((c[0], c[1], c[2], 1.0))
809 | data2.append((self.data[v][0], self.data[v][1]))
810 | v += 1
811 | data2 = np.array(data2)
812 |
813 | else:
814 | if show_noise == False:
815 | colors = [palette[x] if x >= 0 else (1.0, 1.0, 1.0) for x in
816 | labels] # noise points wont be printed due to x > 0 , else (1.0, 1.0, 1.0)
817 | else:
818 | colors = [palette[x] if x >= 0 else ((0.0, 0.0, 0.0) if x == -1 else (0.9, 0.9, 0.9)) for x in
819 | labels] # noise points wont be printed due to x > 0 , else (1.0, 1.0, 1.0)
820 |
821 | # plt.figure(figsize=(12, 8))
822 |
823 | if self.prop_step:
824 | # length of data2 will always equal the length of the specific data type (test, train)
825 | #print ('datatype', data_type)
826 | if data_type == 'train':
827 | plt.scatter(data2.T[0], data2.T[1], c=colors2, **plot_kwds, marker='o')
828 | #plt.scatter(data2[:self.train_sz].T[0], data2[:self.train_sz].T[1], c=colors2, **plot_kwds, marker='o')
829 |
830 | """"
831 | elif data_type == 'test':
832 | print ('train_sz', self.train_sz, 'test_sz', self.test_sz)
833 | #plt.scatter(data2[self.test_sz:self.train_sz:].T[0], data2[self.train_sz:].T[1], c=colors2, **plot_kwds, marker='o' )
834 | plt.scatter(data2.T[0], data2.T[1], c=colors2, **plot_kwds, marker='o')
835 | elif data_type == 'augmented':
836 | print ('3')
837 | plt.scatter(data2.T[0], data2.T[1], c=colors2, **plot_kwds, marker='o')
838 | elif data_type == 'ground':
839 | #plt.scatter(data2[:self.train_sz].T[0], data2[:self.train_sz].T[1], c=colors2, **plot_kwds, marker='o')
840 | plt.scatter(data2.T[0], data2.T[1], c=colors2, **plot_kwds, marker='o')
841 | """
842 |
843 | else:
844 | if data_type == 'train':
845 | plt.scatter(self.data[:self.train_sz].T[0], self.data[:self.train_sz].T[1], c=colors, **plot_kwds,
846 | marker='o')
847 | elif data_type == 'test':
848 | plt.scatter(self.data[self.train_sz:].T[0], self.data[self.train_sz:].T[1], c=colors, **plot_kwds,
849 | marker='o')
850 | #elif data_type == 'augmented':
851 | # plt.scatter(self.data.T[0], self.data.T[1], c=colors, **plot_kwds, marker='o')
852 | elif data_type == 'ground':
853 | if self.data_indicator == 15:
854 | plt.scatter(self.data.T[0], self.data.T[1], c=colors, **plot_kwds, marker='o')
855 | else:
856 | plt.scatter(self.data[:self.train_sz].T[0], self.data[:self.train_sz].T[1], c=colors,
857 | **plot_kwds, marker='o')
858 |
859 | self.colors = colors
860 | frame = plt.gca()
861 | frame.axes.get_xaxis().set_visible(False)
862 | frame.axes.get_yaxis().set_visible(False)
863 | if show_plots:
864 | if self.prop_step:
865 | prop_folder = 'propagation'
866 | if not os.path.exists(prop_folder):
867 | os.mkdir(prop_folder)
868 | plt.savefig(prop_folder + '/' + str(self.prop_step) + '.png')
869 | plt.show()
870 | # plt.clf() # this is a must to clear figures if you plot continuously
871 |
872 | return 0
873 |
874 | def generate_tsne(self, data, d, file_2d):
875 |
876 | dim_two = TSNE(n_components=d, random_state=1971, init='random').fit_transform(data)
877 |
878 | mystr = ""
879 | data_len = len(dim_two)
880 | for i in range(data_len):
881 | for n in range(d):
882 | mystr += str(round(dim_two[i][n], 6))
883 | if (n < d - 1): mystr += ','
884 | if (n == d - 1): mystr += '\n'
885 |
886 | text_file = open(file_2d, "w")
887 | text_file.write(mystr)
888 | text_file.close()
889 |
890 | return 0
891 |
892 | def show_Analyzer(self, mydic=None, root="DenMune"):
893 |
894 | if mydic is None:
895 | mydic = self.analyzer
896 |
897 | tree = tr()
898 | tree.create_node(root, "root")
899 |
900 | def creat_TreefromDict(self, tree, mydict, key, parent):
901 | if type(mydict[key]) is not dict:
902 | val = key + ': ' + str(round(mydict[key], 3))
903 | tree.create_node(val, key, parent=parent)
904 |
905 | for d in mydic:
906 | # print ('sub_roots', d)
907 | if type(mydic[d]) is not dict:
908 | creat_TreefromDict(self, tree, mydic, d, parent='root')
909 | # print('0', 'key:', d , 'value:', mydic[d], 'parent: root')
910 | else:
911 | tree.create_node(d, d, parent="root")
912 | subdic = mydic[d]
913 | # print('1', 'key:', d , 'value:', subdic, 'parent: root')
914 | for v in subdic:
915 | if type(subdic[v]) is not dict:
916 | # print('2', 'key:', v , 'value:', subdic[v], 'parent:', d)
917 | creat_TreefromDict(self, tree, subdic, v, parent=d)
918 | else:
919 | tree.create_node(v, v, parent=d)
920 | subsubdic = subdic[v]
921 | # print('3', 'key:', v , 'value:', subsubdic, 'parent:', d)
922 | for z in subsubdic:
923 | # print('4', 'key:', z , 'value:', subsubdic[z], 'parent:', v)
924 | creat_TreefromDict(self, tree, subsubdic, z, parent=v)
925 | tree.show()
926 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DenMune: A Density-Peak Clustering Algorithm
2 |
3 | DenMune is a clustering algorithm that can find clusters of arbitrary size, shapes, and densities in two dimensions. Higher dimensions are first reduced to 2D using t-SNE. The algorithm relies on a single parameter K (the number of nearest neighbors). The results show the superiority of the algorithm. Enjoy the simplicity and the power of DenMune.
4 |
5 | ## Listen to this amazing interview podcast
6 |
7 | [](https://on.soundcloud.com/z7WeqJnHjDd26hD76)
8 |
9 | *click image to listen (24 min)*
10 |
11 | ## Reproducibility & Test Drives
12 |
13 | Now you can reproduce all the research experiments, and even share the results and collaborate with the algorithm using our capsule on CodeOcean. Each Capsule is a self-contained computational experiment with computing environment, code, data, version history, and results.
14 |
15 | Also, you may use our repo2docker offered by mybinder.org, which encapsulates the algorithm and all required data in one virtual machine instance. All Jupyter notebook examples found in this repository will also be available for you to practice with in this repo2docker. Thanks mybinder.org, you made it possible!
16 |
17 | | Test-drive | URL |
18 | | ---------------------------------------- | ------------------------------------------------------------ |
19 | | Reproduce our code capsule on Code Ocean | [](https://bit.ly/codeocean-capsule) |
20 | | Use our test-drive on MyBinder | [](https://bit.ly/mybinder-repo2docker) |
21 |
22 | ## Scientific Work
23 |
24 | | Paper & data | Journals | ResearchGate Stats |
25 | | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
26 | | [](https://bit.ly/denmune-research-paper) [](https://bit.ly/mendeley-data) | [](https://www.scimagojr.com/journalsearch.php?q=24823&tip=sid&clean=0) [](https://www.scimagojr.com/journalsearch.php?q=21101060167&tip=sid&clean=0) |  |
27 |
28 | ## Coding, Security & Maintenance
29 |
30 | | Code Style | Installation | CI Workflow | Code Coverage | Code Scanning |
31 | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
32 | |  | [](https://pypi.org/project/denmune/) | [](https://circleci.com/gh/egy1st/denmune-clustering-algorithm/tree/main) | [](https://codecov.io/gh/egy1st/denmune-clustering-algorithm) | [](https://github.com/adrinjalali/denmune-clustering-algorithm/actions/workflows/codeql.yml) |
33 |
34 | ## Tutorials
35 |
36 | | Reproducible Capsule | Repo2Docker | Colab | Kaggle |
37 | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
38 | | [](https://bit.ly/codeocean-capsule) | [](https://bit.ly/mybinder-repo2docker) | [](#colab) | [](#kaggle) |
39 |
40 | ## Downloads Stats
41 |
42 | | Download/Week | Download/Month | Total Downloads |
43 | | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
44 | | [](https://pepy.tech/project/denmune) | [](https://pepy.tech/project/denmune) | [](https://pepy.tech/project/denmune) |
45 |
46 | ## Based on the paper
47 |
48 | | Paper |
49 | |-
50 | | Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry,
51 | | *DenMune: Density peak based clustering using mutual nearest neighbors*
52 | | In: Journal of Pattern Recognition, Elsevier,
53 | | volume 109, number 107589, January 2021
54 | | DOI: https://doi.org/10.1016/j.patcog.2020.107589
55 |
56 | ## Documentation:
57 |
58 | - [](https://denmune.readthedocs.io/en/latest/?badge=latest)
59 | - [](https://denmune-docs.vercel.app)
60 |
61 | ## Watch it in action
62 |
63 | These 30 seconds will show you how a density-based algorithm, DenMune, propagates:
64 |
65 | [](https://colab.research.google.com/drive/1o-tP3uvDGjxBOGYkir1lnbr74sZ06e0U?usp=sharing)
66 |
67 | []()
68 |
69 | ## Still interested?
70 |
71 | Watch this ***10-min*** illustrative video on:
72 |
73 | - [](https://player.vimeo.com/video/827209757)
74 | - [](https://www.youtube.com/watch?v=o77raaasuOM)
75 |
76 | ## When less means more
77 |
78 | Most classic clustering algorithms fail to detect complex clusters where clusters are of different sizes, shapes, and densities, or exist in noisy data. Recently, a density-based algorithm named DenMune showed great ability in detecting complex shapes even in noisy data. It can detect the number of clusters automatically, detect both pre-identified and post-identified noise automatically, and remove them.
79 |
80 | It can achieve an accuracy reaching 100% on some classic pattern problems and 97% on the MNIST dataset. A great advantage of this algorithm is that it is a single-parameter algorithm. All you need is to set the number of k-nearest neighbors and the algorithm will take care of the rest. Being non-sensitive to changes in k makes it robust and stable.
81 |
82 | Keep in mind that the algorithm initially reduces any N-D dataset to a 2-D dataset, so a good benefit of this algorithm is that you can always plot your data and explore it, which makes it a good candidate for data exploration. Finally, the algorithm comes with a neat package for visualizing data, validating it, and analyzing the whole clustering process.
83 |
84 | ## How to install DenMune
85 |
86 | Simply install DenMune clustering algorithm using pip command from the official Python repository
87 |
88 | [](https://pypi.org/project/denmune/)
89 |
90 | From the shell run the command
91 |
92 | ```shell
93 | pip install denmune
94 | ```
95 |
96 | From Jupyter notebook cell run the command
97 |
98 | ```ipython3
99 | !pip install denmune
100 | ```
101 |
102 | ## How to use DenMune
103 |
104 | Once DenMune is installed, you just need to import it
105 |
106 | ```python
107 | from denmune import DenMune
108 | ```
109 |
110 | *Please note that the first "denmune" (the package) is in lowercase, while the other one (the class itself) has D and M in uppercase.*
111 |
112 | ## Read data
113 |
114 | There are four possible cases of data:
115 |
116 | - only train data without labels
117 | - only labeled train data
118 | - labeled train data in addition to test data without labels
119 | - labeled train data in addition to labeled test data
120 |
121 | ```python
122 | #=============================================
123 | # First scenario: train data without labels
124 | # ============================================
125 | import pandas as pd
126 | data_path = 'datasets/denmune/chameleon/'
127 | dataset = "t7.10k.csv"
128 | data_file = data_path + dataset
129 |
130 | # train data without labels
131 | X_train = pd.read_csv(data_file, sep=',', header=None)
132 |
133 | knn = 39 # k-nearest neighbor, the only parameter required by the algorithm
134 |
135 | dm = DenMune(train_data=X_train, k_nearest=knn)
136 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
137 |
138 | ```
139 |
140 | This is an intuitive dataset which has no groundtruth provided
141 |
142 | 
143 |
144 | ```python
145 | #=============================================
146 | # Second scenario: train data with labels
147 | # ============================================
148 |
149 | data_path = 'datasets/denmune/shapes/'
150 | dataset = "aggregation.csv"
151 | data_file = data_path + dataset
152 |
153 | # train data with labels
154 | X_train = pd.read_csv(data_file, sep=',', header=None)
155 | y_train = X_train.iloc[:, -1]
156 | X_train = X_train.drop(X_train.columns[-1], axis=1)
157 |
158 | knn = 6 # k-nearest neighbor, the only parameter required by the algorithm
159 |
160 | dm = DenMune(train_data=X_train, train_truth= y_train, k_nearest=knn)
161 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=True)
162 | ```
163 |
164 | Dataset groundtruth
165 |
166 | 
167 |
168 | Dataset as detected by DenMune at k=6
169 |
170 | 
171 |
172 |
173 | ```python
174 | #=================================================================
175 | # Third scenario: train data with labels in addition to test data
176 | # ===============================================================
177 |
178 | data_path = 'datasets/denmune/pendigits/'
179 | file_2d = data_path + 'pendigits-2d.csv'
180 |
181 | # train data with labels
182 | X_train = pd.read_csv(data_path + 'train.csv', sep=',', header=None)
183 | y_train = X_train.iloc[:, -1]
184 | X_train = X_train.drop(X_train.columns[-1], axis=1)
185 |
186 | # test data without labels
187 | X_test = pd.read_csv(data_path + 'test.csv', sep=',', header=None)
188 | X_test = X_test.drop(X_test.columns[-1], axis=1)
189 |
190 | knn = 50 # k-nearest neighbor, the only parameter required by the algorithm
191 |
192 | dm = DenMune(train_data=X_train, train_truth= y_train,
193 | test_data= X_test,
194 | k_nearest=knn)
195 | labels, validity = dm.fit_predict(show_analyzer=True, show_noise=True)
196 | ```
197 |
198 | dataset groundtruth
199 |
200 | 
201 |
202 |
203 | dataset as detected by DenMune at k=50
204 |
205 | 
206 |
207 | test data as predicted by DenMune on training the dataset at k=50
208 |
209 | 
210 |
211 |
212 | ## Algorithm's Parameters
213 |
214 | 1. **Parameters used within the initialization of the DenMune class**
215 |
216 | ```python
217 | def __init__ (self,
218 | train_data=None, test_data=None,
219 | train_truth=None, test_truth=None,
220 | file_2d =None, k_nearest=1,
221 | rgn_tsne=False, prop_step=0,
222 | ):
223 | ```
224 |
225 | - train_data:
226 |
227 | - data used for training the algorithm
228 | - default: None. It must be provided by the user, otherwise an error will be raised.
229 |
230 | - train_truth:
231 |
232 | - labels of training data
233 | - default: None
234 |
235 | - test_data:
236 |
237 | - data used for testing the algorithm
238 |
239 | - test_truth:
240 |
241 | - labels of testing data
242 | - default: None
243 |
244 | - k_nearest:
245 |
246 | - number of nearest neighbors
247 | - default: 1. k-nearest neighbor should be at least 1.
248 |
249 | - rgn_tsne:
250 |
251 | - when set to True: it will regenerate the reduced 2-D version of the N-D dataset each time the algorithm runs.
252 | - when set to False: it will generate the reduced 2-D version of the N-D dataset the first time only, then reuse the saved file on subsequent runs.
253 | - default: False
254 |
255 | - file_2d: name (including location) of the file used to save/load the reduced 2-D version
256 |
257 | - if empty: the algorithm will create a temporary file named '_temp_2d'
258 | - default: None
259 |
260 | - prop_step:
261 |
262 | - size of the increment used when showing the clustering propagation.
263 | - leave this parameter at 0 (the default) unless you intentionally want to enter propagation mode.
264 | - default: 0
265 |
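As a rough illustration of how these initialization parameters fit together, here is a minimal sketch. The dataset path, 2-D cache file name, and k value are placeholders chosen for illustration, not part of the package:

```python
import pandas as pd
from denmune import DenMune

# hypothetical labeled N-D dataset, with the label in the last column
X_train = pd.read_csv('datasets/my_ndim_data.csv', sep=',', header=None)
y_train = X_train.iloc[:, -1]
X_train = X_train.drop(X_train.columns[-1], axis=1)

dm = DenMune(
    train_data=X_train,             # required
    train_truth=y_train,            # optional labels, enable validation
    k_nearest=25,                   # the single algorithm parameter
    file_2d='my_ndim_data-2d.csv',  # where the t-SNE reduced 2-D version is saved/loaded
    rgn_tsne=False,                 # reuse the saved 2-D file if it already exists
)
```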
266 |
267 | 2. **Parameters used within the fit_predict function:**
268 |
269 | ```python
270 | def fit_predict(self,
271 | validate=True,
272 | show_plots=True,
273 | show_noise=True,
274 | show_analyzer=True
275 | ):
276 | ```
277 |
278 | - validate:
279 | - turn validation on/off according to the five measures integrated with DenMune (Accuracy, F1-score, NMI index, AMI index, ARI index)
280 | - default: True
281 |
282 | - show_plots:
283 | - show/hide plotting of data
284 | - default: True
285 |
286 | - show_noise:
287 | - show/hide noise and outlier
288 | - default: True
289 |
290 | - show_analyzer:
291 | - show/hide the analyzer
292 | - default: True
293 |
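As a quick, hedged example of these switches in action, the following sketch (assuming a `dm` instance built as in the earlier examples) runs quietly, without plots or the analyzer tree, and still returns the predictions and validity scores:

```python
labels, validity = dm.fit_predict(
    validate=True,        # compute the built-in validity measures
    show_plots=False,     # skip plotting
    show_noise=False,     # irrelevant here since plotting is off
    show_analyzer=False   # do not print the analyzer tree
)

# labels is a dictionary with 'train' and 'test' predictions;
# validity holds the scores per data type when ground truth was provided
print(labels['train'][:10])
```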
294 | ## The Analyzer
295 |
296 | The algorithm provides an exploratory tool called the analyzer. Once called, it gives you an in-depth analysis of how your clustering results perform.
297 |
298 | 
299 |
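If you prefer to read these numbers programmatically instead of the printed tree, the same information is kept in the `analyzer` dictionary on the instance. A small sketch, with key names taken from the source code and `dm` assumed to be a fitted instance:

```python
# execution time (in seconds) recorded for each stage
print(dm.analyzer['exec_time'])   # keys include 't_SNE', 'NGT' and 'DenMune'

# point statistics: size, dimensionality, strong/weak points, noise counts
print(dm.analyzer['n_points'])

# number of clusters in the ground truth vs. detected by DenMune
print(dm.analyzer['n_clusters'])
```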
300 |
301 | ## Noise Detection
302 |
303 | DenMune detects noise and outliers automatically; no further work is needed from your side.
304 |
305 | - It plots pre-identified noise in black
306 | - It plots post-identified noise in light grey
307 |
308 | To hide them, set the show_noise parameter to False.
309 |
310 | ```python
311 | # let us show noise
312 |
313 | dm = DenMune(train_data=X_train, k_nearest=knn)
314 | labels, validity = dm.fit_predict(show_noise=True)
315 | ```
316 |
317 | ```python
318 | # let us show clean data by removing noise
319 |
320 | dm = DenMune(train_data=X_train, k_nearest=knn)
321 | labels, validity = dm.fit_predict(show_noise=False)
322 | ```
323 |
324 | | noisy data | clean data |
325 | | ------------------------------------------------------------ | ------------------------------------------------------------ |
326 | |  |  |
327 |
328 |
329 |
330 | ## Validation
331 |
332 | You can get your validation results in three ways:
333 |
334 | - by showing the Analyzer
335 | - by extracting values from the validity dictionary returned by the fit_predict function
336 | - by extracting values from the Analyzer dictionary
337 | - There are five validity measures built into the algorithm, which are:
338 | - ACC, Accuracy
339 | - F1 score
340 | - NMI index (Normalized Mutual Information)
341 | - AMI index (Adjusted Mutual Information)
342 | - ARI index (Adjusted Rand Index)
343 |
344 | 
345 |
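A minimal sketch of the last two methods, assuming `dm` was constructed with `train_truth` so that validation can run:

```python
labels, validity = dm.fit_predict(validate=True, show_plots=False, show_analyzer=False)

# from the validity dictionary returned by fit_predict
print(validity['train']['ACC'], validity['train']['NMI'])

# the same scores are also stored in the Analyzer dictionary
print(dm.analyzer['validity']['train']['ARI'])
```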
346 | ## K-nearest Evolution
347 |
348 | The following chart shows how pre- and post-identified noise evolve as the number of nearest neighbors (k) increases. The number of detected clusters is analyzed in the same chart in relation to both types of identified noise.
349 |
350 | 
351 |
352 | ## The Scalability
353 |
354 | | Data Size | Time |
355 | | ----------------- | ---------------------- |
356 | | data size: 5000 | time: 2.3139 seconds |
357 | | data size: 10000 | time: 5.8752 seconds |
358 | | data size: 15000 | time: 12.4535 seconds |
359 | | data size: 20000 | time: 18.8466 seconds |
360 | | data size: 25000 | time: 28.992 seconds |
361 | | data size: 30000 | time: 39.3166 seconds |
362 | | data size: 35000 | time: 39.4842 seconds |
363 | | data size: 40000 | time: 63.7649 seconds |
364 | | data size: 45000 | time: 73.6828 seconds |
365 | | data size: 50000 | time: 86.9194 seconds |
366 | | data size: 55000 | time: 90.1077 seconds |
367 | | data size: 60000 | time: 125.0228 seconds |
368 | | data size: 65000 | time: 149.1858 seconds |
369 | | data size: 70000 | time: 177.4184 seconds |
370 | | data size: 75000 | time: 204.0712 seconds |
371 | | data size: 80000 | time: 220.502 seconds |
372 | | data size: 85000 | time: 251.7625 seconds |
373 | | data size: 100000 | time: 257.563 seconds |
374 |
375 |
376 |
377 |
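These timings can be reproduced in spirit by reading the execution times the algorithm records for each run; here is a sketch using synthetic data (the sizes and k value below are illustrative only):

```python
import numpy as np
import pandas as pd
from denmune import DenMune

for size in (5000, 10000, 20000):
    X = pd.DataFrame(np.random.rand(size, 2))   # illustrative 2-D data
    dm = DenMune(train_data=X, k_nearest=39)
    dm.fit_predict(validate=False, show_plots=False, show_analyzer=False)
    print(size, round(dm.analyzer['exec_time']['DenMune'], 4), 'seconds')
```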
378 | ## The Stability
379 |
380 | The algorithm has only a single parameter; even more, it is not sensitive to changes in that parameter, k. You can see that from the following chart yourself. This is of great benefit to you as a data exploration analyst: you can simply explore the dataset using an arbitrary k. Being non-sensitive to changes in k makes it robust and stable.
381 |
382 | 
383 |
384 | ## Reveal the propagation
385 |
386 | One of the top performing features in this algorithm is enabling you to watch how your clusters propagate to construct the final output clusters. Just use the parameter 'prop_step' as in the following example:
387 |
388 | ```python
389 | dataset = "t7.10k" #
390 | data_path = 'datasets/denmune/chameleon/'
391 |
392 | # train file
393 | data_file = data_path + dataset +'.csv'
394 | X_train = pd.read_csv(data_file, sep=',', header=None)
395 |
396 |
397 | from itertools import chain
398 |
399 | # DenMune's parameters
400 | knn = 39 # number of k-nearest neighbors, the only parameter required by the algorithm
401 |
402 | # create a list of different snapshots of the propagation
403 | snapshots = chain(range(2,5), range(5,50,10), range(50, 100, 25), range(100,500,100), range(500,2000, 250), range(1000,5500, 500))
404 |
405 | from IPython.display import clear_output
406 | for snapshot in snapshots:
407 | print("iteration", snapshot)
408 | clear_output(wait=True)
409 | dm = DenMune(train_data=X_train, k_nearest=knn, rgn_tsne=False, prop_step=snapshot)
410 | labels, validity = dm.fit_predict(show_analyzer=False, show_noise=False)
411 | ```
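Based on the source code, each snapshot is also saved as `<iteration>.png` inside a `propagation` folder created next to your notebook, so you can collect the frames afterwards, for example:

```python
import glob
import os

# snapshots written by plot_clusters() while in propagation mode
frames = sorted(glob.glob(os.path.join('propagation', '*.png')),
                key=lambda p: int(os.path.splitext(os.path.basename(p))[0]))
print(frames[:5])
```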
412 |
413 | ## Interact with the algorithm
414 |
415 | [](https://colab.research.google.com/drive/1EUROd6TRwxW3A_XD3KTxL8miL2ias4Ue?usp=sharing)
416 |
417 | *click image to interact*
418 |
419 |
420 | This notebook allows you to interact with the algorithm in several ways:
421 |
422 | - you can choose which dataset to cluster (among 4 chameleon datasets)
423 | - you can decide which number of k-nearest neighbors to use
424 | - show noise on/off, so you can investigate the noise detected by the algorithm
425 | - show analyzer on/off
426 |
427 |
428 | ## We love Jupyter Notebooks
429 |
430 | If you need to test the examples one by one, here are two other options:
431 |
432 | - Use Colab, offered by Google Research, to test each example individually.
433 | - If you are a Kaggler like me, then Kaggle, the workspace where data scientists meet, is a great place to experiment with the algorithm.
434 |
435 |
436 | Here is a list of Google Colab & Kaggle notebooks for practicing the use of the algorithm interactively.
437 |
438 |
439 | | Dataset | Colab Notebook | Kaggle Notebook |
440 | | ---------------------------- | ------------------------------------------------ | ------------------------------------------------ |
441 | | How to use it? | [Open in Colab](https://bit.ly/colab-how-to-use) | [Open in Kaggle](https://bit.ly/kaggle-how-to-use) |
442 | | Chameleon datasets | [Open in Colab](https://bit.ly/colab-chameleon) | [Open in Kaggle](https://bit.ly/kaggle-chameleon) |
443 | | 2D Shape datasets | [Open in Colab](https://bit.ly/colab-2d-shapes) | [Open in Kaggle](https://bit.ly/kaggle-2d-shapes) |
444 | | Clustering unlabeled data | [Open in Colab](https://bit.ly/colab-unlabeled-data) | [Open in Kaggle](https://bit.ly/kaggle-chameleon) |
445 | | iris dataset | [Open in Colab](https://bit.ly/colab-iris-dataset) | [Open in Kaggle](https://bit.ly/kaggle-iris-dataset) |
446 | | MNIST dataset | [Open in Colab](https://bit.ly/colab-mnist-dataset) | [Open in Kaggle](https://bit.ly/kaggle-score-97-mnist) |
447 | | Scoring 97% on MNIST dataset | [Open in Colab](https://bit.ly/colab-score-97-mnist) | [Open in Kaggle](https://bit.ly/kaggle-score-97-mnist) |
448 | | Noise detection | [Open in Colab](https://bit.ly/colab-noise-detection) | [Open in Kaggle](https://bit.ly/kaggle-noise-detection) |
449 | | Validation | [Open in Colab](https://bit.ly/colab-how-to-validate) | [Open in Kaggle](https://bit.ly/kaggle-how-to-validate) |
450 | | How does it propagate? | [Open in Colab](https://bit.ly/colab-how-propagate) | [Open in Kaggle](https://bit.ly/kaggle-how-propagate) [Part 2](https://bit.ly/kaggle-how-propagate-2) |
451 | | Snapshots of propagation | [Open in Colab](https://bit.ly/colab-propagation-shots) | [Open in Kaggle](https://bit.ly/kaggle-propagation-shots) |
452 | | Scalability | [Open in Colab](https://bit.ly/colab-scalability) | [Open in Kaggle](https://bit.ly/kaggle-scalability) |
453 | | Stability | [Open in Colab](https://bit.ly/colab-stability) | [Open in Kaggle](https://bit.ly/kaggle-stability) |
454 | | k-nearest evolution | [Open in Colab](https://bit.ly/colab-knn-evolution) | [Open in Kaggle](https://bit.ly/kaggle-knn-evolution) |
455 |
456 |
457 |
458 | ## Software Impact
459 |
460 | pyMune is an open-source Python library for robust clustering without density-cutoff parameters. It implements the DenMune algorithm, whose only required parameter is the number of nearest neighbors, k. pyMune identifies and expands cluster cores while removing noise, and it is fully scikit-learn compatible. Its combination of robust performance and an effective propagation strategy places it among the state-of-the-art tools in its field and has contributed to its high adoption and impact.
461 |
462 | - After extensive research and rigorous validation, we are proud to release pyMune as an open-source tool on GitHub and PyPI for the benefit of the scientific community.
463 | - With over 230,000 downloads already, pyMune has demonstrated its real-world impact and usefulness. We integrated it with [Code Ocean](https://bit.ly/codeocean-capsule) and [Binder (repo2docker)](https://bit.ly/mybinder-repo2docker) to further enhance reproducibility and reuse, encapsulating code, data, and outputs for turnkey sharing.
464 | - It is part of a special issue of R-badged articles: https://www.sciencedirect.com/journal/software-impacts/special-issue/10XXN6LQ0J1
465 | - It is part of scikit-learn-contrib: https://github.com/scikit-learn-contrib
466 |
467 | 
468 |
469 | ### Warning: Plagiarized Works
470 |
471 | It has come to our attention that the following papers have plagiarized significant portions of the DenMune algorithm and research work:
472 |
473 | 1. **Paper 1:** "DEDIC: Density Estimation Clustering Method Using Directly Interconnected Cores", published in IEEE Access, doi: 10.1109/ACCESS.2022.3229582. Authors: Yisen Lin, Xinlun Zhang, Lei Liu, and Huichen Qu. Reported at https://pubpeer.com/publications/AFC4E173A4FC0A2AD7E70DE688DDA5
474 | 2. **Paper 2:** "Research on stress curve clustering algorithm of Fiber Bragg grating sensor", published in Nature Scientific Reports, doi: 10.1038/s41598-023-39058-w. Authors: Yisen Lin, Ye Wang, Huichen Qu & Yiwen Xiong. Reported at https://pubpeer.com/publications/7AEF7D0F7505A8B8C130D142522741
475 |
476 | We have conducted a thorough analysis and found extensive evidence of plagiarism in these papers, including:
477 |
478 | - Verbatim copying of the core algorithm logic and steps from DenMune, with only superficial naming and implementation differences intended to obfuscate the similarity.
479 | - Plagiarized background, related work, and technical details from the original DenMune paper, with minor paraphrasing and without proper attribution.
480 | - Copying of mathematical formulations, concepts, and point classifications from DenMune.
481 | - Reuse of experimental setup, datasets, and compared algorithms from DenMune without justification or acknowledgment.
482 | - Fabricated experimental results, with values directly copied from DenMune's results and falsely claimed as their own.
483 | - Lack of substantive analysis or discussion, further indicating that the experiments were likely not conducted.
484 |
485 | Despite our efforts to address these concerns through proper channels, the publishers have decided to allow these plagiarized papers to remain published with only a correction acknowledging the issues, rather than retracting them or mandating a comprehensive correction.
486 |
487 | We strongly condemn such academic misconduct and the potential enabling of plagiarism by reputable publishers. Researchers and practitioners should exercise caution when referring to or using the methods described in these plagiarized works.
488 |
489 | For the original, properly cited implementation of the DenMune clustering algorithm, please refer to the official repository and resources provided here.
490 |
491 | We remain committed to upholding academic integrity and ethical research practices, and we urge the scientific community to take a firm stance against plagiarism and misconduct in scholarly publications.
492 |
493 |
494 |
495 | ## How to cite
496 |
497 | - How to cite ***The paper***
498 |
499 | If you have used this codebase in a scientific publication and wish to cite it, please use the [Pattern Recognition article](https://www.sciencedirect.com/science/article/abs/pii/S0031320320303927):
500 |
501 | ```
502 | Mohamed Abbas, Adel El-Zoghabi, Amin Shoukry, "DenMune: Density peak-based clustering using mutual nearest neighbors".
503 | Pattern Recognition, Elsevier, Volume 109, Article 107589.
504 | January 2021
505 | ```
506 |
507 | ```bib
508 | @article{ABBAS2021107589,
509 | title = {DenMune: Density peak-based clustering using mutual nearest neighbors},
510 | journal = {Pattern Recognition},
511 | volume = {109},
512 | pages = {107589},
513 | year = {2021},
514 | issn = {0031-3203},
515 | doi = {https://doi.org/10.1016/j.patcog.2020.107589},
516 | url = {https://www.sciencedirect.com/science/article/pii/S0031320320303927},
517 | author = {Mohamed Abbas and Adel El-Zoghabi and Amin Shoukry},
518 | keywords = {Clustering, Mutual neighbors, Dimensionality reduction, Arbitrary shapes, Pattern recognition, Nearest neighbors, Density peak},
519 | abstract = {Many clustering algorithms fail when clusters are of arbitrary shapes, of varying densities, or the data classes are unbalanced and close to each other, even in two dimensions. A novel clustering algorithm, “DenMune” is presented to meet this challenge. It is based on identifying dense regions using mutual nearest neighborhoods of size K, where K is the only parameter required from the user, besides obeying the mutual nearest neighbor consistency principle. The algorithm is stable for a wide range of values of K. Moreover, it is able to automatically detect and remove noise from the clustering process as well as detect the target clusters. It produces robust results on various low and high-dimensional datasets relative to several known state-of-the-art clustering algorithms.}
520 | }
521 | ```
522 |
523 |
524 |
525 |
526 |
527 | - How to cite ***The Software***
528 | If you have used this codebase in a scientific publication and wish to cite it, please use the [Software Impacts article](https://www.sciencedirect.com/science/article/pii/S266596382300101X):
529 |
530 | ```
531 | Abbas, M. A., El-Zoghabi, A., & Shoukry, A. (2023). PyMune: A Python package for complex clusters detection. Software Impacts, 17, 100564. https://doi.org/10.1016/j.simpa.2023.100564
532 | ```
533 |
534 | ```bib
535 | @article{ABBAS2023100564,
536 | title = {pyMune: A Python package for complex clusters detection},
537 | journal = {Software Impacts},
538 | volume = {17},
539 | pages = {100564},
540 | year = {2023},
541 | issn = {2665-9638},
542 | doi = {https://doi.org/10.1016/j.simpa.2023.100564},
543 | url = {https://www.sciencedirect.com/science/article/pii/S266596382300101X},
544 | author = {Mohamed Ali Abbas and Adel El-Zoghabi and Amin Shoukry},
545 | keywords = {Machine learning, Pattern recognition, Dimensionality reduction, Mutual nearest neighbors, Nearest neighbors approximation, DenMune},
546 | abstract = {We introduce pyMune, an open-source Python library for robust clustering of complex real-world datasets without density cutoff parameters. It implements DenMune (Abbas et al., 2021), a mutual nearest neighbor algorithm that uses dimensionality reduction and approximate nearest neighbor search to identify and expand cluster cores. Noise is removed with a mutual nearest-neighbor voting system. In addition to clustering, pyMune provides classification, visualization, and validation functionalities. It is fully compatible with scikit-learn and has been accepted into the scikit-learn-contrib repository. The code, documentation, and demos are available on GitHub, PyPi, and CodeOcean for easy use and reproducibility.}
547 | }
548 | ```
549 |
550 | ## Licensing
551 |
552 | The DenMune algorithm is 3-clause BSD licensed. Enjoy.
553 |
554 | [](https://choosealicense.com/licenses/bsd-3-clause/)
555 |
556 | ## Task List
557 |
558 | - [x] Update GitHub with the DenMune source code
559 | - [x] Create a repo2docker repository
560 | - [x] Create a pip package
561 | - [x] Create Colab shared examples
562 | - [x] Create documentation
563 | - [x] Create Kaggle shared examples
564 | - [x] Make the code PEP8 compliant
565 | - [x] Set up continuous integration
566 | - [x] Make the estimator scikit-learn compatible
567 | - [x] Create unit tests (coverage: 100%)
568 | - [x] Generate API documentation
569 | - [x] Create a reproducible capsule on Code Ocean
570 | - [x] Submit pyMune to Software Impacts (published August 5, 2023)
571 | - [ ] Create a conda package (*postponed until NGT has a conda installation*)
572 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | require_ci_to_pass: yes
3 |
4 | coverage:
5 | precision: 2
6 | round: down
7 | range: "70...100"
8 |
9 | status:
10 | project:
11 | default: false # disable the default status that measures entire project
12 | tests: # declare a new status context "tests"
13 | target: 100% # we always want 100% coverage here
14 | paths: "tests/" # only include coverage in "tests/" folder
15 |     jupyter:               # declare a new status context "jupyter"
16 |       paths: "!tests/"     # exclude all files in "tests/"
17 |
18 | if_ci_failed: error #success, failure, error, ignore
19 | informational: true
20 |
21 | parsers:
22 | gcov:
23 | branch_detection:
24 | conditional: yes
25 | loop: yes
26 | method: no
27 | macro: no
28 |
29 | comment:
30 | layout: "reach,diff,flags,files,footer"
31 | behavior: default
32 | require_changes: no
33 |
34 |
--------------------------------------------------------------------------------
/colab/iris_dataset.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "id": "view-in-github",
7 | "colab_type": "text"
8 | },
9 | "source": [
10 | " "
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": null,
16 | "metadata": {
17 | "id": "zaaLaJHT35Fd"
18 | },
19 | "outputs": [],
20 | "source": [
21 | "import pandas as pd\n",
22 | "import numpy as np\n",
23 | "import time\n",
24 | "import os.path\n",
25 | "\n",
26 | "import warnings\n",
27 | "warnings.filterwarnings('ignore')"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": null,
33 | "metadata": {
34 | "scrolled": true,
35 | "id": "69XXeoif35Fn"
36 | },
37 | "outputs": [],
38 | "source": [
39 | "# install DenMune clustering algorithm using pip command from the official Python repository, PyPI\n",
40 | "# from https://pypi.org/project/denmune/\n",
41 | "!pip install denmune\n",
42 | "\n",
43 | "# then import it\n",
44 | "from denmune import DenMune"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {
51 | "id": "H3H8DYwU35Fo",
52 | "colab": {
53 | "base_uri": "https://localhost:8080/"
54 | },
55 | "outputId": "fee68095-9fd7-456d-f288-9140ceef8ea0"
56 | },
57 | "outputs": [
58 | {
59 | "output_type": "stream",
60 | "name": "stdout",
61 | "text": [
62 | "Cloning into 'datasets'...\n",
63 | "remote: Enumerating objects: 57, done.\u001b[K\n",
64 | "remote: Counting objects: 100% (57/57), done.\u001b[K\n",
65 | "remote: Compressing objects: 100% (46/46), done.\u001b[K\n",
66 | "remote: Total 57 (delta 9), reused 54 (delta 9), pack-reused 0\u001b[K\n",
67 | "Unpacking objects: 100% (57/57), done.\n"
68 | ]
69 | }
70 | ],
71 | "source": [
72 | "# clone datasets from our repository datasets\n",
73 | "if not os.path.exists('datasets'):\n",
74 | " !git clone https://github.com/egy1st/datasets"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {
81 | "colab": {
82 | "base_uri": "https://localhost:8080/",
83 | "height": 1000
84 | },
85 | "id": "xm54UWO835Fq",
86 | "outputId": "195b4bb6-b755-467c-82c2-c099f2a9445e"
87 | },
88 | "outputs": [
89 | {
90 | "output_type": "stream",
91 | "name": "stdout",
92 | "text": [
93 | "Plotting dataset Groundtruth\n"
94 | ]
95 | },
96 | {
97 | "output_type": "display_data",
98 | "data": {
99 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWkAAADzCAYAAABE8effAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXRb1aEu8G8fTZYlW57t2BmdxEmcwSZkTphCEgiFAIUWWoYWaKF3tXRuX0t729fSvk6P9lJuC6+3tFB6mS6UqSQUAiSQeXTmwUnsxEM827ItS9ZwzvtDgyVrsGTL1kny/bqcZZ+ztbUdVj/t7LMHoSgKiIhInaRUN4CIiKJjSBMRqRhDmohIxRjSREQqpk1WRUKI/QCmAOgFcCpZ9RIRXeSmATADqFEU5bLBN0WyZncIIboAWJJSGRHRpceqKErW4ItJ60nD24O2WCwWVFZWJrFaIqKLV1VVFaxWK+DN0DDJDOlTAEoqKyuxadOmJFZLRHTxuvrqq7F582YgyjAxHxwSEakYQ5qISMWSOdxBRDQsNrsLjW29SE/ToSTfnOrmqApDmohSxt7vxl/fOoJN++rR7/IAAKaWWPC5G8pRUZYf87X7TrTg3Z1n0dzRh9zMNFy7cAIWzx4HSRJj0fQxw5AmopTweGT85M87cKy2I+T66QYrfvL0Dvz4C0tQMT08qG12F/746gFsOdAYuHamwYrdx5qxbO44fPvuBdBcREHNkCailNhxpCksoP08soLnNhwLCenOHgee+edRfFTVALdbDlyXhAB8mbz1YCNcz+xCaYkF0yZkYcHMwgu+Z82QJqKU2FLVEPN+dV0XmtptKMo1odfuwg+e3IaG1l7IcugCPFlRIBTAf3XnkSbsPtYMABiXm45HPr8IE4sy42qTLCsQAhBCPcHOkCailLD3u4cs43B60NrZhx//aTvqWiKu9QAwENCDnW/vw//+8w788bsrkaaPHnc7D5/Ha5tP43htBzSSwMLyIty+cjqmTQhbADjmGNJElBKlJRbsP9ka9X66QYtOqwNf/+0OeOThb1/RbnXgo/0NWLN4UsT7r28+hb+8dSTws8etYOvBRmw71Ii8LCOmjc/CDcsmo7KsYNhtGAnOkyailLhuyWRoNdEjaNWiifjV3/ckHNCRBioOVreF/NzR7UBblx3tVjueffsoFAWBLz9FAVo77dh5pAk//q8deG7DsYTakSzsSRNRShTmpOObn52P3z6/D26P70Gg4v2jfEouyiZm4fXNpxOuN9J4suT7LNh+6Dxefv8kzjRYAQAmoxZuzxAfAooCCIFXPqjG/BkFmF2am3CbRoIhTUQps3xeMWZMzMaL753AtoON6OlzQQjgWG07Wrr6hlXn4J09hRBYOKsIG3edwxP/UxVyr8fmGro+DPTO39ley5AmokuLvd+NrQca0dfvDkyXUwC0dNiHVZ8S+MNLqxHwyDKe+eeRQIAPd/ZGQ2v0h5ejhSFNRCn1ygfV6Isw00OI0DHi4fJ4ZDz2/L6Qa4rinWoXDxE0yu1wevC9P2yB0+XBzMk5WLt0MiYUZoy8kTEwpIkoZRTFO5PC38MNDuWRTFUeWN8iIEdJ+rg/AAQAxTsfu76lJ9ALP91gxb+21+KBm+fgqsvGw2TUDb/BMTCkiShlZFmB0+WJGJjD7UULDAxnKCOYuuc3ePGMnyIrcMoKnnz1IP702iEsml2Ee9bOSnrPmlPwiChlGtts0GuHjqFEetXCP66tKFEXuQyXf+hDlkPrlhUFO4804Xt/2IK65p6kvidDmohS4uS5TnzniY/R75KHLBtvr9of5oqsJNwTj+eDQFaUsNkjwED7eu0uPP+v44m98RAY0kSUEk++eiCupeHxEgLepd/D7EHHG+pDldt5pCmpvxdDmojGXE2jFWcau5Nap04r4Rt3XgZNjFWMoyW4F+6RFfQ5hp5/HS+GNBGNuXarI/B9sjacmz4hG0vnFWNReWHS6hwOi0mPLLMhafVxdgcRjbmCbGPS6zxe245bv/vmqE2Fi8W774d3HGTJnHFJ7c2zJ01EY25iUSZmTMwGkLy9mz0yICvepd7Dnb4nCRGzFx5PUz8+0ICaRuvwGhCpTUmriYgoAV++vQKZ6foR1RExM8XwhlA0koD/kWO0lw8V/kIAfQ43/vT6ocQbEAVDmohSYtK4TDz2tSux7opSGPSawHUpzpAVInSz/+DXCCGiBm00Ht+0PUXBsOdX+0P8aE0HzrfZhllLKIY0EaVMQU46Hlg3B59aOR2SJLwbLAnhDdkhUjZWrzZ4IYskDV1XMvnHpjt7HEOUjA9DmohSbvXiSdANWnkYT1B7y/m/BgoPDnAhBMbyPFpJAAXZ6cmpKym1EBGNQE5mGr75mflRT2oRAoGedvDXwH3h7T0PWhEoBLyLW3w967HoUQshMH9GIfKykjODhVPwiEgVls0rRmmJBe9sr0V1XRfS9FoYDRp8VNUQdQaIEIDFrEdXjzPi/Wjjy5IkIMsKJOGdEZIsQgB5WUY8eOvcpNXJkCYi1SjKNeHzN84O/Fx1sgUfH2iM+Rq3W0l872kFyM8yos06vIMFIslI1+GmFaW4YfkUWLiYhYguBRXT81GcZwqciDK4R52epkVfvzts/FqRY+/fISsK2qx2//GFw5pX7dtmGnOn5eGhW+diUlFm4pXEgSFNRKrU2mnHSxtPoK3LHhSiA8dfWUx6KApghyf8xQJxzaMTwjs/2uOJf1OmwH7VvoMFFswsHLWABvjgkIhUqLXTjv/1h4/x3q5zcLrlwDQ6RQFMaTrcfd1MPPHtawZOGR8m4Zvu99An52LOlPADZrUab/c8+GGlkETIapeK6XkjasNQGNJEpDovvHs8ZBMmwDeNThLo63cjN8sIi9mAaeOzIr4+nqUs/jIeWUGaQYuTdZ1B0/m8X7LiW4kYZTxk7tRcTI3ShmRhSBORqrjcMrYcaIhZ5sM9dQCAm64ojVxA+Iclot4O9IYFgC1VDXDLSqBn7f8CvKMmOZlpYXXMmpyD79y9YMjfZ6Q4Jk1EqtLncA15WktXbz8AYNHsInxmzQy88O6JsDJ3rC5Db58L/9p5Fi73QH0CA0dsAUBlWT4OnW6P+X6SJPCbh6/A3uMtgKKgsqwAs6bkJPBbDR9DmohUxZyuh8Wkh9UWee4zABTnmwLf37l6BpbPK8bGXefQ2mVHXpYRqxZOwETfw7w718zAb5/fh/0nW8Jmh2Sm6/HAujn4+m83xWyTLCsom5iNMt/OfWOJIU1EqqKRBFYvnoRXPqiOWua6JZNDfp5QmIH7bpodsazFbMBPHlyK93aexT+31qD2fDeMeg1WVJbg9pXTUZRrwtxpedh/sjXq+1VMzx/W75IMDGkiUp1PXTsdR86041htR9i9dVeUYv6MgoTrXL14ElYvnuRdaThoI49br56GqpOtEafhaSWBddHGvscAQ5qIVCdNr8VPH1yKD/fW48O9dei29aM4z4zrl07GglmFI6p7cEAD3p7yVz5Vgf96
4zAczoF516Y0LR7+dOWoz+CIhSFNRKqk12lw3ZJJuG7JpDF5v1WLJmHp3GJsOdCADqsDBTnpWD6vGGmG1MYkQ5qIyMdk1IWNd6ca50kTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkR0iW5VQ3gYguYtpUN+BCVGdtxGvH/oXd9VVwy26U5kzCDWXXYJJlPPY2HkS7vQsz8qZiUUkFdBpdqptLRBcwhnSCqttr8PPNT8Dh7g9cO91xFo9v/0tIuXeqN0Gv0eHOuTfjxhnXjnUzAxp7mvHm8fews24f+j0uTPV9oCydcHnK2kRE8WNIJ+jpvS+GBDQAyErkIQ+nx4Xnql6BJARuKFsJAHC4+1F1/gjsLgemZE/A5OwJo9bW2s56/HTTf6DPZQ9cq26vwePba1BnbcSn59w0au9NRMnBkE7A2a561HbVD7qqxHyNAuDVoxuwauoV2Hj6Y7xyZH1IaM7IK8XDi+9Dnikn6e19et+LIe8V7B9H38HyiQtRklmU9PclouThg8MEdNq7w64psTMaANDbb8OLB9/A36peDQvNE21n8Ojmx9HvdgbVqaC/uRaOxlOQnY5htbWhuwnV7TUxy2yq2T6suolo7LAnnYCCEfR2t57bE/Vec28btp7bg5Wly9B75GN0fvwyXB1NAADJkI6My1Yh56rPQGji/8/VabcOWabD3hV3fUSUGuxJJ6A4swiz8qeFXFOGGO4AAEmS0OUI74UH23f+EHoObULLG7/3BrSiAIoC2dEL6/bX0fC3H0Bxu+JuazzDJ/mm3LjrI6LUYEgn6IsLPoustMyEXlNRVA4hRMwysuxB56YXvD8oCryj2QMfAM7GU6h76qtwd7fF9Z5F5nzMLiiLel8IgWumLI2rLiJKHYZ0goozCvGL1d/DLbPWINtoGbK8UZuGLy+8F5a0jJjlynQWuHs6ggI6nNvaipY3Ho+7rQ/MvyPq+9417xYUmvPjrouIUoMhPQzZRgvunHszrp929ZBlMwwmZKSZsXb6NVHLZBrMWGaZHNd7O+qOo78p9gNBv+LMIvxi1fdw04xVyDflwmLIwPxxc/DIlV/BjTNWxVUHEaUWHxyOwPjMcUOW8Q+N3DxzDboc3fjXqc1QgqaE5KZn4zvLH0KOxoheSQI8nuiV+UZM+s+fhqFoSsQiiscNxe2CZDACAHLSs3BXxa24q+LWIdva31yLnqr34ba2QJuRC3PFSqQVTxvydUQ0ehjSI3B5yVykaQ1hi1uCXTl5ceD7T4pszO/WY29/O5x6PUoLZ+DKFffAkFUIADCVLYTt2I4Y7+hNaX8AB3O2N6Jr6yuwHd8Bxe2CLrsQGfPXwLLwExCSJmqNjoZq9DeeRN+ZA7Cf3h9yr3v/e7AsvAG5q++L0SYiGk0M6RhabO3YcnYXevttKM4sxPKJC2HUpYWUeWjB3Xh8x9OhL1QQ6PU+f/ANlGZPgmXHBvQc+ABmAFcBAPqAlp1oqjmB4nsehS67CHlrH4KrrRHO1rPhjRHekSnJYET61Pkht5ytdWj8+48g23sD11ydzeh4/zn0N55CzrX3wnZkCzw2K3TZhTDPvgKy04Hm1x5Df0P1oHFwAQQ95LTuXg/DuGkwz7ki0b8+IkoChnQULxx8A2+eeC9kaOK/D76Ory65D5eNmxO4tnzSAlgdVjxT9UroyhbFO+DvcDnw1x3P4oEDhyO+j6e3Cx0f/B2Ft30bGmMGSr7wGzS9/EvYT+/zFhACgcQHkLXsk2E96Y4PngsJ6GC2o9tgO74jpG3tH/4dGoPJ+6DS39jghisIDeo9GxjSRCnCB4cRvH96C944/m5IQAOA3eXAb7f9GU09LSHXa0/thKQokICQLwCAIuN0TxPadNH/qvuq98DT1wMAEJIGRXd8H9lXfQYaUxb8Aa3NzEPedV9A1tJbQl7rsVnRd+ZA5Ir9PWQ5dJxbcTq8U/miLpcMve5sju9BJRElH3vSEbx98oOo91weF949/THurbwNAKAoMjra6gBDrBoV9Gok5Lkib8SkyB54bF3QpHunywkhIXvF7chacjOcLWcBSYK+YFLEsWX7uaOA7A66EjxcESWEleBvos3fHrgn9GlRyhDRaGNID9Ll6EZjT3PMMsdaqgPfy/Ze5Nr7gAgP8/wEgJwoAQ0AQqOFJiN8haDQ6mCIMbvCdnxnhHnTiq+HHBy+vu8DPefgYZko7VKUQNiby5dHbQMRjS6G9CBaEX0mRKCMr0fb2NOMLTU7YNVogh+7hSnzaHDaqIMMYKrDhSx3aDCaZi6BJs2UUDtlpwOt6/8YNpQxYFAvOloYx6LI0JiyYFmyzvujxw1b9W64WusgpZlhLl8OjWnoBT1ENHwM6UHMBhNm5E3FibbTUctcXjIPf9v/CjZUf+iNwkwDoChhQa0A0CjAca2ME4UZgKJAAJjX249bW3uhVwBddhFyVt6bcDttx7dDdvTFWTqOrfqiyFn9eegsBXA0nETzPx6DJ/Cw0fvAMmv5bchecfuw6yei2FQX0nsaDuLlw2+h1dYBvVaHZRMW4DNz10Gv1Y9ZG24rX4tffvzHiJv55xizIACsr/5w4KIQEL6hBAEgwy2jVyNBFoDbN2Tgj0kJwAGzAS4B3NsN5K55ANqM7ITb6La2er+JZ6/UEeivPwHjhHI0vfTzsA8FxeNG50cvQZuRg4yKlaPaDqJLlapmd/znjmfw6y1PorarHjZXHzrtVrx98n08+Ob3htxFLpnmFc3C15bej1xjVsj16TmT8cOrvooPzmwb9Arhm8csIAPo0UrwCHjHdAPdam+Y+iP1qMmA824
7Wl7/HVydTQm3UWP2B/vohnT3ng1oeOb7kO0239sp3qETRQ78Tl073xzVNhBdylTTk95dfwAfnd0Z8V6fy47/s/kJ/Pq6HyT1PWVFxv7zR7D//GHIioI5BTOwaHwltJIGi8dfhoXFFTjccgI9zl6UZIzD5Ozx6OjrQrNt8E50CmQovkD2BnU0wY/0jpv0KOzqg3X328hb80BCbTfPWob2jc9CcUY+eSWZPD3t3m/Ceu3eh5Su1nq4u9uhzeTWp0TJppqQfvnwWzHv13bVo9NuRbbRApfHNeJTuK2Obvzyoz+ipqsucO2DM1tRZM7H96/8MgrN+ZAkCfOKZoW8ThthGpw81JDDoPv+oPaHeV/1XmBQSPc31aB7/7twtdVDMmbAPOcKmMoWBabhSWkm5Fx7L9o3/L+4ft/RpcBj62JIE40C1YR0a1/HkGWe3vsCTrSdQY/Thuw0C64pXYZ1M1cjTRtzknJE/7nzmZCA9mvqbcXPNj2BsrwpONN5DgaNHksmzMeq0hUwG0zITMtAWe4UnPQdTTV4wUsiptp9m/j7ZmjI/X3ortoI66634fHvGy0kQFHQd2IXdLklyFlzH0yllQAAy/w1sB3dAsfZI8NuQ7JY925AwY1fSXUziC46qhmT1sfRM97VcAA9Tu/YaKfDin8c3YCfb/o9nEHnA8bi8riw9dxu/HnPCzjYdDxiwCqKgmZbK7ac3Y3zPS2o7arHi4fexCMbf4U23wfJ7bM/EdjEP56TWQZPzBMAJjncmNjvXYSSNmkOPDYrGp79ATo2/m0goAHf1DnvykFXez2aX/gZ6v70De8iFwB5ax+ClJ7pDXORuv+cvYc/Ttl7E13MVBPSKyYuGLKMFCGEqjtqsfHMliFf+071Ztz3j2/h8e1/wbunP4Li+5+syCFf0UK3xdaOv+x9CYD3weI3ln4BuelxzsoYNHl6gsONu5q9D0KFpIFl4SfQuv4puFrrMPSDQAWutnqcf+FReGxW6HNLUHzPo0ibMi9kv40x53HD2d6QuvcnukipJqTvmHszTLr0Yb12c02s7T2BZ/f/D/6y70U45fjPCIxkf9MRtPd1AgAWja/EnXNviv/FQkBSFNzfaMW/NVph9iiQ9GnIXftFdGx+AX0ndyGRmRoemxXdVe/D2d6A9vf+CkftIUAexoKVJLJH20OEiIZNFWPS3f292FyzHWZ9OmyuyAs0RNQ9JhB1ep5b9uBQ0zGsP/lhxPuxRDqTUFEUNPe2BXrQ1W21kIQUcT51SF0QMGj0+Mbiu1Ha2gJPbye0lnyYypej6aWfe7cLHYa+k7vQvWcDPDYVnPqdwqEWootZSkO6sbsJzx14DfsaD0UdZhAQgaEJKJHDs8CcF/Jzq60drxxZj+11e2NuyB+LoigR3yv4zECtxvvXFyuoSzKKsKBkHlZPvcLbzskD9/pO7x92QAOAu7sNHpt14IIQo764JSLh3dTJ6HugSUTJk7KQbuptxY8//B2sQyxSUaCEBHWkHvW1pQMbALXY2vGj9//vqCx+mZYzGSWZRYGfF5bMw3rfjnn+8XL/w0ghBMz6dPxqzfejrpbsq94zjFYM/P6y0xHlfrKC2vdevg8roTNAcTmCPggGdtwzlS2CPrc4Se9LRH4p+zfqa0ffQbejJ+HXDZ6RsXTC/JAjql4+/NaoBLRBq8fnL/tUyLVZ+dMxt3BGyDUhRKAHvm7mmogB7epsQtu7T6Pn0OaBlXsJPvST0tIBKZ7XjPBhYlC7hFaH9LJFAzNJ/AE9YxHyb+L0O6LRkJKetKzI2F63N87pa16SkKAoCqZkj4fNaUduejaunboCyycugCQkdNqtcLgc2HFuX1LaOC6jAE29rdAICQtKKnBb+VpMzCoJK/etZQ/i6X0vYdu5PfD4hjxM+nTcPHMN1s1cHVbe0XASTS/+DHK/PXTr0Lj/KhRIeiMsS26GveYQHGcHnfjiP8lFkSH0RmhMFuhyxsFefwLotyOxXnbolqe67HEouv27cLbVw36myjvEMaUC+rzxCdRJRIlISUi7PW44PYnOtFBg1KVhomU8qpqO4HjbadRZG7H93D602ztwtqveu61Ekv6pr5O0+NKCu7Bi8uKIqwz90nRp+PLiz+Gz827BqY5a6CQdyvOnRexBK4qC1ree8AZ0XII28PePeQsB2elA56YXIBnNkXvhvrKKywF3Vz/cXS2QDMaYy9WjGwjqzMtWAQD0eeMZzERjJCUhrdfqUWTOH3JzfT8FChTFu4fHptrtALwPFHudfdjTOLxpX9lGCzrt1oj3JCGhrvs8ntrz37C7+7G27Jq46ltYUhGzjOPsYbg6gjZTEsLXsY3ywRIIaN/9QTMoZHsvhE4PxeUML4vQsxHlfjuERgsl0Q9HX0abZi2Fed7Vib2WiEYsZQ8O10y7Es/uf2XYPd+R9pg/V3k7ynJL8Y+jG/D+ma1QoEAadOgrALx0+C1cPWVp2Cnhw+Hqagm/GAhqAFCQNrEc5jlXoe/kLtjrjgGSBvC4obgiz1JR3C5kLlgLodXC1dUCe81BKBEfKHqP6cq8/DrIbjdc7Q2QnXa4Ws4i5AHhoNNb9IWTkbX0FpjKl0Fwmh3RmEtZSF8/7Wqcaq/F1nN7kjZEkYhNNTuwbOIClOZMxAc120JmjQTP0HC4+7G74UDIw8nh0pqjrFAMDFd4p7FlXrYqMLTg6mxC3ZMPx6zX2XoOxXf/BM62etSf2BWzrKurFePuHNhNsPG5H8FRdyxiW3S5xSh54NcMZ6IUStn/+yRJwsNL7sMjV30F8wpnwmLIgDSGzTnWegrPH3wdO+urAteUoKXh/iXjiqKg17dfyEgZSyugDTnL0Head2B/Zhmdm1/E+RcehbPVt/mTNPTfiYijTKDsoPHrglu+Dn1++PiyNjMPhbd/lwFNlGIpXcwihEBFUTkqisoBAL/Y/Afsbzo8xKuSo9/TjzePvzvkw0YFCqp9O96NlJA0yLv+QTS98qsYZxMC9pqDaHju35Gz4jY46k945yc7HVGn6RmnzgcA6HJLoMsuhKsz+li/v6yfNiMHJff/BrYTO9F3eh8gyzBOngtT+XJIusR3FySi5FLFsnC/eUUzxyykgfgX5+1pOIjefhvMhsQOi43E3dsZe48N/wku9h60v/ds0DixEjSTYyCstRk5yJjnfbAphIBl6a1oW/9UxKq1mXnImHtV2HWh0cJcvpynghOpkKr+LXvNlGUwjOFZhoHl5jEICLhkNw40H41axn72CKy716Pn0GbI/dEPh1VkD7q2vupbRh3rrz704V1IMAd9sugLJ6Posz+CxmgOXMusvBY5K++B0Ic+6NQXTMK4z/47JIMxxvsSkdqoqiedrjfih1d9FT/58D/glt1Dls80mHF7+Sewq7EKh5tPJLUt/geJ/jFctyd8eMLZVo+W1x6Ds7U+cK1dn4bsK++AZdGN4eWbauDuHnz01mAxpuP5bqXPXIysRTchbfyMiEWzlqxD5mWrYKveA9nRB0PhZKRNmDnE+xKRGqmqJw0AM/Km4s83/zrinGNvf1JAEhIkIWFazm
RcX3Y1vr3sIRi0hpg75SVCElLI8m4BYEb+1JAyHnsvzj//05CABrz7abRvfBY9hzaF1avE8cETk28jI0lriBrQfpIhHRlzroRlwfUMaKILmOpCGvD2qL+65H5k6E2BQPYGpxQyOyHHmIWq80dwvO0UXB6XN1iTFNTBLi+ZhyJzfsi1noMfwtPbGfU1XdteC7umL5gEyZA+cOL2UKI8KOSQBdGlQ1XDHcEMWj1Wli7HWyc2ht3zz2N+/8xWfFCzDVC8p3X7e8CIMGMj05CB7v7EN3SalT8N/7bwnrDr9jP7Y77O1d4IV2cTJEM6eqreR+/hj+DuafctCY/ziWXo1hkB5vIV8b2eiC54qg1pALhjzk1o7GnG3sZDQVcHtiwN9Kp9MyBkRR4YqsDAMzYFCkw6I/pc9phj3QJAcWYRLiuaDUnS4PLiOZiVPz1y4Thy1tlah7b1Tw1jU37/dqPhKZ1etsusRogAAAQ9SURBVJDDF0SXEFWHtFajxXdWfAlHW6qx7dwe9LkdONNRi/M9rWGLMvx7TkfbfL+pt3XomRxCYOWU5bhp5qoh22acPBf22kNR72uzCtC6/inIwzk1Rfj/EIFpd5LRjMzKVci+8tOJ10dEFyxVh7RfecF0lBdMh93lwP2vfSviiSlCiIinf/vFM9UuPz0X15QujatNGZXXwrrrn/D0Rdq7WoG7px1wD/NMRUUJTNErvvdRCL0RupxxXFxCdAlS5YPDaFweV8y1gX7DeXg4p3AG/v2ar8Osj2/BiiY9E0V3/hDarILQG5LkbYp7hDM5AAidHvqiUhgKJzOgiS5RF0RP2i/DYEaROR9Nva1h94I70UP1qv0kIaAogFbS4GtLH0CmwTzka4IZiqZgwpd+j77T++FsOQdNWjq6dq+Hu+N8Es4aVGCetYzhTHSJu6B60kIIrJ0ee2/nxHrR3oePHkVGm619eG2SNDBNX4Ds5Z+EsbTSG9BJIBlMyL7yzqTURUQXrgsqpAHvPtRrpl0Zdl0jSSEzPoYK68F3MxLsRUeiBG+aFGFv6kTkXv9FaDNzR9wmIrqwXVDDHYC3N33//DuweuoV+Kh2J7r7e1GcUYilE+bjkY2/Qq8z+t4ZofUMfD6V509HvmnkgajLLoI2Mxfubn+vPI4hj+A9PHwzOXQ547jZEREBuABD2m+CpRh3Vdwacu2byx7Eb7Y8Cbu7P2RRi396np8UFIxGrQH3VN6WlDYJSQPLohvRvvHZWKUGnYASfEtAY7Kg4NZvRpzBQkSXngs2pCMpL5iO3639MTae2YoTbaegkbRYVFKBhSWVsLvt2HZuLz48sw3NtjZoJA0WFs/DbbNvwARLcdLaYFl0I9w9HbDuejvC0u+gg2WFgDCkI3vZrbDXeM9pNE6pQEbFtdCkZyStPUR0YbuoQhoAsowW3D77hrDrGQYTbpl1HW6euQZ9Ljv0Gh10Gt2otCH32nuRueB6NL/6GJznTyHkDMEgprKFyFp6C7KW3jIq7SCiC98F9+BwpIQQMOnTRy2g/XSWAhSsexjCkB7YvS6kHToDshavG9U2ENGF75IL6bGkzxuPcXc8Al1OUch1XXYRij79fegLJqaoZUR0objohjvUJm3CLIx/6PdwnDsKd3crtJl5SJs4mw8GiSguDOkxIISAcdLsVDeDiC5AHO4gIlIxhjQRkYoxpImIVEzEs1tcXBUJUQ+gxGKxoLKyMil1EhFd7KqqqmC1WgGgQVGU8YPvJzOkuwBYklIZEdGlx6ooStbgi8mc3VEDYAqAXgCnklgvEdHFbBoAM7wZGiZpPWkiIko+PjgkIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCr2/wEp4dNKS1qWBgAAAABJRU5ErkJggg==\n",
100 | "text/plain": [
101 | ""
102 | ]
103 | },
104 | "metadata": {
105 | "needs_background": "light"
106 | }
107 | },
108 | {
109 | "output_type": "stream",
110 | "name": "stdout",
111 | "text": [
112 | "Plotting train data\n"
113 | ]
114 | },
115 | {
116 | "output_type": "display_data",
117 | "data": {
118 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWkAAADzCAYAAABE8effAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3dd3wc1aEv8N+ZbdoirbpkyR1XuUg47jZgbDCYYAPBCSUhCSEXcj8JaTfJS+cRkvsS8kjeDUngJSQXLgkOXAiEYggYbIN7lYvci2wVy+orabWrLTP3jy3a1Rbtrlbasf375iN/pJkzZ47MJ789PnPOGaEoCoiISJ2kTDeAiIhiY0gTEakYQ5qISMUY0kREKqZNV0VCiP0AJgDoAXAqXfUSEV3mJgGwADirKMrVA0+KdM3uEEJ0ArCmpTIioiuPTVGU3IEH09aThq8HbbVaraiqqkpjtUREl6/q6mrYbDbAl6ER0hnSpwCUV1VVYdOmTWmslojo8rVs2TJs3rwZiDFMzAeHREQqxpAmIlKxdA53EBGlxO5wo7G1B6YsHcqLLJlujqowpIkoYxx9HvznGzXYtK8efW4vAOCqcis+d0sFKqcUxb123/FmvLvzHC6296IgJwsr5o3BghmjIEliJJo+YhjSRJQRXq+MR5/ZgaO17WHHTzfY8OifduCRLy5E5eTIoLY73Pj9Kwew5UBj8NiZBht2H72IxbNG4VufmQvNZRTUDGkiyogdNU0RAR3glRU8//bRsJDu6Hbi2TeP4MPqBng8cvC4JATgz+StBxvhfnYXJpZbMWlMLuZOK7nke9YMaSLKiC3VDXHPn6zrRFObHaUFZvQ43PjBU9vQ0NIDWQ5fgCcrCoQCBI7urGnC7qMXAQCjCkz4/ufnY2xpTkJtkmUFQgBCqCfYGdJElBGOPs+gZZwuL1o6evHIH7ajrjnqWg8A/QE90IW2XvzvZ3bg999Zjix97LjbefgCXt18Gsdq26GRBOZVlGLt8smYNCZiAeCIY0gTUUZMLLdi/4mWmOdNBi06bE58/Vc74JVT376izebEh/sbsHLBuKjnX9t8Cn9+oyb4s9ejYOvBRmw71IjCXCMmjc7FLYvHo2pKccptGArOkyaijLhp4XhoNbEj6Ib5Y/GLv+xJOqCjDVQcPNka9nN7lxOtnQ602Rx47q0jUBQEvwIUBWjpcGBnTRMe+eMOPP/20aTakS7sSRNRRpTkm/DNe+fgVy/sg8frfxCo+P6omFCAKWNz8drm00nXG208WfJ/Fmw/dAEvvX8CZxpsAACzUQuPd5APAUUBhMDLH5zEnKnFmDGxIOk2DQVDmogyZsnsMkwdm4e/vXcc2w42orvXDSGAo7VtaO7sTanOgTt7CiEwb3opNuw6jyf/uzrsXLfdPXh96O+dv7O9liFNRFcWR58HWw80orfPE5wupwBobnekVJ8S/MNHqxHwyjKefbMmGOCpzt5oaIn98HK4MKSJKKNe/uAkeqPM9BAifIw4VV6vjCde2Bd2TFF8U+0SIUJGuZ0uL777uy1wub2YNj4fqxaNx5iS7KE3Mg6GNBFljKL4ZlIEerihoTyUqcr961sE5BhJn/AHgACg+OZj1zd3B3vhpxts+Of2Wjxw20xcd/VomI261BscB0OaiDJGlhW43N6ogZlqL1qgfzhDGcLUvYCBi2cCFFmBS1bw1CsH8YdXD2H+jFLct2p62nvWnIJHRBnT2GqHXjt4DCXTqxaBcW1FibnIJVWBoQ9ZDq9bVhTsrGnCd3+3BXUXu9N6T4Y0EWXEifMd+PaTH6HPLQ9aNtFedSDMFVlJuieeyAeBrCgRs0eA/vb1ONx44Z/HkrvxIBjSRJQRT71yIKGl4YkSAr6l3yn2oBMN9cHK7axpSuvvxZAmohF3ttGGM41daa1Tp5XwjbuvhibOKsbhEtoL98oKep2Dz79OFEOaiEZcm80Z/D5dG85NHpOHRbPLML+iJG11psJq1iPXYkhbfZzdQUQjrjjPmPY6j9W24Y7vvD5sU+Hi8e374RsHWThzVFp78+xJE9GIG1uag6lj8wCkb+9mrwzIim+pd6rT9yQh4vbCE2nqRwcacLbRlloDorUpbTURESXhy2srkWPSD6mOqJkpUhtC0UgCgUeOsS4fLPyFAHqdHvzhtUPJNyAGhjQRZcS4UTl44mvXYs01E2HQa4LHpQRDVojwzf5DrxFCxAzaWLz+aXuKgpTnVwdC/MjZdlxotadYSziGNBFlTHG+CQ+smYlPLp8MSRK+DZaE8IXsICkbr1cbupBFkgavK50CY9Md3c5BSiaGIU1EGXfjgnHQDVh5mEhQ+8oFvvoLDwxwIQRG8n20kgCK80zpqSsttRARDUF+Tha+ec+cmG9qEQLBnnboV/954es9D1gRKAR8i1v8PeuR6FELITBnagkKc9Mzg4VT8IhIFRbPLsPEcive2V6Lk3WdyNJrYTRo8GF1Q8wZIEIAVosend2uqOdjjS9LkoAsK5CEb0ZIuggBFOYa8eAds9JWJ0OaiFSjtMCMz986I/hz9YlmfHSgMe41Ho+S/N7TClCUa0SrLbUXC0STbdJh9dKJuGXJBFi5mIWIrgSVk4tQVmgOvhFlYI/alKVFb58nYvxakePv3yErClptjsDrC1OaV+3fZhqzJhXioTtmYVxpTvKVJIAhTUSq1NLhwIsbjqO10xESov2vv7Ka9VAUwAFv5MUCCc2jE8I3P9rrTXxTpuB+1f4XC8ydVjJsAQ3wwSERqVBLhwP/63cf4b1d5+HyyMFpdIoCmLN0+MxN0/Dkt67vf8t4ioR/ut9Dn5iFmRMiXzCr1fi656EPK4Ukwla7VE4uHFIbBsOQJiLVWffusbBNmAD/NDpJoLfPg4JcI6wWAyaNzo16fSJLWQJlvLKCLIMWJ+o6Qqbz+b5kxb8SMcZ4yKyrCnBVjDakC0OaiFTF7ZGx5UBD3DIb99QBAFZfMzF6AREYloh5OtgbFgC2VDfAIyvBnnXgC/CNmuTnZEXUMX18Pr79mbmD/j5DxTFpIlKVXqd70Le1dPb0AQDmzyjFPSunYt27xyPK3HXjFPT0uvHPnefg9vTXJ9D/ii0AqJpShEOn2+LeT5IEfvnwNdh7rBlQFFRNKcb0CflJ/FapY0gTkapYTHpYzXrY7NHnPgNAWZE5+P3dN07Fktll2LDrPFo6HSjMNeKGeWMw1v8w7+6VU/GrF/Zh/4nmiNkhOSY9HlgzE1//1aa4bZJlBVPG5mGKf+e+kcSQJiJV0UgCNy4Yh5c/OBmzzE0Lx4f9PKYkG/evnhG1rNViwKMPLsJ7O8/hza1nUXuhC0a9BkuryrF2+WSUFpgxa1Ih9p9oiXm/yslFKf0u6cCQJiLV+eSKyag504ajte0R59ZcMxFzphYnXeeNC8bhxgXjfCsNB2zkcceySag+0RJ1Gp5WElgTa+x7BDCkiUh1svRa/OTBRdi4tx4b99ahy96HskILbl40HnOnlwyp7oEBDfh6yl/5ZCX++I/
DcLr6512bs7R4+FNVwz6DIx6GNBGpkl6nwU0Lx+GmheNG5H43zB+HRbPKsOVAA9ptThTnm7BkdhmyDJmNSYY0EZGf2aiLGO/ONM6TJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMUY0kREKsaQJiJSMYY0EZGKMaSJiFSMIU1EpGIMaSIiFWNIExGpGEOaiEjFGNJERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSA+RInsz3QQiuoxpM92AS5Gr5Tw6t70K+/GdULweGEZdBeu8j0NfPA69p/bA09WOrNHTYJ46H0Kry3RziegSxpBOkrPhBJrWPQbZ5Qwe62s8hebXfh1WrmvPegitHnnL7kXugtUj3cwgV1sjbDv+Afux7ZDdfTCMmgTr/I/DMn1xxtpERIljSCep9Z1nwgIaAKDIUcsqHhfaNzwHIQSs828FAMguJ3rP7IfS54C+ZAIMpROGra19F2tx4a+PQHb29h9rOIHmV0/A1Xwe+dfdPWz3JqL0YEgnoe9iLVwXzyZ5lYKOra8ge85KdO9/Dx0fvRQWmlmjp6L4tq9Bay1Kb2MBtL7zx7B7herc+gosM5ZCXzg67fclovThg8MkeHs6Ig8qyqDXyY5udGx6AW3vPRsRms7647jwwk8gu/tCqlTQd7EWzsZTkb32BLla69HXcCJume6DG1Oqm4hGDnvSSdDmFqd8bU/Nlpjn3B1N6KnZgpyqFeip+QgdH70Ed3sTAEAymJB99Q3Iv+4eCE3i/7mifqAMLNPdnnB9RJQZDOkk6AvKkTV2Opznj4YcHbwnDaGBt6cTECJmkd5TeyE0GrS88Tt/tb56ZWcPbNtfg+PcYZTf99OEZ4skMnyitab+oUNEI4PDHUkqXPUlaCy5SV1jnFgZN6ABALIXHZvW+b5XFPjCv/8DwNV4CnVPfxWertaE7qnLK4Vx/MzYBYSE7MrrE6qLiDKHIZ0kfUEZyu9/HLmL74AmO3/Q8sJgQvHqr0Bjjh/s2pxCeLrbQwI6ksfWguZ//EfCbS246Ysx75u//NPQ5ZUmXBcRZQZDOgXa7DzkL7sX1rmrBi2rMVqgMeXAOu+W2GVMOTCMmZbQvZ11x9DXlNgME31BOcrv/zmsC9dAm1sMjdkK06SPofSeHyJ3wZqE6iCizOKY9BDoEpi+FujJWhfdDo+9E1173gmbV63NKUTJ2u9AY8oGJAnwxllm7h8x6btwOub8asXrgeJxQzIY/fUXoGD5fShYft+gbe27WIvu6vfhsTVDm10AS+VyZJVNGvQ6Iho+DOkhME+eC6HPghJnmpx51rLg9/qiMdDll8Ld1ghotDCOm4nCm78IXW6Jr+yUebAf3RHnjr6UDgRwKFdbIzq3vgz7sR1QPG7o8kqQPWclrPM+DiFpYtbobDiJvsYT6D1zAI7T+8POde1/D9Z5t6DgxvvjtImIhhNDOg53ZzN6aj6C7OiGLr8MlhnXRARk4S3/ipYBS8JDdW78C7JGXYXu/e+h+8AH/Se8HjjOVKPxv36Isvsegy6vFIWrHoK7tRGulnORFQnfyJRkMMJ01ZywU66WOjT+5ceQHT39be+4iPb3n0df4ynkr/gs7DVb4LXboMsrgWXGNZBdTlx89Qn0NZwcMA4uwh5y2navh2HUJFhmXpPYXxoRpRVDOob2jX9F547Xw4Ym2jf+FcW3fQ2mSf0hmT1jKbw9HWjf8GzUemSXAy1v/hbu1oao5709nWj/4C8oufNb0BizUf7FX6LppZ/DcXqfr4AQCI5zAMhd/ImID4r2D54PC+hQ9iPbYD+2I2zRTdvGv0BjMPseVAIIf1Cp+H4MDeo9bzOkiTKEDw6j6Nq/AZ3bX4vYk0Pu68XFvz8Bd/uFsOP2o9tiV6YocLfUxV2Z2HtyD7y93QAAIWlQetf3kHfdPf7xbF9YanMKUXjTF5G76Pawa712G3rPHIh5b0ABBmynqricvql8MdsUfjz5pfBElC7sSUdh2/VGzHOKx4Wufe+i4IbP+X5WZLiaowxPJEGRvfDaO30PDwEIISFv6VrkLrzNV7ckQV88LurYsuP8EUD2hBwJHa6IEcJK6Dex5m/3nxP6rOR+ISJKG4b0AJ6eTt+DvTic52uC38uOHigh+26kQmi0UedcC60OhjizK+zHdkaZN634e8ih4ev/PthzDgnvGDv4QVGCYW+pWBK3/UQ0fBjSAwhN7JkQQf49NFxtjeg5tAm+EIw9nCH0Rt8MkEBIDlh9aJ62EJosc1LtlF1OtKz/fcRQRr8B7YkVxvEoMjTmXFgX+uZUK14P7Cd3w91SBynLAkvFEmjM1uTrJaKEMaQH0BizkTVmGpx1x2KWMU+eh7YNz8K2e33cFYIBisuBsCAP9HSFgC6vFPnLP5t0O+3HtsfchjRKC5KuPyD/xs9DZy2Gs+EELv79ibBNmdo/eB65S+5E3tK1KddPRPGpLqT3NBzES4ffQIu9HXqtDovHzMU9s9ZAr9WPWBtyl65F04v/DsiRvU+tf1jCtuut/oNCSqCnOjAoFUjGbBSsfADa7Lyk2+ixtfirST2AE9FXfxzGMRVoevFnER8KiteDjg9fhDY7H9mVy4e1HURXKlXN7vjtjmfx+JanUNtZD7u7Fx0OG9468T4efP276HR2jVg7TBMqUXL7N6DNKQg7biifjNJ7f4zuA+9HXiSS/6uUe7vR/Nqv4e5oSvpajSUQ7MMb0l173kbDs9+D7LD7b6f4PpAUOfgB0bnz9WFtA9GVTDU96d31B/DhuZ1Rz/W6Hfj3zU/i8Zt+kNZ7yoqM/RdqsP/CYciKgpnFUzF/dBW0kgbmaQthmjIPjnOHIfd2Q1c4GoaS8fB0tcHdcTGyslTGfOGb1mfb/RYKVz6Q1HWW6YvRtuE5/1DK8PJ2t/m+ie
i1K/4phvXwdLVFfKgR0dCpJqRfOhx72hsA1HbWo8NhQ57RCrfXDZ1maG/htjm78PMPf4+znXXBYx+c2YpSSxG+d+2XUWIpgpA0ME2oDLsu6sb7KQZ0QO/JvcCAkO5rOouu/e/C3VoPyZgNy8xrYJ4yPzgNT8oyI3/FZ9H29v8f0r3TQ4HX3smQJhoGqgnplt7B3xLyp73rcLz1DLpdduRlWXH9xMVYM+1GZGkNSd/vtzufDQvogKaeFvx005OYUjgBZzrOw6DRY+GYObhh4lJYDGZozFZkjZ4KZ/1x/xVpGG7wz9CQ+3rRVb0Btl1vwRvYN1pIgKKg9/gu6ArKkb/yfpgnVgEArHNWwn5kC5znamLVPGJse99G8a1fyXQziC47qhmT1ifQM97VcADdLt/YaIfThr8feRs/2/QbuDyuhO7h9rqx9fxuPLNnHQ42HYMS5aGboii4aG/BlnO7caG7GbWd9fjbodfx/Q2/QKv/gyR36Sf7x6CH8uDOPxUva9xMeO02NDz3A7Rv+K/+gAb8vXTfDBJ3Wz0urvsp6v7wjeACmsJVD0Ey5fjak8K4eLr0HP4oY/cmupypJqSXjp07aBkpSgidbK/FhjOx3x8Y8M7Jzbj/7/+G/9j+Z7x7+kMo/v/Jihz2pcToGTfb2/DnvS8CAEwTK1FyxzehzSkc9L6x+VfzSRpY530cLeuf9i0fH7RnrsDdWo8L6x6D126DvqAcZfc9hqwJswd/+8tw8nrgaou+PwkRpU41IX3XrNtg1plSunbz2XjbewLP7f9v/Hnf3+CS3SnVH7C/qQZtvb4XvJqnLUDesnsQe1l1HEIChICkz0LBqn9B++Z16D2xC8kMnXjtNnRVvw9XWwPa3vtPOGsPRZ0yOJIcsfYQIaKUqWJMuquvB5vPbodFb4LdHX2BhogThrGm53lkLw41HcX6ExuTbpOI0itVFAUXe1pRYPJNf+trOOHrvQ465CEAnQHFq78Mb08nvD0d0FqLYK5YgqYXf+bbLjQFvSd2oWvP2/DaO1O6Pq0yONRCdDnLaEg3djXh+QOvYl/joZjDDAIiODQBJXp4FlvChx1a7G14uWY9ttfthdOT2r4aiqJEvZc1K7u/bYFx9DiLWXQFo2GaMhc5c1YGN/cP6D29P+WABgBPVyu8dlv/gYQ+MIaB8K2eNPofaBJR+mQspJt6WvDIxl/DNsgiFQVKWFBH61GvmNi/AVCzvQ0/fv//Dsvil0n541Ge0//yVtOUebDtetP3Q7An2b95vmS0oPyBxyHpos8+6T25J4VW9P/+ctQ3wsTfRySle/k/rITOAMUdsgdJyI575inzoS8oS9N9iSggY/9GffXIO+hydid93cAZGYvGzMG14xcEf37p8BvDEtAGrR6fv/qTYceMYytgnDBrQMn+TfpzF90eNaDdHU1offdP6D60uX/lXpIP/aQsEyAlcs0QHyaGtEtodTBNmd8/kyQQ0FPno2g1p98RDYeM9KRlRcb2ur0xhziikYQERVEwIW807C4HCkx5WHHVUiwZOxeSkNDhsMHpdmLH+X1paeOo7GI09bRAIyTMLa/EnRWrMDa3PKJcyZ3fRus/n4G9ZisU/3xnyWhB7qLbkbvwtojyzoYTaPrbTyH3OcK3Dk34r0KBpDfCuvA2OM4egvPc4fDTgTe5KDKE3giN2Qpd/ig46o8DfQ4k18sO3/JUlzcKpWu/A1drPRxnqn1DHBMqoU/ghbxElJqMhLTH64HLm+xMCwVGXRbGWkejuqkGx1pPo87WiO3n96HN0Y5znfW+bSXS9E99naTFl+Z+GkvHL4A2zotcJb0RxasfhmfZZ9DXeBJCq0PW2IqoPWhFUdDyxpO+gE5IyAb+gTFvISC7nOjYtA6S0RK9F+4vq7id8HT2wdPZDMlgRGpzP/qDOufqGwAA+sLRDGaiEZKRkNZr9Si1FKGxO8oeGFEoUKAovj08NtVuB+B7oNjj6sWextSmfeUZrehw2KKek4SEuq4LeHrPX+Hw9GHVlOsHrU+bnQft1PlxyzjPHYa7PWQzJSH8HdsYHyzBgA7sQx0+OiU7eiB0eihuV2TZkGEXAJD7HBAaLZRkPxz9GW2evgiW2cuSu5aIhixjDw5XTroWz+1/OeWe71B7zJ+rWospBRPx9yNv4/0zW6FAgTTgpa8A8OLhN7BswiIYdUN/hZS7sznyYDCoAUBB1tgKWGZeh94Tu+CoOwpIGsDrifn2F8XjRs7cVRBaLdydzXCcPeh7wUC0srIXOR+7CbLHA3dbA2SXA+7mcwh7QDjg7S36kvHIXXQ7zBWLITjNjmjEZSykb560DKfaarH1/J60DVEkY9PZHVg8di4m5o/FB2e3hc0aCTycFELA6enD7oYDYQ8nU6W1xNg3Ojhc4ZvGlnP1DcGhBXdHE+qeejhuva6W8yj7zKNwtdaj/viuuGXdnS0YdXf/boKNz/8YzrqjUduiKyhD+QOPM5yJMihj/++TJAkPL7wf37/uK5hdMg1WQzakEWzO0ZZTeOHga9hZXx08poQsDQ8sGVcUBT3+/UKGyjixMvjSAP8dfV/B/ZlldGz+Gy6sewyuFv/mT9LgfycigTLBsgPGr4tv/zr0RZHjy9qcQpSs/Q4DmijDMrqYRQiBytIKVJZWAAD+z+bfYX/T4UGuSo8+bx9eP/buoA8bFSg42XY2LfcUkgaFNz+Ippd/EefdhIDj7EE0PP8j5C+9E8764775yS5nzGl6xqvmAAB0BeXQ5ZVE3+96QNkAbXY+yr/wS9iP70Tv6X2ALMM4fhbMFUtizu8mopGjimXhAbNLp41YSAOJL87b03AQPX12WAzJvSw2Gk9PR/w9NvyNUhzdaHvvuZBxYiVkJkd/WGuz85E92/dgUwgB66I70Lr+6ahVa3MKkT3ruojjQqOFpWIJ3wpOpEKq+rfs9RMWwzCC7zIMLjePQ0DALXtw4OKRmGUc52pg270e3Yc2Q+6L/XJYRfaic+sr/mXU8f7qwx/ehQVzyCeLvmQ8Su/9MTRGS/BYTtUK5C+/D0If/qBTXzwOo+79ESSDMc59iUhtVNWTNumN+OF1X8WjG/8fPLJn0PI5BgvWVnwcuxqrcfji8UHLJyPwIDEwhuvxRg5PuFrr0fzqE3C11AePtemzkHftXbDOvzWyfNNZeEL3io4qznQ8/ynTtAXInb8aWaOnRi2au3ANcq6+AfaTeyA7e2EoGY+sMdMGuS8RqZGqetIAMLXwKjxz2+OYV14Zcc7XnxSQhARJSJiUPx43T1mGby1+CAatIe5OecmQhAQhRDCgBYCpRVeFlfE6enDhhZ+EBTTg20+jbcNz6D60KaJeJYEPnrj8GxlJWkPMgA6QDCZkz7wW1rk3M6CJLmGqC2nA16P+6sIvIFtvDgayLzilsNkJ+cZcVF+owbHWU3B73b5gTVNQh/pY+WyUWorCjnUf3AhvT0fMazq3vRpxTF88DpLB1P/G7cHEeFDIIQuiK4eqhjtCGbR6LJ+4BG8c3xBxLjCP+f0zW/HB2W2AokCGEuwBI8qMjRxDNrr6kt/Qa
XrRJPzrvPsijjvO7I97nbutEe6OJkgGE7qr30fP4Q/h6W7zLwlP8Ill+NYZQZaKpYldT0SXPNWGNADcNXM1GrsvYm/joZCj/VuWBnvV/hkQsiL3D1Wg/xmbAgVmnRG9bkfcsW4BoCynFFeXzoAkafCxspmYXjQ5euEEctbVUofW9U+nsCl/YLvRyJQ2TZnH4QuiK4iqQ1qr0eLbS7+EI80nse38HvR6nDjTXosL3S0RizICe07LMYYRmnpaBp/JIQSWT1iC1dNuGLRtxvGz4Kg9FPO8NrcYLeufhpzKW1NE4A8RnHYnGS3IqboBedd+Kvn6iOiSpeqQDqgonoyK4slwuJ34wqv/FvWNKUKIqG//Dkhkql2RqQDXT1yUUJuyq1bAtutNeHuj7V2twNPdBnhSfKeiogSn6JV99jEIvRG6/FFcXEJ0BVLlg8NY3F533LWBAak8PJxZMhU/uv7rsOgTW4z45YoAAANqSURBVLCiMeWg9O4fQptbHH5CknxN8QxxJgcAodNDXzoRhpLxDGiiK9Ql0ZMOyDZYUGopQlNPS8S50E70YL3qAEkIKAqglTT42qIHkGOwDHpNKEPpBIz50m/Qe3o/XM3nockyoXP3enjaL6ThXYMKLNMXM5yJrnCXVE9aCIFVk+Pv7ZxcL9r38NGryGi1t6XWJkkD8+S5yFvyCRgnVvkCOg0kgxl5196dlrqI6NJ1SYU04NuHeuWkayOOayQpbMbHYGE98Gx2kr3oaJTQTZOi7E2djIKb/wXanIIht4mILm2X1HAH4OtNf2HOXbjxqmvwYe1OdPX1oCy7BIvGzMH3N/wCPa7Ye2eE19P/+VRRNBlF5qEHoi6vFNqcAni6Ar3yBIY8Qvfw8M/k0OWP4mZHRATgEgzpgDHWMny68o6wY99c/CB+ueUpODx9YYtaAtPzAqSQYDRqDbiv6s60tElIGljn34q2Dc/FKzXgDSihpwQ0ZiuK7/hm1BksRHTluWRDOpqK4sn49apHsOHMVhxvPQWNpMX88krMK6+Cw+PAtvN7sfHMNly0t0IjaTCvbDbunHELxljL0tYG6/xb4eluh23XW1GWfoe8WFYICIMJeYvvgOOs7z2NxgmVyK5cAY0pO23tIaJL22UV0gCQa7Ri7YxbIo5nG8y4ffpNuG3aSvS6HdBrdNBpdMPShoIVn0XO3Jtx8ZUn4LpwCmHvEAxhnjIPuYtuR+6i24elHUR06bvkHhwOlRACZr1p2AI6QGctRvGahyEMpuDudWHt0BmQu2DNsLaBiC59V1xIjyR94WiMuuv70OWXhh3X5ZWi9FPfg754bIZaRkSXistuuENtssZMx+iHfgPn+SPwdLVAm1OIrLEz+GCQiBLCkB4BQggYx83IdDOI6BLE4Q4iIhVjSBMRqRhDmohIxUQiu8UlVJEQ9QDKrVYrqqqq0lInEdHlrrq6GjabDQAaFEUZPfB8OkO6E4A1LZUREV15bIqi5A48mM7ZHWcBTADQA+BUGuslIrqcTQJggS9DI6StJ01EROnHB4dERCrGkCYiUjGGNBGRijGkiYhUjCFNRKRiDGkiIhVjSBMRqRhDmohIxRjSREQqxpAmIlIxhjQRkYoxpImIVIwhTUSkYgxpIiIVY0gTEakYQ5qISMX+B5PYgF1R6iDgAAAAAElFTkSuQmCC\n",
119 | "text/plain": [
120 | ""
121 | ]
122 | },
123 | "metadata": {
124 | "needs_background": "light"
125 | }
126 | },
127 | {
128 | "output_type": "stream",
129 | "name": "stdout",
130 | "text": [
131 | "Validating train data\n",
132 | "├── exec_time\n",
133 | "│ ├── DenMune: 0.019\n",
134 | "│ ├── NGT: 0.002\n",
135 | "│ └── t_SNE: 0.85\n",
136 | "├── n_clusters\n",
137 | "│ ├── actual: 3\n",
138 | "│ └── detected: 3\n",
139 | "├── n_points\n",
140 | "│ ├── dim: 4\n",
141 | "│ ├── noise\n",
142 | "│ │ ├── type-1: 0\n",
143 | "│ │ └── type-2: 0\n",
144 | "│ ├── plot_size: 150\n",
145 | "│ ├── size: 150\n",
146 | "│ ├── strong: 84\n",
147 | "│ └── weak\n",
148 | "│ ├── all: 66\n",
149 | "│ ├── failed to merge: 0\n",
150 | "│ └── succeeded to merge: 66\n",
151 | "└── validity\n",
152 | " └── train\n",
153 | " ├── ACC: 135\n",
154 | " ├── AMI: 0.795\n",
155 | " ├── ARI: 0.746\n",
156 | " ├── F1: 0.898\n",
157 | " ├── NMI: 0.798\n",
158 | " ├── completeness: 0.809\n",
159 | " └── homogeneity: 0.787\n",
160 | "\n"
161 | ]
162 | }
163 | ],
164 | "source": [
165 | "data_path = 'datasets/denmune/uci/' \n",
166 | "dataset='iris' \n",
167 | "data_file = data_path + dataset + '.csv'\n",
168 | "\n",
169 | "X_train = pd.read_csv(data_file, sep=',', header=None)\n",
170 | "y_train = X_train.iloc[:, -1]\n",
171 | "X_train = X_train.drop(X_train.columns[-1], axis=1) \n",
172 | "\n",
173 | "knn = 11 # k-nearest neighbor, the only parameter required by the algorithm\n",
174 | "dm = DenMune(train_data=X_train,\n",
175 | " train_truth=y_train,\n",
176 | " k_nearest=knn,\n",
177 | " rgn_tsne=False)\n",
178 | "\n",
179 | "labels, validity = dm.fit_predict(show_noise=True, show_analyzer=True)\n"
180 | ]
181 | }
182 | ],
183 | "metadata": {
184 | "kernelspec": {
185 | "display_name": "Python 3",
186 | "language": "python",
187 | "name": "python3"
188 | },
189 | "language_info": {
190 | "codemirror_mode": {
191 | "name": "ipython",
192 | "version": 3
193 | },
194 | "file_extension": ".py",
195 | "mimetype": "text/x-python",
196 | "name": "python",
197 | "nbconvert_exporter": "python",
198 | "pygments_lexer": "ipython3",
199 | "version": "3.7.3"
200 | },
201 | "colab": {
202 | "name": "iris_dataset.ipynb",
203 | "provenance": [],
204 | "collapsed_sections": [],
205 | "include_colab_link": true
206 | }
207 | },
208 | "nbformat": 4,
209 | "nbformat_minor": 0
210 | }
--------------------------------------------------------------------------------
/images/denmune-illustration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-learn-contrib/denmune-clustering-algorithm/a023e9283d7ea11af2d3e6dadae1c54e3b90528c/images/denmune-illustration.png
--------------------------------------------------------------------------------
/images/denmune_propagation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-learn-contrib/denmune-clustering-algorithm/a023e9283d7ea11af2d3e6dadae1c54e3b90528c/images/denmune_propagation.png
--------------------------------------------------------------------------------
/kaggle/the-beauty-of-clusters-propagation.ipynb:
--------------------------------------------------------------------------------
1 | {"cells":[{"source":" ","metadata":{},"cell_type":"markdown","outputs":[],"execution_count":0},{"cell_type":"markdown","id":"e7d9b26d","metadata":{"papermill":{"duration":0.004085,"end_time":"2022-01-28T16:59:53.754281","exception":false,"start_time":"2022-01-28T16:59:53.750196","status":"completed"},"tags":[]},"source":["##### Have you ever wondered how a cluster propgate. It is time to reveal the beuty of clusters propgation. It as simple as\n","- running the next cell,\n","- wait,\n","- watch and\n","- ENJOY."]},{"cell_type":"code","execution_count":1,"id":"7d1fe6fe","metadata":{"execution":{"iopub.execute_input":"2022-01-28T16:59:53.765377Z","iopub.status.busy":"2022-01-28T16:59:53.764201Z","iopub.status.idle":"2022-01-28T17:03:36.326661Z","shell.execute_reply":"2022-01-28T17:03:36.325883Z","shell.execute_reply.started":"2022-01-24T22:08:58.752738Z"},"id":"FZgP6jwmzFtZ","papermill":{"duration":222.569057,"end_time":"2022-01-28T17:03:36.326871","exception":false,"start_time":"2022-01-28T16:59:53.757814","status":"completed"},"tags":[]},"outputs":[{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAWkAAADzCAYAAABE8effAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABqPElEQVR4nO29d5hcx3Wn/da9nXs6TM4ZGORIEMwEmIMCFahAyaIcZMnpc9i11+u1/azDyvK3/nbX62xJlmRZlkVJlERSzCQIigRJgMhxMAiDyTl1jvd+f/SE7pnuCUCHO8B9+QDE3K6pOp1+t+rUqXOEqqro6Ojo6GgTqdAG6Ojo6OhkRhdpHR0dHQ2ji7SOjo6OhtFFWkdHR0fDGLLVkRDiGNAM+ICL2epXR0dH5zpnDVAEdKqqumP+gyJb0R1CiEnAlZXOdHR0dG48plRVdc+/mLWZNIkZtMvlcrF9+/Ysdqujo6Nz/XL8+HGmpqYgoaELyKZIXwRqt2/fzv79+7PYrY6Ojs71y969e3nzzTchg5tY3zjU0dHR0TC6SOvo6OhomGy6O3R0dK5DFEVleNCLqqhUVDuQZX1ul090kdbR0cnI4Xeu8M4bl5iaDAFQVGRi913N3H5PK0KIjL/n94Y5dqiHkSEvRqPMxm01tLSV5cvs6wpdpG9gwoFxhrr2MzF4kng8gs1ZS0XDnZRUbSu0aToa4MC+i+x78XzKNZ8vwr4Xz+P3RXjwwxtTHotF47z203McO9RDNBJPeezoe93YHWY++pnttLSV59z2pVAUlQtnh+g4O0QsGkdVYXTYRzAQpaTMzk23NbBxW02hzQR0kb5hCfqG6Dj8T8Sigdlr/qluOk99l6C3j9q1jxbQOp1CEwpGefv19GfSVFXl0Fud2OxGyisdrFlfweiwl3/5vweIxZSMffq9Yb7zzwcpKjJRXe9mxy31rNlQgcEg5+pppLfDF+Y/vn6IgT4PqqpC0lERIcAzFeLKpTEud4zywU9szatt6dBF+galp/2ZFIFOZvDKmxRXbcfm0MZMQif/XDg3TGTebBiYFTVFVdn3wnmEAKvdRCgQRVGWdzDO54tw4dwwF84NA1Be5aBtYwXVdS7Wba5icjzAyKAPi81IQ3MJkpTZrZJMPKYQ8EcwWwyYzJml7dmnTjDQ55l+QvOfX0KoAY4d6qFtUyVtGyuXNX6u0EX6BiQcHMc7cWnRNmN9h7Gt/3CeLNLRGpFwbME1NY0IqyoEfJFrGmtk0MvIoBchQJYl4nFlVildbgv3f3ADG7ZWMzkeRJIEkiy4cnEMRVExmw3EYwqH3+2iu3MMVQFJEqzbXMmm7TWMDvsBlbUbKqiuczM24uNS+8i07RluKklKffS9bl2kdfJPNOxZsk0kPJUHS7JDyD/MUNdbeEYT/lNH6RoqG+7C6qgusGWrl6pa5+y/57sEcoWqMucuUVWEJJiaDPHDbx/FYJAWdaUkoygq504Ocu7k4Oy1/S91YLObaGh2oyzxfFRgZu4+OZZ+tZlPdJG+ATFZ3Eu2MSe1UZU4E8OnmRg8RiwawuqoorzuNqxFhZ1hAHjHL3Hx2DdRlOjstbH+I0wMHKdl2+dwlW8ooHWrl9qGYqrrnPR3F+5mnTxzX65AL0bAH6H99PCKfsfuMF/zuNeKLtI3ICaLG2dpG56xjoxtSmt3A6DEI1w8+g28k52zj/kmOxnteY/6DR+lvO6WnNubjKLEmBw6TcDbjySbGO19b06gVZWZKZKiqlw583223PXfkGRjXm1c7fT3THJg30WG+r2FNqUwqKCiIoBtu+oKbY0u0jcqDesf4/z7/0Q0svCLWNm0l/GBo4wPHCUS9oCqMLsAnPbVqaj0nPsxRe6mvMyoveMXGezcj3f8Iqoan7Mn47pVJRbxMdLzDpVNe3Jun9YZ6vcwNuLDZjfR2FKKyLAZd+n8CE99831i0Wufua5qpj0ib71+gep6F+WVjoKZoov0DYrZVsaGW3+T4e63GR88gRKPYHPWUVK1k4HLrxIOjiUaqjNfVnXuf0lCPdr7HvXrH8uprd3nfsRI78EkW5LsWYLBzv1UNN6FEDfmKbnRYR/PPnWCvu7J6ZdMxem28sjHNi/YEFMVlWe/d1wX6BkEjI8G+O7XDvGrv7dn0YiRXKKLtAZQlBgj3QcY7TtEJDiB0eKitGYXFQ13IRtMORvXaHZSu/bRlJjorrNPJwn00kIY9A1muD7EYOcbTI6cQVViFLkaqWi8C3fFphXZOD5wbFqgr27nKhb14xntwFW+HgDfZBeTw2dQlSh2dyPuis1I0vX5NfD7wnznn97D6wmlvHxTE0Ge+sb7PP7kTWzYmthcnZwI8NQ3DuP1hAtkrQaZ3kGcmgxy+lgfO29tLIgZ1+enUyMEfUPEYyEs9nIMRlvaNooSS/h8k0LiwsFx+i+9wtTIWdbe9EVkw9VtXkRCU4wPHCMW9WG2lVN
StY1o2EM4MIbBVITVUU3A0weo2By1AIwPHk/x7S5ETRHMcHCCcwf/lljYi9HioqzmZsy2Mi4e/yZKfC40yzvZiXeyk+rme3GUrEGSTdicdYseLQYY6Xl3btyrxO/pxlHSyuUT/8bUWNIJup53MFncrNnxi5rYBM02R97tXiDQM6gq/OS7xykutbH/5Q4unB262vvgdc3M5uXLz5wl4I9wxz1rMrqKcoUu0itAiUdR4hFko3XR5bNnrIPejhcI+gYAkCQDJVXbqVv3IWSDJaXtaO/BjDHLfk8vw91vUd1yf8p1VYnjGbtANOLBbC2lqLglRexUVWHg8j4GO19HVZVZ0e0++8NEAyElCe3cN1OSTSnCuhwiwTEi0zPvSGgC/2QXkmxEiUdJJ6wDl19j4PJrICTM1lJq1zxEcdU2gt5BBjpfJ+QfwWh2UORqwu6uJ+AdWJE9qQgQAkky0tP+TKpAz9gfmuTi0X9h053/ZXZGHY+FmRg6SSQ0icniprhyy4L3bTXQcWZwUeGNRuP86z+8SyQc0wV6CaKROPteOM+7b17mc1+6hapad97G1kV6GQR9QwxcfjWxTFYVTGYX5fW3Utm4ByGlHmn1jF3k4rFvJsRxGkWJMdp/mKB/iHW7fjXld8b63k8dbJ54DncdwGQpJuQbZnTgCLGwhxTxExIms5PKpj0YTE5Geg7gm7i8+BNS0/scVyrQGTpfXj+qQjgwwuWT30Gcfgo1KYQu6AXPaPs12iFmfedFxS30X3o1Y8tIeIqJoZOUVu9kfPAE3WefJh6fW/b3nn+Ohg0fpaR6Qfk5TbOcsLVwaOGhFZ3MBP1Rvvq/32bbrjo+8IkteTnSrov0EgS9g5w//I/EY6HZa5HwFH0XX8Y/1UvLts+lzGL7L76YItDJ+Kd6mBg+nZLAKOXQSBo3Qyzq48rp72U2UFWIhCbpaX9mZU9MQyQL9LUzJ84ApTW7iEcD0xEhmfFNdGKxlnHl1H+gznsP4vEwV04/hclaTJG7KYu25o7xUX+hTbiuOXG4F4DHntie87FuzC3vFdB74fkUgU5mcuQMnqQldDg4jt/Tu2h/E4MnUn42ml2Jma2qkJdjXdc96uzrWeRqpH7dhxHL2BiUJANDXW/OCbSa6ntXURm68maujM4qnRdG+er/fovhwRs0zjlPnDrWx0QeTiTqIr0I0bBn0QMfAOP9R4HEjLv77I/nBHfGF5zyR8E7fpHOU9/DO34ZVYkTCU3k46nckPgmOzm+74/pOjPti5//3iThrtiEZ/zi7PuUuGGqKW2944vnO9ECsVicH//7MaLR+JKbsjrXhhJXOX86fXRTNtHdHYsQiwaXbBON+PBPdtNx9GsosfnhSwtnxvFYkPHBY4wPHJ3ewFt8Ga5zrahEQuNprycS6Ug4iltwlKyZ9qWnW83MtNW+6LWfGsTvT9oTEOgLtBxy8kgvt+5pyekY+kx6EUwWF5K0+JFis62Myye/gxILsexvw8xMTRfoguOu2ETr9s8TDk6gKotvollshU9WvxRjI6m+aO3fVlY3g32Jk5y5RJ9JL4JssFBStZ3RvoMZ2wS9/brLYhUzOXyGi+FvEl/G5mU4zfsc8Pbjn+pBko24ytZnjIfPFzZ70uEndYFXRycHnDjcy72PrM9Z/9e9SMcifkZ6DzI5fAolHsXuaqCi4XZszqUTp6iqkhKKlQ7/VFe2TNUpBKqCb3KJkMVpYmEPsWgQg9FKNOyh89R/TPup55TQWlRN0+ZPYXPW5sjgxdm0rYbXfnqOWExZEKWikxt6Lqdzp2WP61qkw8EJOg7/E5HQ5Oy1UGCE8YGjNG58nNLaXYk0nEOnZkXc5qqnrHY3JouLgUuvMTF4vGD262gTVVW4cPRfCHoHmO/iCvoGOHfwb2jZ8lmKqxaWXlJVlYCnh1gkgMVejtlWmlXbbEUm7nl4Ha/+9Jzui84TXl9uj9Jf1yLdfe5HKQI9g4pK17mnsTnruHLm+wS8fbOPTY2dZ+jKm9Sv/wgDna/n0Vqd1cCJ/X+CwWCdLj2WqbKHwpUz38dZ1pZyUnFqtJ3e888RCozOXnOWttG48ePLyvG9XG7d04LTbeGFp08R8GczBl0nHePDfiLhWM4SMF23G4fhwPii4XOqqtB56rspAj0TfqXEw3Sd+X7Gk3k6NzCqQiyaKMm0GEo8ksiDMs1w19tcPPoNQv7hlDBAz1gHHYe/mjEW/2rZuK2Gux5oy2qfOpnZ//LiobrXwvUr0sHRxRuo6lwGt5TY2NkGuTJN54ZAxTuWqLY91neYnvPPsPAzpU4fjx9lrP9w1i0Y0Q+z5I2uy2M563vVuTsiIQ/dZ3+IZ/wiqhJFMlgoq9lFzZqHUpaWBlPRMnqbqXWmC7JO9pkYOsmFI19PHJJZFJWx/qNUNNyZ1fEH+5euZamTHUYGvEyMBSguzX50z6qaSQd9Q5x++ytMjZ6bzfegxEIMd7/Nif1/xvjAsdm2NkcN1qI0hUhnj/suzAKno5Nd1ETagGXEwwd9Axlzvlwt4aDuj84XsZjCaz89l5O+V5VIXzr2zYwHDlQlSuep7zKQlO2sYf1jqQndk4/76uhoCFWJMXXNmf9SCYV0kc4XQhJ0nB0i4M9GJslUVo1IB70DcxVDFqH/8muJzRnA5qqnZdvncJau0zcBdTTP5PCZrPYXi+onWvOJoqj4vdkPx1s1Pumgf2jZbUd630MIibG+w4uHSunoaAjf+PIO1SwXJa5/7vOJLAmKnFdXRWkxNCXSvskuRnoOEPD0Ickmiiu3UlZ3KwajdfnHbVWF0d6DWUpgr6OTP8LBUcLBScxWd1b6M1kMRKP69yAfqKrK+i3VWG3Zr0mqGXfHcPcBzr//D4wPniAUGCXg7afv4ku0H/xbIqEpHMWtCLG8e4ou0Dqrlc5T/46qZMdNsWZdRVb60VkeTWuye3p0Bk2IdMg/Qu/559I+Fg6M0n7o7zn19v+LquqlfnSub/yTV+huf2a6RuS1ccvdzashu+p1gRCCl358huGB7Ic9akKkx/rfT58MZjoaIxqaIKpnmtO5QRjtfZeTb/45/ZdeW1ZO80wUOS164v9cI5itHh5XVA69fSXrQ2hCpEP+NKcD9WgMTWO0FBfahOuaeCzIwKWXOfWzLzPWd3WnESfHA6jMiYjO1SMErN2Y5D4STJfTTH1tuzuznxFPExuHxmWdDrw+UNMk+F1tsx2LvZJNd/wuSjzK1Gg7Q11v419muk+dlaHEw1w5831kkx13+YaUx3yTXQx1/Qzv+EUEAmfZOiqb9mBz1ABgL8r+JtaNyu33tLLnoTb+8r+9tDBHt6rOXovHsz+51MRMurRmV+IfyfUArzNUVU0r0DOPpV6YvqaqWY8elGRzomzX1SKMNG78+HRfRoort1Df9gH0GiC5ROXSsW/Rd+FFouGEz3N88Dgd7/8jk8OnicdCxGJBxgePc/7Q388mFisutVPfVIyqXH/fp3xisxvZcUsDxw72UFRkRlXU2e9n4t9zbSdGA3z/W4eJxbIXo66JmbSiRDGaimY/gNcbmcR5YUNSfP
Pq3MVlzbbtzmYcZa0EpnoIB8dR4pHECU0hMFlLKKu9mWjYx8ClVzL2UVyxnUhkCiUeJR4LEg17po/gC9wVm6luuW9BQnuzvRxJNqEo0WW6qWaeiy4ey0dhsHMfg1f2U7f2Awxcfi3tPo6ixOg68wM23/lfEZJMabmN7hwnpb+eqaxxsvehNr7xNwcIBqNzE8hM8ycB588Mse+F8zz44Y1ZsaHgIj010s6lE/86nbfg+quauRKBVoIxVG8MqdgEBpF4KcTMw2paoZZkEyVVO6hZ+/CSbiNVVTn91ldY7DWeGjvL9nv/HJE0247HIghJSj1in4TBaKWkegejfYeWfJqzCDFrhpAMVLfcj8VegSwb6Tn/PKHAvMNLyflW0iXFUlRMYz6kaIxYkYW41YRqKvjHO/uoCr0d05FQGVZEkekq9wZLM8cO9ubRuOuLX/m9u3E4LfzdV94gFJqOLBMCgZp+sZ/koz5+qIc9D7Zhtlz7Z7Cgn2JVVenteG4usYyY9sbfYJuGakxBmYoRfWEAvArIgFmGsAJmCbnRBrJAGQihTkaRqxyU3LyLqoceoKiyCUlO9T3Gw2EC3T0IWcbe1IiQEl9mJRZKWwQhGSUewTPagat8rmabbFjat1nX9kFC/hF8E5dZ9o1WCGSDhTU7foEid9Ps5eLJKwx0DoGqIvtCiLhC3GZGNUqgqoi4guwPI0ViSIEIRk8QOU2eCn9rBdFie8axVz2LVDCPhCZ58blTeTZodZGpvJiYnhk5nBZOvN87J9CzDcQCl+z8zdlwOMbwoJf6pmvfYC+oSAemehJVKq5DH/SixFTU8Qjx817iF/wJMU5+CeJAYNqnFYgTP5eaFzje52Gkbx+jz7+FvbkJe1MTVQ8/iL2pkZ7v/5DBF18i6k1UMLZUlFP70ceouO9eJNmEEBLqElnZgr6BFJFeDrLBTNuuLzLS8y497T9J0yLxITZZS3CUtMzWmyyt2YXBaE1pWV5/GxNHX8DUNYgUmZvBRErsBOpLcZzpRU6Tl2L+p8jaM0bUZYN00Q2LCNz1gMniprerp9BmaJbF6j+qqAgEE6MBeruWEfqb4WNkMGRny6+gIj012n5DzJqjrwwnlkJWGXUyitITzIpXR41G8XVcwNdxgaFXXsXW3ESgqzupgUpocIhL//hVLv3T13Bv24pxk52IezHfv0CeJ5rLRQiJioY7CAfHGe5+e8HjkmSgafMncRS3LNpPqOMY1ov9qCR9NtSEO8M45su42538XVEBKRLHfnGIQGsFqpz6W6ZRL5FiO8xcX5WCPe0YnWe7yezEWbYOIXSRvhbMVgNGo5z2sTST6RSKS2xU1TizYkfBRNo/2c1A575CDZ831MkoyiX/9A+5HSvQeSXxD0laGCWjqkyePIU4JTDcV4q8Nr3/WkgGiiu2XJMd9es+hNVewXD32wT9wwgErvL1VLXcj32JKu2qqjDx1vcTtiCBmN5Fn358uXOTGdkyeYIYT3QTKS1CkSWM3iByILEBZJzw42+tTMy0V+3MOul9FhKSkGnc+DhCSDS2lnD2+GBhzdMgKioKKtIiEUkRVDDJrN9SxcmjfQsbLKHSdz+4Nmvx6QUT6cEr+7jeNgnTITltGCqcxIa95O35ZgpjnBa7+DsepGYbYsFyTFDdch8GUwY/7gooq7uFsrpbiMdCCMmQcdNxPpGBy8Qmh6ftnZtJX8vHXSgq5pGFpaSMU0Fcx7qIuq1EypzEXNZVKtQJnKVt1K59ZDZOet3GyhtKpOe7MCZQsQFmJGLTwhxDRSAxBVgAd5p+FKAThf5RP5s2VlLX4Ka3e3JBOyEJauvdDPROoUx/3xxOM3sfWsfWmxafjKyEgoi0qqpMjZ6f/un6i+hIIKhb9yEqG+8ifPMoh3/5V/Pne19iHCUQoVTZgd/WRziQyNFttpVR1bSXsrrdWTUluaTZUsQ8Y0y+/3zeXGACEKqKeSKAaSLA5M4mkJNEOvl1XAXi7Z/qTonwuXBupIDWXBtCSmzfKcuM8Z4fujqESi8qAjATJ7XMb+LzJVCpRlCOwDj9iAfoQ8EPfOO50/yv37ybJ76wm5d+coazx/uJT9vjclu55+E2ttxUh2cyyGC/B5PZQENTMZKc3eMnBZpJq6kRHerc9VVJhlAo30QnlY13MfTaG6BowPeeJDQWUUrTHZ8lEhwHVEzW0oKefAwPdjLwH3+GEvQVZPyIyzq3wZhhFQJoWqzj0SAjvQepaX2ArktjnD0xUGiTrprWtjJu3dPC/pc66OuZBMBqM7J5Rw1mi5Ezx/sZH0m4ERVUelAxkthzH0dlJg+mCmSqw64C/agMoGKa/t3kOI6LPVM8+Scv84E7mlm7vZp7Hl3P6KAXk9lAXYN71p3hdFtxuq9uH2c5FESkhZAocjfjm+ycuTDzyHW1kSikxKZD3zPPFtiShViqqxBCYLblJr3iYiiREN6Tb+A78zZKOICpooFw/8WEQBfo/Z8N1VtsFaIoCX+/ZoVaZazvfSoa7+XH/35s2bNQrSFJgvs+sIHKGictbeVMTQSJRuO4S6wYDInv1D0Pr6Pr0ig//cEpzo14Gb6GCZ4KZKqnMuWP8N1XEqv+ihIbv/3pHbRmIaxuJRTsWHhV057UC6p6XQk0gLtiE13f/R5KIFBoUxLiMo2logL39m0FMSMe9NH/7T9i7JVvEO7rIDrai//MAWITgwV9/5dz8MU05MHSM4ZpxIMU0GbO8khoglMHvo3Xk2n+qG2EgA99ciuVSZERrmIrZRVFswI9Q2NrGb/+X+/hpr2teblvDo8H+O9ffYcX3ukkksfSZAXbOHSVb6B+3Yfp63geRY2zal0dGbAWVRE41E3vUz8otCkJpqMXDDYba3/nN1HCYfqeeY7hfW8QGRvHXFZGxf33UvPhDyKbs18CaIbxN75DZLgrZ/1fLWKpL52qYhmcQkpKoBNzWvG3lKMatXWyMR46S1HRTfh89qSFgVZn/9MIcBfbeORjm1m7YWXFCvpGfXnb7onGVP7x6ZN89+V2PvvQeh65vTnnYxb001XRcAclVdsZGzhK34UXMlYC1ypCMlK79lEGO/cRi/pnr7tK1yFOqXT96F8LaN08VJWaD3+Q6kcfxuh2c/qP/wTfxUuzD4eGh+n+7veYPHqMjX/yxzkRaiUcxHdmYfy0Fm7Q5lEv0dLMx+qNk4EUgQYweII4L40T3L6OiMbynddWD9PekSwgSTkGNERFVRF7HlqHrchEQ1PJisPWJn0hDp3OfwSLNxDln358CovZwD031ed0rIJPAQwmO+6KTfSe157fFiEtsgQX1Kx5kMrGOymvvxXv+CWUWBirs4ZI1yinfvxHeTV1OVhqarBUVdHzg6fxXbi40P8qBJ728wy+9DK1j3046+PHvGOo0exXU84GRm8occClzLHgMRGNY+lNn6RI8vlpqXoAY8M6JkfOEfT2MzVylmh4MscWL46jKN0GrPaEevddLWzYWn3Vv/8Hf3+goLf477/Wwd6ddTnddNdEqtKJwROFNmERpvOJJGG2ltK48eNUNe0FEifpXGXrKK7ai
[... remainder of the base64-encoded PNG plot output omitted ...]

Source of the notebook's code cell:

import pandas as pd
import time
import os.path
import warnings
warnings.filterwarnings('ignore')

# install the DenMune clustering algorithm from https://pypi.org/project/denmune/ using pip
!pip install denmune
# now import it
from denmune import DenMune

# let us create a data folder to hold our data
if not os.path.exists('data'):
    os.makedirs('data')
data_path = 'data/'

# download the datasets and extract them to our data folder
if not os.path.exists("chameleon-data.zip"):
    !wget https://data.zerobytes.one/clustering/chameleon-data.zip
    !unzip -o chameleon-data.zip -d data

#@title { run: "auto", vertical-output: true, form-width: "50%" }
chameleon_dataset = "t7.10k.dat"  #@param ["t4.8k.dat", "t5.8k.dat", "t7.10k.dat", "t8.8k.dat"]
show_noise_checkbox = True  #@param {type:"boolean"}
data_path = 'data/'

# train file
data_file = data_path + chameleon_dataset
X_train = pd.read_csv(data_file, sep=',', header=None)

# DenMune's parameters
verbose_mode = True  # view in-depth analysis of time complexity, outlier detection and number of clusters
show_plot = True     # show plots on/off
show_noise = True    # show noise and outliers on/off

knn = 39
from IPython.display import clear_output
for x in range(250, 5500, 250):
    print("iteration", x)
    clear_output(wait=True)
    dm = DenMune(train_data=X_train, k_nearest=knn, rgn_tsne=False, prop_step=x)
    labels, validity = dm.fit_predict(show_analyzer=False, show_noise=show_noise_checkbox)
    # time.sleep(0.2)

Notebook metadata: Python 3 kernel (Python 3.7.12), nbformat 4.5; executed with papermill 2.3.3 on 2022-01-28 (duration ~234 s).
--------------------------------------------------------------------------------
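The loop at the end of the notebook above re-runs DenMune with an increasing prop_step to capture cluster-propagation snapshots. A minimal sketch of the same idea outside the notebook (illustrative only; it uses synthetic blobs instead of the chameleon files and assumes the denmune and scikit-learn packages from requirements.txt are installed):

# Hedged sketch, not part of the repository: drive DenMune propagation snapshots
# on synthetic data, mirroring the notebook's loop above.
from sklearn.datasets import make_blobs
from denmune import DenMune

X, _ = make_blobs(n_samples=1000, centers=3, random_state=0)

knn = 39  # same k-nearest value the notebook uses
for step in range(250, 5500, 250):
    # prop_step stops propagation after `step` iterations, giving one snapshot per run
    dm = DenMune(train_data=X, k_nearest=knn, rgn_tsne=False, prop_step=step)
    labels, validity = dm.fit_predict(show_analyzer=False, show_plots=False)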
/requirements.txt:
--------------------------------------------------------------------------------
1 | ngt>=2.0.4
2 | numpy>=1.23.5
3 | pandas>=1.5.3
4 | matplotlib>=3.7.2
5 | scikit-learn>=1.2.2
6 | seaborn>=0.12.2
7 | anytree>=2.8
8 | treelib>=1.6.1
9 | pytest>=6.2.5
10 | coverage>=6.3.1
11 | treon
12 | testbook
13 | notebook
14 |
15 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | from .denmune import DenMune
2 |
3 | __all__ = ["DenMune"]
4 |
--------------------------------------------------------------------------------
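The package exports a single estimator, DenMune, and the test module that follows exercises its constructor and fit_predict API. A minimal, hedged usage sketch (illustrative, not part of the repository) that mirrors those calls:

import numpy as np
from sklearn.datasets import make_blobs
from denmune import DenMune

# two well-separated blobs, as in the tests below
X, y = make_blobs(n_samples=1000, centers=np.array([[-1, -1], [1, 1]]),
                  cluster_std=0.5, shuffle=False, random_state=0)

dm = DenMune(train_data=X, train_truth=y, k_nearest=10)
labels, validity = dm.fit_predict(show_analyzer=False, show_plots=False)

# predictions are also stored on the estimator; cluster ids may be permuted,
# hence the symmetric accuracy check used by the tests
acc = np.mean(dm.labels_pred == y)
print(max(acc, 1 - acc))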
/src/tests/test_denmune.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from itertools import chain
3 | import pandas as pd
4 | import pytest
5 | from sklearn.datasets import make_blobs
6 | from sklearn.datasets import load_iris
7 | from src.denmune import DenMune
8 |
9 |
10 | # test DenMune's results
11 | X_cc, y_cc = make_blobs(
12 | n_samples=1000,
13 | centers=np.array([[-1, -1], [1, 1]]),
14 | random_state=0,
15 | shuffle=False,
16 | cluster_std=0.5,
17 | )
18 |
19 | knn = 10
20 |
21 | def test_DenMune_results():
22 | dm = DenMune(train_data=X_cc, train_truth=y_cc, k_nearest=knn)
23 | labels, validity = dm.fit_predict(show_analyzer=False)
24 |     # This test uses data that are not perfectly separable, so the
25 |     # accuracy is not exactly 1; it should be around 0.90 (allowing for swapped cluster ids)
26 | assert (np.mean(dm.labels_pred == y_cc) > 0.90) or (1 - np.mean(dm.labels_pred == y_cc) > 0.90)
27 |
28 |
29 | @pytest.mark.parametrize("train_data", [None, X_cc[:800] ])
30 | @pytest.mark.parametrize("train_truth", [None, y_cc[:800] ])
31 | @pytest.mark.parametrize("test_data", [None, X_cc[800:] ])
32 | @pytest.mark.parametrize("test_truth", [None, y_cc[800:] ])
33 | @pytest.mark.parametrize("validate", [True, False])
34 | @pytest.mark.parametrize("show_plots", [True, False])
35 | @pytest.mark.parametrize("show_noise", [True, False])
36 | @pytest.mark.parametrize("show_analyzer", [True, False])
37 | @pytest.mark.parametrize("prop_step", [0, 600])
38 |
39 | # all possible combinations of the parameters above are tested; this generates 257 test cases
40 | def test_parameters(train_data, train_truth, test_data, test_truth, validate, show_plots, show_noise, show_analyzer, prop_step):
41 | if not (train_data is None):
42 | if not (train_data is not None and train_truth is None and test_truth is not None):
43 | if not (train_data is not None and test_data is not None and train_truth is None):
44 | if not (train_data is not None and train_truth is not None and test_truth is not None and test_data is None):
45 | dm = DenMune(train_data=train_data, train_truth=train_truth, test_data=test_data, test_truth=test_truth, k_nearest=10,prop_step=prop_step)
46 | labels, validity = dm.fit_predict(validate=validate, show_plots=show_plots, show_noise=show_noise, show_analyzer=show_analyzer)
47 |                     # This test uses data that are not perfectly separable, so the
48 |                     # accuracy is not exactly 1; it should be around 0.70
49 | assert ( np.mean(labels == y_cc) > 0.70 or (1 - np.mean( labels == y_cc) > 0.70) )
50 |
51 |
52 | def test_DenMune_propagation():
53 | snapshots = chain([0], range(2,5), range(5,50,5), range(50, 100, 10), range(100,500,50), range(500,1100, 100))
54 | for snapshot in snapshots:
55 | dm = DenMune(train_data=X_cc, k_nearest=knn, prop_step=snapshot)
56 | labels, validity = dm.fit_predict(show_analyzer=False, show_plots=False)
57 |     # if snapshot equals 1000 after the loop, propagation ran to the end properly
58 | assert (snapshot == 1000)
59 |
60 | # we are going to do some tests using iris data
61 | X_iris = load_iris()["data"]
62 | y_iris = load_iris()["target"]
63 |
64 | # we test the t-SNE reduction by applying it to the Iris dataset, which has 4 dimensions.
65 | @pytest.mark.parametrize("file_2d", [None, 'iris_2d.csv'])
66 | @pytest.mark.parametrize("rgn_tsne", [True, False])
67 |
68 |
69 | def test_t_SNE(rgn_tsne, file_2d):
70 | dm = DenMune(train_data=X_iris, train_truth=y_iris, k_nearest=knn, rgn_tsne=rgn_tsne, file_2d=file_2d)
71 | labels, validity = dm.fit_predict(show_analyzer=False, show_plots=False)
72 | assert (dm.data.shape[1] == 2) # this means it was reduced properly to 2-d using t-SNE
73 |
74 | def test_knn():
75 | for k in range (5, 55, 5):
76 | dm = DenMune(train_data=X_iris, train_truth=y_iris, k_nearest=k, rgn_tsne=False)
77 | labels, validity = dm.fit_predict(show_analyzer=False, show_plots=False)
78 |         #assert (k == 50) # this means we tested that the algorithm works with several knn inputs
79 |
80 |
81 | data_file = 'https://raw.githubusercontent.com/egy1st/datasets/dd90854f92cb5ef73b4146606c1c158c32e69b94/denmune/shapes/aggr_rand.csv'
82 | data = pd.read_csv(data_file, sep=',', header=None)
83 | labels = data.iloc[:, -1]
84 | data = data.drop(data.columns[-1], axis=1)
85 | train_data = data [:555]
86 | test_data = data [555:]
87 | train_labels = labels [:555]
88 | test_labels = labels [555:]
89 |
90 | # check that the data is handled correctly when it comes as a pandas DataFrame
91 | def test_dataframe():
92 | knn = 11 # k-nearest neighbor, the only parameter required by the algorithm
93 | dm = DenMune(train_data=train_data, train_truth=train_labels, test_data=test_data, test_truth=test_labels, k_nearest=knn, rgn_tsne=True)
94 | labels, validity = dm.fit_predict(validate=True, show_noise=True, show_analyzer=True)
95 | assert ( np.mean(dm.labels_pred == labels) > 0.97 or (1 - np.mean( dm.labels_pred == labels) > 0.97) )
96 |
97 |
98 | def test_exceptions():
99 |
100 | with pytest.raises(Exception) as execinfo:
101 | dm = DenMune(train_data=None, k_nearest=10)
102 | #labels, validity = dm.fit_predict()
103 | #raise Exception('train data is None')
104 |
105 | with pytest.raises(Exception) as execinfo:
106 | dm = DenMune(train_data=train_data, test_truth=test_labels, k_nearest=10)
107 | #labels, validity = dm.fit_predict()
108 | #raise Exception('train_data is not None and train_truth is None and test_truth is not None')
109 |
110 | with pytest.raises(Exception) as execinfo:
111 | dm = DenMune(train_data=train_data, test_data=test_data, k_nearest=10)
112 | #labels, validity = dm.fit_predict()
113 | #raise Exception('train_data is not None and test_data is not None and train_truth is None')
114 |
115 | with pytest.raises(Exception) as execinfo:
116 | dm = DenMune(train_data=train_data, train_truth=train_labels, test_truth=test_labels, test_data=None, k_nearest=10)
117 | #labels, validity = dm.fit_predict()
118 | #raise Exception('train_data is not None and train_truth is not None and test_truth is not None and test_data is None')
119 |
120 |     with pytest.raises(Exception) as execinfo:
121 |         dm = DenMune(train_data=train_data, train_truth=train_labels, k_nearest=0) # k_nearest must be at least 1 (the default, 1, is valid), so 0 should raise
122 | #labels, validity = dm.fit_predict()
123 | #raise Exception('k-nearest neighbor should be at least 1')
124 |
--------------------------------------------------------------------------------
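pytest (listed in requirements.txt) discovers the test module above automatically; a hedged, programmatic equivalent of invoking it from the repository root, so that the `from src.denmune import DenMune` import resolves:

# Sketch, not part of the repository: run the DenMune tests programmatically,
# equivalent to running `pytest src/tests/test_denmune.py` from the repository root.
import pytest

raise SystemExit(pytest.main(["-q", "src/tests/test_denmune.py"]))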