├── model.pkl ├── docs ├── static │ ├── favicon.ico │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ ├── apple-touch-icon.png │ ├── android-chrome-192x192.png │ ├── android-chrome-512x512.png │ ├── FontAwesome │ │ ├── fonts │ │ │ ├── FontAwesome.otf │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ └── fontawesome-webfont.woff2 │ │ └── css │ │ │ └── font-awesome.min.css │ ├── site.webmanifest │ ├── css │ │ ├── print.css │ │ ├── general.css │ │ ├── variables.css │ │ └── chrome.css │ ├── ayu-highlight.css │ ├── highlight.css │ ├── tomorrow-night.css │ ├── clipboard.min.js │ └── book.js ├── chapters │ ├── example │ │ ├── output_21_0.png │ │ └── example.md │ └── cpu_vs_gpu │ │ ├── output_13_0.png │ │ └── cpu_vs_gpu.md ├── 404.html ├── _config.yml ├── index.md └── _layouts │ └── default.html ├── MANIFEST.in ├── kmeans_pytorch ├── main.py ├── __init__.py └── soft_dtw_cuda.py ├── makefile ├── tests └── test_project.py ├── README.md ├── LICENSE ├── setup.py ├── .gitignore └── cluster.py /model.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/model.pkl -------------------------------------------------------------------------------- /docs/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/favicon.ico -------------------------------------------------------------------------------- /docs/static/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/favicon-16x16.png -------------------------------------------------------------------------------- /docs/static/favicon-32x32.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/favicon-32x32.png -------------------------------------------------------------------------------- /docs/static/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/apple-touch-icon.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # Things to include in the built package (besides the packages defined in setup.py) 2 | include README.md 3 | include LICENSE -------------------------------------------------------------------------------- /docs/chapters/example/output_21_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/chapters/example/output_21_0.png -------------------------------------------------------------------------------- /docs/static/android-chrome-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/android-chrome-192x192.png -------------------------------------------------------------------------------- /docs/static/android-chrome-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/android-chrome-512x512.png -------------------------------------------------------------------------------- /docs/chapters/cpu_vs_gpu/output_13_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/chapters/cpu_vs_gpu/output_13_0.png 
-------------------------------------------------------------------------------- /docs/static/FontAwesome/fonts/FontAwesome.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/FontAwesome/fonts/FontAwesome.otf -------------------------------------------------------------------------------- /docs/static/FontAwesome/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/FontAwesome/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/static/FontAwesome/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/FontAwesome/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/static/FontAwesome/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/FontAwesome/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/static/FontAwesome/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kernelmachine/balanced-kmeans/HEAD/docs/static/FontAwesome/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /kmeans_pytorch/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | __version__ = "0.3" 5 | 6 | 7 | def main(): 8 | 
print("TODO") 9 | 10 | 11 | if __name__ == "__main__": 12 | main() -------------------------------------------------------------------------------- /docs/static/site.webmanifest: -------------------------------------------------------------------------------- 1 | {"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"} -------------------------------------------------------------------------------- /docs/404.html: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | title: "404: Page not found" 4 | permalink: 404.html 5 | --- 6 | 7 |
8 |

Sorry, we've misplaced that URL or it's pointing to something that doesn't exist. Head back home to try finding it again.

9 |
10 | -------------------------------------------------------------------------------- /makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean build publish 2 | 3 | build: clean 4 | python -m pip install --upgrade --quiet setuptools wheel twine 5 | python setup.py --quiet sdist bdist_wheel 6 | 7 | publish: build 8 | python -m twine check dist/* 9 | python -m twine upload dist/* 10 | 11 | clean: 12 | rm -r build dist *.egg-info || true -------------------------------------------------------------------------------- /tests/test_project.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import unittest 5 | import kmeans_pytorch 6 | 7 | 8 | class UnitTests(unittest.TestCase): 9 | def test_import(self): 10 | self.assertIsNotNone(kmeans_pytorch) 11 | 12 | def test_project(self): 13 | self.assertTrue(False, "write more tests here") -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Balanced K-Means using PyTorch 2 | 3 | PyTorch implementation of balanced kmeans. Based on https://github.com/subhadarship/kmeans_pytorch 4 | 5 | # Requirements 6 | * [PyTorch](http://pytorch.org/) version >= 1.0.0 7 | * Python version >= 3.6 8 | 9 | # Installing from source 10 | 11 | To install from source and develop locally: 12 | ``` 13 | git clone https://github.com/kernelmachine/balanced-kmeans/ 14 | cd balanced-kmeans 15 | pip install --editable . 
16 | ``` 17 | 18 | Install pytorch 19 | 20 | ``` 21 | pip3 install torch==1.10.1+cu113 torchvision==0.11.2+cu113 torchaudio==0.10.1+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html 22 | ``` 23 | 24 | Install additional dependencies 25 | 26 | ``` 27 | pip install matplotlib tqdm scikit-learn numba 28 | ``` 29 | 30 | # Run example 31 | 32 | This will output a plot of clusters in a pdf file. 33 | 34 | ``` 35 | python cluster.py 36 | ``` 37 | 38 | You can check out the notebook `example.ipynb` as well. -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | permalink: pretty 2 | 3 | # Setup 4 | title: 'kmeans PyTorch' 5 | #tagline: 'A Jekyll theme' 6 | url: https://subhadarship.github.io/kmeans_pytorch/ 7 | baseurl: '/kmeans_pytorch' 8 | 9 | # About/contact 10 | author: 11 | name: Subhadarshi Panda 12 | url: https://subhadarship.github.io 13 | github: 14 | repo: https://github.com/subhadarship/kmeans_pytorch/ 15 | 16 | # Custom vars 17 | version: 0.3 18 | 19 | multilingual: false 20 | src: "." 
21 | default_theme: "Rust" 22 | 23 | # For Maths 24 | markdown: kramdown 25 | 26 | # To use hljs, disable the default highlighter 27 | kramdown: 28 | syntax_highlighter_opts: 29 | disable: true 30 | 31 | # Chapter Configuration (up to a 2 level nested list) 32 | 33 | chapters: 34 | - path: chapters/example/example.md 35 | - path: chapters/cpu_vs_gpu/cpu_vs_gpu.md 36 | -------------------------------------------------------------------------------- /docs/static/css/print.css: -------------------------------------------------------------------------------- 1 | 2 | #sidebar, 3 | #menu-bar, 4 | .nav-chapters, 5 | .mobile-nav-chapters { 6 | display: none; 7 | } 8 | 9 | #page-wrapper.page-wrapper { 10 | transform: none; 11 | margin-left: 0px; 12 | overflow-y: initial; 13 | } 14 | 15 | #content { 16 | max-width: none; 17 | margin: 0; 18 | padding: 0; 19 | } 20 | 21 | .page { 22 | overflow-y: initial; 23 | } 24 | 25 | code { 26 | background-color: #666666; 27 | border-radius: 5px; 28 | 29 | /* Force background to be printed in Chrome */ 30 | -webkit-print-color-adjust: exact; 31 | } 32 | 33 | pre > .buttons { 34 | z-index: 2; 35 | } 36 | 37 | a, a:visited, a:active, a:hover { 38 | color: #4183c4; 39 | text-decoration: none; 40 | } 41 | 42 | h1, h2, h3, h4, h5, h6 { 43 | page-break-inside: avoid; 44 | page-break-after: avoid; 45 | } 46 | 47 | pre, code { 48 | page-break-inside: avoid; 49 | white-space: pre-wrap; 50 | } 51 | 52 | .fa { 53 | display: none !important; 54 | } 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 subhadarshi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /docs/static/ayu-highlight.css: -------------------------------------------------------------------------------- 1 | /* 2 | Based off of the Ayu theme 3 | Original by Dempfi (https://github.com/dempfi/ayu) 4 | */ 5 | 6 | .hljs { 7 | display: block; 8 | overflow-x: auto; 9 | background: #191f26; 10 | color: #e6e1cf; 11 | padding: 0.5em; 12 | } 13 | 14 | .hljs-comment, 15 | .hljs-quote, 16 | .hljs-meta { 17 | color: #5c6773; 18 | font-style: italic; 19 | } 20 | 21 | .hljs-variable, 22 | .hljs-template-variable, 23 | .hljs-attribute, 24 | .hljs-attr, 25 | .hljs-regexp, 26 | .hljs-link, 27 | .hljs-selector-id, 28 | .hljs-selector-class { 29 | color: #ff7733; 30 | } 31 | 32 | .hljs-number, 33 | .hljs-builtin-name, 34 | .hljs-literal, 35 | .hljs-type, 36 | .hljs-params { 37 | color: #ffee99; 38 | } 39 | 40 | .hljs-string, 41 | .hljs-bullet { 42 | color: #b8cc52; 43 | } 44 | 45 | .hljs-title, 46 | .hljs-built_in, 47 | .hljs-section { 48 | color: #ffb454; 49 | } 50 | 51 | .hljs-keyword, 52 | .hljs-selector-tag, 53 | .hljs-symbol { 54 | color: #ff7733; 55 | } 56 | 57 | 
.hljs-name { 58 | color: #36a3d9; 59 | } 60 | 61 | .hljs-tag { 62 | color: #00568d; 63 | } 64 | 65 | .hljs-emphasis { 66 | font-style: italic; 67 | } 68 | 69 | .hljs-strong { 70 | font-weight: bold; 71 | } 72 | 73 | .hljs-addition { 74 | color: #91b362; 75 | } 76 | 77 | .hljs-deletion { 78 | color: #d96c75; 79 | } 80 | -------------------------------------------------------------------------------- /docs/static/highlight.css: -------------------------------------------------------------------------------- 1 | /* Base16 Atelier Dune Light - Theme */ 2 | /* by Bram de Haan (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune) */ 3 | /* Original Base16 color scheme by Chris Kempson (https://github.com/chriskempson/base16) */ 4 | 5 | /* Atelier-Dune Comment */ 6 | .hljs-comment, 7 | .hljs-quote { 8 | color: #AAA; 9 | } 10 | 11 | /* Atelier-Dune Red */ 12 | .hljs-variable, 13 | .hljs-template-variable, 14 | .hljs-attribute, 15 | .hljs-tag, 16 | .hljs-name, 17 | .hljs-regexp, 18 | .hljs-link, 19 | .hljs-name, 20 | .hljs-selector-id, 21 | .hljs-selector-class { 22 | color: #d73737; 23 | } 24 | 25 | /* Atelier-Dune Orange */ 26 | .hljs-number, 27 | .hljs-meta, 28 | .hljs-built_in, 29 | .hljs-builtin-name, 30 | .hljs-literal, 31 | .hljs-type, 32 | .hljs-params { 33 | color: #b65611; 34 | } 35 | 36 | /* Atelier-Dune Green */ 37 | .hljs-string, 38 | .hljs-symbol, 39 | .hljs-bullet { 40 | color: #60ac39; 41 | } 42 | 43 | /* Atelier-Dune Blue */ 44 | .hljs-title, 45 | .hljs-section { 46 | color: #6684e1; 47 | } 48 | 49 | /* Atelier-Dune Purple */ 50 | .hljs-keyword, 51 | .hljs-selector-tag { 52 | color: #b854d4; 53 | } 54 | 55 | .hljs { 56 | display: block; 57 | overflow-x: auto; 58 | background: #f1f1f1; 59 | color: #6e6b5e; 60 | padding: 0.5em; 61 | } 62 | 63 | .hljs-emphasis { 64 | font-style: italic; 65 | } 66 | 67 | .hljs-strong { 68 | font-weight: bold; 69 | } 70 | 71 | .hljs-addition { 72 | color: #22863a; 73 | background-color: #f0fff4; 74 | } 75 | 76 
| .hljs-deletion { 77 | color: #b31d28; 78 | background-color: #ffeef0; 79 | } 80 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | title: K Means using PyTorch 4 | --- 5 | 6 | PyTorch implementation of kmeans for utilizing GPU 7 | 8 | ![Alt Text](https://media.giphy.com/media/WsYIwIHHXUcuiR8BeS/giphy.gif) 9 | 10 | # Getting Started 11 | ``` 12 | 13 | import torch 14 | import numpy as np 15 | from kmeans_pytorch import kmeans 16 | 17 | # data 18 | data_size, dims, num_clusters = 1000, 2, 3 19 | x = np.random.randn(data_size, dims) / 6 20 | x = torch.from_numpy(x) 21 | 22 | # kmeans 23 | cluster_ids_x, cluster_centers = kmeans( 24 | X=x, num_clusters=num_clusters, distance='euclidean', device=torch.device('cuda:0') 25 | ) 26 | ``` 27 | 28 | see [`example.ipynb`](https://github.com/subhadarship/kmeans_pytorch/blob/master/example.ipynb) for a more elaborate example 29 | 30 | # Requirements 31 | * [PyTorch](http://pytorch.org/) version >= 1.0.0 32 | * Python version >= 3.6 33 | 34 | # Installation 35 | 36 | install with `pip`: 37 | ``` 38 | pip install kmeans-pytorch 39 | ``` 40 | 41 | **Installing from source** 42 | 43 | To install from source and develop locally: 44 | ``` 45 | git clone https://github.com/subhadarship/kmeans_pytorch 46 | cd kmeans_pytorch 47 | pip install --editable . 
48 | ``` 49 | 50 | # CPU vs GPU 51 | see [`cpu_vs_gpu.ipynb`](https://github.com/subhadarship/kmeans_pytorch/blob/master/cpu_vs_gpu.ipynb) for a comparison between CPU and GPU 52 | 53 | # Notes 54 | - useful when clustering large number of samples 55 | - utilizes GPU for faster matrix computations 56 | - support euclidean and cosine distances (for now) 57 | 58 | # Credits 59 | - This implementation closely follows the style of [this](https://github.com/overshiki/kmeans_pytorch) 60 | - Documentation is done using the awesome theme [jekyllbook](https://github.com/ebetica/jekyllbook) 61 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # For a fully annotated version of this file and what it does, see 5 | # https://github.com/pypa/sampleproject/blob/master/setup.py 6 | 7 | # To upload this file to PyPI you must build it then upload it: 8 | # python setup.py sdist bdist_wheel # build in 'dist' folder 9 | # python-m twine upload dist/* # 'twine' must be installed: 'pip install twine' 10 | 11 | 12 | import ast 13 | import io 14 | import re 15 | import os 16 | from setuptools import find_packages, setup 17 | 18 | DEPENDENCIES = [] 19 | EXCLUDE_FROM_PACKAGES = ["contrib", "docs", "tests*"] 20 | CURDIR = os.path.abspath(os.path.dirname(__file__)) 21 | 22 | with io.open(os.path.join(CURDIR, "README.md"), "r", encoding="utf-8") as f: 23 | README = f.read() 24 | 25 | 26 | def get_version(): 27 | main_file = os.path.join(CURDIR, "kmeans_pytorch", "main.py") 28 | _version_re = re.compile(r"__version__\s+=\s+(?P.*)") 29 | with open(main_file, "r", encoding="utf8") as f: 30 | match = _version_re.search(f.read()) 31 | version = match.group("version") if match is not None else '"unknown"' 32 | return str(ast.literal_eval(version)) 33 | 34 | 35 | setup( 36 | name="kmeans_pytorch", 37 | 
version=get_version(), 38 | author="Subhadarshi", 39 | author_email="subhadarshipanda08@gmail.com", 40 | description="", 41 | long_description=README, 42 | long_description_content_type="text/markdown", 43 | url="https://github.com/subhadarship/kmeans_pytorch", 44 | packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES), 45 | include_package_data=True, 46 | keywords=[], 47 | scripts=[], 48 | entry_points={"console_scripts": ["kmeans_pytorch=kmeans_pytorch.main:main"]}, 49 | zip_safe=False, 50 | install_requires=DEPENDENCIES, 51 | test_suite="tests.test_project", 52 | python_requires=">=3.6", 53 | # license and classifier list: 54 | # https://pypi.org/pypi?%3Aaction=list_classifiers 55 | license="License :: OSI Approved :: MIT License", 56 | classifiers=[ 57 | "Programming Language :: Python", 58 | # "Programming Language :: Python :: 3", 59 | # "Operating System :: OS Independent", 60 | # "Private :: Do Not Upload" 61 | ], 62 | ) -------------------------------------------------------------------------------- /docs/static/tomorrow-night.css: -------------------------------------------------------------------------------- 1 | /* Tomorrow Night Theme */ 2 | /* http://jmblog.github.com/color-themes-for-google-code-highlightjs */ 3 | /* Original theme - https://github.com/chriskempson/tomorrow-theme */ 4 | /* http://jmblog.github.com/color-themes-for-google-code-highlightjs */ 5 | 6 | /* Tomorrow Comment */ 7 | .hljs-comment { 8 | color: #969896; 9 | } 10 | 11 | /* Tomorrow Red */ 12 | .hljs-variable, 13 | .hljs-attribute, 14 | .hljs-tag, 15 | .hljs-regexp, 16 | .ruby .hljs-constant, 17 | .xml .hljs-tag .hljs-title, 18 | .xml .hljs-pi, 19 | .xml .hljs-doctype, 20 | .html .hljs-doctype, 21 | .css .hljs-id, 22 | .css .hljs-class, 23 | .css .hljs-pseudo { 24 | color: #cc6666; 25 | } 26 | 27 | /* Tomorrow Orange */ 28 | .hljs-number, 29 | .hljs-preprocessor, 30 | .hljs-pragma, 31 | .hljs-built_in, 32 | .hljs-literal, 33 | .hljs-params, 34 | .hljs-constant { 35 | color: 
#de935f; 36 | } 37 | 38 | /* Tomorrow Yellow */ 39 | .ruby .hljs-class .hljs-title, 40 | .css .hljs-rule .hljs-attribute { 41 | color: #f0c674; 42 | } 43 | 44 | /* Tomorrow Green */ 45 | .hljs-string, 46 | .hljs-value, 47 | .hljs-inheritance, 48 | .hljs-header, 49 | .hljs-name, 50 | .ruby .hljs-symbol, 51 | .xml .hljs-cdata { 52 | color: #b5bd68; 53 | } 54 | 55 | /* Tomorrow Aqua */ 56 | .hljs-title, 57 | .css .hljs-hexcolor { 58 | color: #8abeb7; 59 | } 60 | 61 | /* Tomorrow Blue */ 62 | .hljs-function, 63 | .python .hljs-decorator, 64 | .python .hljs-title, 65 | .ruby .hljs-function .hljs-title, 66 | .ruby .hljs-title .hljs-keyword, 67 | .perl .hljs-sub, 68 | .javascript .hljs-title, 69 | .coffeescript .hljs-title { 70 | color: #81a2be; 71 | } 72 | 73 | /* Tomorrow Purple */ 74 | .hljs-keyword, 75 | .javascript .hljs-function { 76 | color: #b294bb; 77 | } 78 | 79 | .hljs { 80 | display: block; 81 | overflow-x: auto; 82 | background: #1d1f21; 83 | color: #c5c8c6; 84 | padding: 0.5em; 85 | -webkit-text-size-adjust: none; 86 | } 87 | 88 | .coffeescript .javascript, 89 | .javascript .xml, 90 | .tex .hljs-formula, 91 | .xml .javascript, 92 | .xml .vbscript, 93 | .xml .css, 94 | .xml .hljs-cdata { 95 | opacity: 0.5; 96 | } 97 | 98 | .hljs-addition { 99 | color: #718c00; 100 | } 101 | 102 | .hljs-deletion { 103 | color: #c82829; 104 | } 105 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # 
Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /cluster.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from kmeans_pytorch import KMeans 5 | from sklearn.decomposition import PCA, TruncatedSVD 6 | from sklearn import datasets 7 | import numpy as np 8 | from tqdm.auto import tqdm 9 | 10 | 11 | def batchify(a, batch_size=512): 12 | n = (len(a) // batch_size) + len(a) % batch_size 13 | for i in np.array_split(a, n, axis=0): 14 | yield i 15 | 16 | 17 | def plot_blobs(data,cluster_centers, labels, plot_file): 18 | plt.figure(figsize=(4, 3), dpi=160) 19 | pca = PCA(n_components=2) 20 | master = np.concatenate([data, cluster_centers], 0) 21 | pca = pca.fit(master) 22 | data = pca.transform(data) 23 | plt.scatter(data[:, 0], data[:, 1], c=labels) 24 | cluster_centers = pca.transform(cluster_centers) 25 | plt.scatter( 26 | cluster_centers[:, 0], cluster_centers[:, 1], 27 | c='white', 28 | alpha=0.6, 29 | edgecolors='black', 30 | linewidths=2 31 | ) 32 | #plt.axis([-1, 1, -1, 1]) 33 | plt.tight_layout() 34 | plt.savefig(plot_file, dpi=300) 35 | 36 | 37 | if __name__ == '__main__': 38 | seed = 235 39 | num_clusters = 8 40 | balanced = True 41 | debug = True 42 | # set random seed 43 | np.random.seed(seed) 44 | 45 | n_samples = 8000 46 | blobs = 
datasets.make_blobs(n_samples=n_samples, 47 | random_state=seed, 48 | centers=[[1, 1], [-1, -1], [1, -1]], 49 | cluster_std=0.6) 50 | X = torch.from_numpy(blobs[0][:n_samples//2]) 51 | y = torch.from_numpy(blobs[0][n_samples//2:]) 52 | 53 | if torch.cuda.is_available(): 54 | device = torch.device('cuda:0') 55 | else: 56 | device = torch.device('cpu') 57 | 58 | kmeans = KMeans(n_clusters=8, device=torch.device('cuda:0'), balanced=True) 59 | batched_X = batchify(X.to(device), batch_size=8) 60 | 61 | counter = 0 62 | _ = kmeans.fit( 63 | X=X, distance='euclidean', iter_limit=100, tqdm_flag=True, online=False 64 | ) 65 | 66 | 67 | kmeans.save("model.pkl") 68 | kmeans = kmeans.load("model.pkl") 69 | 70 | cluster_ids_y_ = [] 71 | 72 | cluster_ids_y = kmeans.predict( 73 | X=y.to(device) 74 | ) 75 | 76 | if balanced: 77 | output = 'balanced_clusters.pdf' 78 | else: 79 | output = 'unbalanced_clusters.pdf' 80 | plot_blobs(y, kmeans.cluster_centers.to('cpu'), cluster_ids_y, output) -------------------------------------------------------------------------------- /docs/static/css/general.css: -------------------------------------------------------------------------------- 1 | /* Base styles and content styles */ 2 | 3 | @import 'variables.css'; 4 | 5 | :root { 6 | /* Browser default font-size is 16px, this way 1 rem = 10px */ 7 | font-size: 62.5%; 8 | } 9 | 10 | html { 11 | font-family: "Open Sans", sans-serif; 12 | color: var(--fg); 13 | background-color: var(--bg); 14 | text-size-adjust: none; 15 | } 16 | 17 | body { 18 | margin: 0; 19 | font-size: 1.6rem; 20 | overflow-x: hidden; 21 | } 22 | 23 | code { 24 | font-family: "Source Code Pro", Consolas, "Ubuntu Mono", Menlo, "DejaVu Sans Mono", monospace, monospace !important; 25 | font-size: 0.875em; /* please adjust the ace font size accordingly in editor.js */ 26 | } 27 | 28 | .left { float: left; } 29 | .right { float: right; } 30 | .boring { opacity: 0.6; } 31 | .hide-boring .boring { display: none; } 32 | .hidden { 
display: none; } 33 | 34 | h2, h3 { margin-top: 2.5em; } 35 | h4, h5 { margin-top: 2em; } 36 | 37 | .header + .header h3, 38 | .header + .header h4, 39 | .header + .header h5 { 40 | margin-top: 1em; 41 | } 42 | 43 | h1 a.header:target::before, 44 | h2 a.header:target::before, 45 | h3 a.header:target::before, 46 | h4 a.header:target::before { 47 | display: inline-block; 48 | content: "»"; 49 | margin-left: -30px; 50 | width: 30px; 51 | } 52 | 53 | h1 a.header:target, 54 | h2 a.header:target, 55 | h3 a.header:target, 56 | h4 a.header:target { 57 | scroll-margin-top: calc(var(--menu-bar-height) + 0.5em); 58 | } 59 | 60 | .page { 61 | outline: 0; 62 | padding: 0 var(--page-padding); 63 | } 64 | .page-wrapper { 65 | box-sizing: border-box; 66 | } 67 | .js:not(.sidebar-resizing) .page-wrapper { 68 | transition: margin-left 0.3s ease, transform 0.3s ease; /* Animation: slide away */ 69 | } 70 | 71 | .content { 72 | overflow-y: auto; 73 | padding: 0 15px; 74 | padding-bottom: 50px; 75 | } 76 | .content main { 77 | margin-left: auto; 78 | margin-right: auto; 79 | max-width: var(--content-max-width); 80 | } 81 | .content a { text-decoration: none; } 82 | .content a:hover { text-decoration: underline; } 83 | .content img { max-width: 100%; } 84 | .content .header:link, 85 | .content .header:visited { 86 | color: var(--fg); 87 | } 88 | .content .header:link, 89 | .content .header:visited:hover { 90 | text-decoration: none; 91 | } 92 | 93 | table { 94 | margin: 0 auto; 95 | border-collapse: collapse; 96 | } 97 | table td { 98 | padding: 3px 20px; 99 | border: 1px var(--table-border-color) solid; 100 | } 101 | table thead { 102 | background: var(--table-header-bg); 103 | } 104 | table thead td { 105 | font-weight: 700; 106 | border: none; 107 | } 108 | table thead th { 109 | padding: 3px 20px; 110 | } 111 | table thead tr { 112 | border: 1px var(--table-header-bg) solid; 113 | } 114 | /* Alternate background colors for rows */ 115 | table tbody tr:nth-child(2n) { 116 | 
background: var(--table-alternate-bg); 117 | } 118 | 119 | 120 | blockquote { 121 | margin: 20px 0; 122 | padding: 0 20px; 123 | color: var(--fg); 124 | background-color: var(--quote-bg); 125 | border-top: .1em solid var(--quote-border); 126 | border-bottom: .1em solid var(--quote-border); 127 | } 128 | 129 | 130 | :not(.footnote-definition) + .footnote-definition, 131 | .footnote-definition + :not(.footnote-definition) { 132 | margin-top: 2em; 133 | } 134 | .footnote-definition { 135 | font-size: 0.9em; 136 | margin: 0.5em 0; 137 | } 138 | .footnote-definition p { 139 | display: inline; 140 | } 141 | 142 | .tooltiptext { 143 | position: absolute; 144 | visibility: hidden; 145 | color: #fff; 146 | background-color: #333; 147 | transform: translateX(-50%); /* Center by moving tooltip 50% of its width left */ 148 | left: -8px; /* Half of the width of the icon */ 149 | top: -35px; 150 | font-size: 0.8em; 151 | text-align: center; 152 | border-radius: 6px; 153 | padding: 5px 8px; 154 | margin: 5px; 155 | z-index: 1000; 156 | } 157 | .tooltipped .tooltiptext { 158 | visibility: visible; 159 | } 160 | -------------------------------------------------------------------------------- /docs/chapters/example/example.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | title: Example 4 | --- 5 | 6 | ## Installation 7 | Install easily using ```pip``` 8 | 9 | 10 | ```python 11 | !pip install kmeans-pytorch 12 | ``` 13 | 14 | Collecting kmeans-pytorch 15 | Downloading https://files.pythonhosted.org/packages/b5/c9/eb5b82e7e9741e61acf1aff70530a08810aa0c7e2272c534ff7a150fc5bd/kmeans_pytorch-0.3-py3-none-any.whl 16 | Installing collected packages: kmeans-pytorch 17 | Successfully installed kmeans-pytorch-0.3 18 | 19 | 20 | ## Import packages 21 | ```kmeans_pytorch``` and other packages 22 | 23 | 24 | ```python 25 | import torch 26 | import numpy as np 27 | import matplotlib.pyplot as plt 28 | from kmeans_pytorch import 
kmeans, kmeans_predict 29 | ``` 30 | 31 | ## Set random seed 32 | For reproducibility 33 | 34 | 35 | ```python 36 | # set random seed 37 | np.random.seed(123) 38 | ``` 39 | 40 | ## Generate data 41 | 1. Generate data from a random distribution 42 | 2. Convert to torch.tensor 43 | 44 | 45 | ```python 46 | # data 47 | data_size, dims, num_clusters = 1000, 2, 3 48 | x = np.random.randn(data_size, dims) / 6 49 | x = torch.from_numpy(x) 50 | ``` 51 | 52 | ## Set Device 53 | If available, set device to GPU 54 | 55 | 56 | ```python 57 | # set device 58 | if torch.cuda.is_available(): 59 | device = torch.device('cuda:0') 60 | else: 61 | device = torch.device('cpu') 62 | ``` 63 | 64 | ## Perform K-Means 65 | 66 | 67 | ```python 68 | # k-means 69 | cluster_ids_x, cluster_centers = kmeans( 70 | X=x, num_clusters=num_clusters, distance='euclidean', device=device 71 | ) 72 | ``` 73 | 74 | running k-means on cuda:0.. 75 | 76 | 77 | [running kmeans]: 7it [00:00, 29.79it/s, center_shift=0.000068, iteration=7, tol=0.000100] 78 | 79 | 80 | ### Cluster IDs and Cluster Centers 81 | 82 | 83 | ```python 84 | # cluster IDs and cluster centers 85 | print(cluster_ids_x) 86 | print(cluster_centers) 87 | ``` 88 | 89 | tensor([2, 0, 2, 0, 1, 0, 1, 0, 1, 1, 2, 2, 0, 1, 0, 0, 0, 1, 2, 2, 0, 2, 1, 1, 90 | 2, 0, 1, 2, 2, 1, 2, 0, 1, 1, 2, 1, 1, 2, 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 91 | 0, 1, 0, 2, 1, 0, 0, 2, 2, 1, 0, 1, 0, 2, 1, 1, 1, 0, 2, 1, 2, 1, 2, 1, 92 | 1, 2, 2, 1, 0, 2, 1, 1, 1, 2, 1, 1, 1, 0, 2, 2, 1, 2, 2, 1, 0, 0, 2, 1, 93 | 1, 0, 0, 0, 1, 1, 1, 0, 2, 1, 0, 2, 1, 2, 0, 0, 1, 0, 2, 2, 2, 1, 1, 1, 94 | 1, 0, 1, 0, 2, 1, 0, 1, 1, 2, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 2, 95 | 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 0, 2, 0, 0, 1, 0, 2, 2, 0, 0, 96 | 2, 1, 0, 1, 0, 2, 2, 0, 0, 0, 2, 0, 2, 2, 2, 1, 1, 0, 1, 2, 2, 0, 1, 0, 97 | 2, 2, 1, 1, 0, 0, 2, 2, 1, 0, 2, 0, 2, 1, 2, 1, 1, 0, 2, 0, 0, 2, 2, 2, 98 | 0, 1, 0, 1, 1, 2, 1, 2, 1, 0, 0, 2, 2, 2, 2, 0, 1, 1, 1, 2, 1, 0, 2, 0, 99 | 0, 2, 2, 
1, 1, 0, 0, 2, 1, 1, 1, 2, 1, 0, 0, 1, 1, 2, 2, 1, 0, 0, 2, 1, 100 | 1, 0, 1, 2, 1, 2, 0, 2, 2, 0, 2, 1, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 1, 101 | 0, 1, 0, 1, 2, 0, 2, 1, 2, 1, 0, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 0, 2, 1, 102 | 0, 0, 2, 0, 2, 0, 1, 2, 1, 2, 0, 0, 2, 1, 1, 1, 1, 0, 2, 0, 2, 2, 1, 0, 103 | 1, 2, 2, 1, 1, 1, 2, 2, 0, 0, 1, 2, 1, 1, 0, 1, 2, 1, 2, 0, 0, 2, 0, 1, 104 | 1, 1, 2, 2, 1, 2, 0, 2, 0, 0, 2, 0, 2, 1, 2, 1, 1, 2, 2, 0, 1, 0, 0, 0, 105 | 0, 1, 0, 2, 2, 1, 0, 2, 0, 0, 2, 2, 2, 0, 1, 2, 0, 2, 2, 1, 2, 1, 2, 1, 106 | 0, 0, 0, 2, 0, 2, 2, 2, 0, 1, 1, 0, 2, 2, 0, 2, 2, 1, 0, 0, 2, 2, 0, 0, 107 | 1, 0, 1, 2, 0, 2, 0, 1, 0, 0, 0, 1, 2, 2, 1, 1, 2, 1, 1, 1, 0, 0, 2, 0, 108 | 0, 0, 2, 1, 1, 1, 2, 2, 2, 2, 0, 0, 1, 2, 0, 0, 1, 2, 1, 0, 1, 0, 2, 2, 109 | 0, 0, 0, 0, 2, 1, 0, 2, 1, 1, 2, 1, 0, 2, 0, 2, 0, 2, 1, 1, 2, 1, 0, 0, 110 | 0, 1, 2, 1, 1, 0, 2, 0, 2, 1, 2, 1, 1, 2, 0, 1, 0, 0, 2, 0, 2, 2, 2, 1, 111 | 1, 2, 1, 1, 2, 1, 1, 1, 1, 0, 0, 2, 1, 1, 2, 1, 1, 2, 0, 0, 2, 1, 2, 1, 112 | 1, 1, 1, 1, 0, 0, 2, 2, 1, 0, 1, 2, 2, 0, 1, 0, 2, 0, 2, 2, 2, 0, 1, 2, 113 | 2, 0, 1, 2, 1, 1, 2, 1, 2, 1, 0, 2, 0, 2, 1, 0, 2, 0, 1, 2, 1, 2, 1, 0, 114 | 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 2, 0, 0, 2, 1, 0, 1, 1, 1, 0, 0, 115 | 2, 1, 0, 2, 1, 1, 0, 2, 1, 2, 0, 2, 2, 1, 1, 0, 0, 2, 0, 2, 1, 1, 0, 1, 116 | 1, 0, 2, 2, 2, 1, 0, 0, 2, 1, 1, 1, 2, 1, 0, 1, 1, 1, 2, 2, 1, 1, 2, 1, 117 | 0, 1, 0, 0, 0, 2, 0, 1, 0, 0, 1, 1, 0, 1, 2, 1, 1, 1, 1, 0, 1, 0, 0, 2, 118 | 1, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 2, 0, 119 | 2, 2, 0, 2, 2, 1, 1, 1, 1, 0, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 0, 2, 120 | 2, 0, 2, 2, 0, 2, 1, 0, 0, 2, 0, 0, 1, 0, 2, 2, 0, 1, 2, 0, 0, 1, 1, 2, 121 | 2, 2, 0, 1, 2, 0, 0, 1, 2, 2, 0, 1, 0, 0, 2, 2, 0, 2, 1, 0, 1, 1, 2, 1, 122 | 0, 2, 1, 1, 0, 1, 1, 0, 2, 2, 2, 2, 1, 0, 0, 0, 2, 1, 2, 2, 0, 0, 0, 2, 123 | 1, 2, 1, 0, 2, 0, 0, 1, 1, 2, 2, 1, 2, 1, 2, 0, 0, 2, 1, 0, 1, 0, 0, 2, 124 | 2, 2, 2, 0, 1, 2, 2, 2, 2, 2, 1, 0, 0, 1, 1, 0, 2, 0, 2, 
0, 2, 0, 0, 1, 125 | 0, 0, 0, 2, 0, 2, 1, 2, 0, 1, 0, 2, 0, 0, 0, 1, 0, 1, 1, 1, 0, 2, 2, 0, 126 | 1, 2, 0, 1, 1, 2, 2, 1, 2, 1, 0, 1, 0, 2, 1, 1, 2, 1, 1, 2, 2, 0, 1, 0, 127 | 2, 2, 0, 2, 2, 2, 1, 1, 0, 1, 2, 0, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 2, 2, 128 | 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 0, 1, 1, 2, 1, 0, 1, 1, 1, 0, 2, 2, 129 | 2, 2, 1, 2, 0, 1, 2, 0, 1, 1, 1, 1, 2, 2, 0, 0, 2, 1, 1, 1, 0, 2, 0, 2, 130 | 2, 2, 0, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 0, 0]) 131 | tensor([[-0.1075, -0.1522], 132 | [ 0.1544, -0.0137], 133 | [-0.0833, 0.1454]]) 134 | 135 | 136 | ### Create More Data Just for Prediction 137 | 138 | 139 | ```python 140 | # more data 141 | y = np.random.randn(5, dims) / 6 142 | y = torch.from_numpy(y) 143 | ``` 144 | 145 | ## Predict 146 | 147 | 148 | ```python 149 | # predict cluster ids for y 150 | cluster_ids_y = kmeans_predict( 151 | y, cluster_centers, 'euclidean', device=device 152 | ) 153 | ``` 154 | 155 | predicting on cuda:0.. 156 | 157 | 158 | ### Show Predicted Cluster IDs 159 | 160 | 161 | ```python 162 | print(cluster_ids_y) 163 | ``` 164 | 165 | tensor([1, 2, 0, 1, 2]) 166 | 167 | 168 | ## Plot 169 | plot the samples 170 | 171 | 172 | ```python 173 | # plot 174 | plt.figure(figsize=(4, 3), dpi=160) 175 | plt.scatter(x[:, 0], x[:, 1], c=cluster_ids_x, cmap='cool') 176 | plt.scatter(y[:, 0], y[:, 1], c=cluster_ids_y, cmap='cool', marker='X') 177 | plt.scatter( 178 | cluster_centers[:, 0], cluster_centers[:, 1], 179 | c='white', 180 | alpha=0.6, 181 | edgecolors='black', 182 | linewidths=2 183 | ) 184 | plt.axis([-1, 1, -1, 1]) 185 | plt.tight_layout() 186 | plt.show() 187 | ``` 188 | 189 | 190 | ![png](output_21_0.png) 191 | 192 | 193 | ### Hope the example was useful !! 
194 | 195 | 196 | ```python 197 | 198 | ``` 199 | -------------------------------------------------------------------------------- /docs/static/css/variables.css: -------------------------------------------------------------------------------- 1 | 2 | /* Globals */ 3 | 4 | :root { 5 | --sidebar-width: 300px; 6 | --page-padding: 15px; 7 | --content-max-width: 750px; 8 | --menu-bar-height: 50px; 9 | } 10 | 11 | /* Themes */ 12 | 13 | .ayu { 14 | --bg: hsl(210, 25%, 8%); 15 | --fg: #c5c5c5; 16 | 17 | --sidebar-bg: #14191f; 18 | --sidebar-fg: #c8c9db; 19 | --sidebar-non-existant: #5c6773; 20 | --sidebar-active: #ffb454; 21 | --sidebar-spacer: #2d334f; 22 | 23 | --scrollbar: var(--sidebar-fg); 24 | 25 | --icons: #737480; 26 | --icons-hover: #b7b9cc; 27 | 28 | --links: #0096cf; 29 | 30 | --inline-code-color: #ffb454; 31 | 32 | --theme-popup-bg: #14191f; 33 | --theme-popup-border: #5c6773; 34 | --theme-hover: #191f26; 35 | 36 | --quote-bg: hsl(226, 15%, 17%); 37 | --quote-border: hsl(226, 15%, 22%); 38 | 39 | --table-border-color: hsl(210, 25%, 13%); 40 | --table-header-bg: hsl(210, 25%, 28%); 41 | --table-alternate-bg: hsl(210, 25%, 11%); 42 | 43 | --searchbar-border-color: #848484; 44 | --searchbar-bg: #424242; 45 | --searchbar-fg: #fff; 46 | --searchbar-shadow-color: #d4c89f; 47 | --searchresults-header-fg: #666; 48 | --searchresults-border-color: #888; 49 | --searchresults-li-bg: #252932; 50 | --search-mark-bg: #e3b171; 51 | } 52 | 53 | .coal { 54 | --bg: hsl(200, 7%, 8%); 55 | --fg: #98a3ad; 56 | 57 | --sidebar-bg: #292c2f; 58 | --sidebar-fg: #a1adb8; 59 | --sidebar-non-existant: #505254; 60 | --sidebar-active: #3473ad; 61 | --sidebar-spacer: #393939; 62 | 63 | --scrollbar: var(--sidebar-fg); 64 | 65 | --icons: #43484d; 66 | --icons-hover: #b3c0cc; 67 | 68 | --links: #2b79a2; 69 | 70 | --inline-code-color: #c5c8c6;; 71 | 72 | --theme-popup-bg: #141617; 73 | --theme-popup-border: #43484d; 74 | --theme-hover: #1f2124; 75 | 76 | --quote-bg: hsl(234, 21%, 18%); 77 | 
--quote-border: hsl(234, 21%, 23%); 78 | 79 | --table-border-color: hsl(200, 7%, 13%); 80 | --table-header-bg: hsl(200, 7%, 28%); 81 | --table-alternate-bg: hsl(200, 7%, 11%); 82 | 83 | --searchbar-border-color: #aaa; 84 | --searchbar-bg: #b7b7b7; 85 | --searchbar-fg: #000; 86 | --searchbar-shadow-color: #aaa; 87 | --searchresults-header-fg: #666; 88 | --searchresults-border-color: #98a3ad; 89 | --searchresults-li-bg: #2b2b2f; 90 | --search-mark-bg: #355c7d; 91 | } 92 | 93 | .light { 94 | --bg: hsl(0, 0%, 100%); 95 | --fg: #333333; 96 | 97 | --sidebar-bg: #fafafa; 98 | --sidebar-fg: #364149; 99 | --sidebar-non-existant: #aaaaaa; 100 | --sidebar-active: #008cff; 101 | --sidebar-spacer: #f4f4f4; 102 | 103 | --scrollbar: #cccccc; 104 | 105 | --icons: #cccccc; 106 | --icons-hover: #333333; 107 | 108 | --links: #4183c4; 109 | 110 | --inline-code-color: #6e6b5e; 111 | 112 | --theme-popup-bg: #fafafa; 113 | --theme-popup-border: #cccccc; 114 | --theme-hover: #e6e6e6; 115 | 116 | --quote-bg: hsl(197, 37%, 96%); 117 | --quote-border: hsl(197, 37%, 91%); 118 | 119 | --table-border-color: hsl(0, 0%, 95%); 120 | --table-header-bg: hsl(0, 0%, 80%); 121 | --table-alternate-bg: hsl(0, 0%, 97%); 122 | 123 | --searchbar-border-color: #aaa; 124 | --searchbar-bg: #fafafa; 125 | --searchbar-fg: #000; 126 | --searchbar-shadow-color: #aaa; 127 | --searchresults-header-fg: #666; 128 | --searchresults-border-color: #888; 129 | --searchresults-li-bg: #e4f2fe; 130 | --search-mark-bg: #a2cff5; 131 | } 132 | 133 | .navy { 134 | --bg: hsl(226, 23%, 11%); 135 | --fg: #bcbdd0; 136 | 137 | --sidebar-bg: #282d3f; 138 | --sidebar-fg: #c8c9db; 139 | --sidebar-non-existant: #505274; 140 | --sidebar-active: #2b79a2; 141 | --sidebar-spacer: #2d334f; 142 | 143 | --scrollbar: var(--sidebar-fg); 144 | 145 | --icons: #737480; 146 | --icons-hover: #b7b9cc; 147 | 148 | --links: #2b79a2; 149 | 150 | --inline-code-color: #c5c8c6;; 151 | 152 | --theme-popup-bg: #161923; 153 | --theme-popup-border: #737480; 154 
| --theme-hover: #282e40; 155 | 156 | --quote-bg: hsl(226, 15%, 17%); 157 | --quote-border: hsl(226, 15%, 22%); 158 | 159 | --table-border-color: hsl(226, 23%, 16%); 160 | --table-header-bg: hsl(226, 23%, 31%); 161 | --table-alternate-bg: hsl(226, 23%, 14%); 162 | 163 | --searchbar-border-color: #aaa; 164 | --searchbar-bg: #aeaec6; 165 | --searchbar-fg: #000; 166 | --searchbar-shadow-color: #aaa; 167 | --searchresults-header-fg: #5f5f71; 168 | --searchresults-border-color: #5c5c68; 169 | --searchresults-li-bg: #242430; 170 | --search-mark-bg: #a2cff5; 171 | } 172 | 173 | .rust { 174 | --bg: hsl(60, 9%, 87%); 175 | --fg: #262625; 176 | 177 | --sidebar-bg: #3b2e2a; 178 | --sidebar-fg: #c8c9db; 179 | --sidebar-non-existant: #505254; 180 | --sidebar-active: #e69f67; 181 | --sidebar-spacer: #45373a; 182 | 183 | --scrollbar: var(--sidebar-fg); 184 | 185 | --icons: #737480; 186 | --icons-hover: #262625; 187 | 188 | --links: #2b79a2; 189 | 190 | --inline-code-color: #6e6b5e; 191 | 192 | --theme-popup-bg: #e1e1db; 193 | --theme-popup-border: #b38f6b; 194 | --theme-hover: #99908a; 195 | 196 | --quote-bg: hsl(60, 5%, 75%); 197 | --quote-border: hsl(60, 5%, 70%); 198 | 199 | --table-border-color: hsl(60, 9%, 82%); 200 | --table-header-bg: #b3a497; 201 | --table-alternate-bg: hsl(60, 9%, 84%); 202 | 203 | --searchbar-border-color: #aaa; 204 | --searchbar-bg: #fafafa; 205 | --searchbar-fg: #000; 206 | --searchbar-shadow-color: #aaa; 207 | --searchresults-header-fg: #666; 208 | --searchresults-border-color: #888; 209 | --searchresults-li-bg: #dec2a2; 210 | --search-mark-bg: #e69f67; 211 | } 212 | 213 | @media (prefers-color-scheme: dark) { 214 | .light.no-js { 215 | --bg: hsl(200, 7%, 8%); 216 | --fg: #98a3ad; 217 | 218 | --sidebar-bg: #292c2f; 219 | --sidebar-fg: #a1adb8; 220 | --sidebar-non-existant: #505254; 221 | --sidebar-active: #3473ad; 222 | --sidebar-spacer: #393939; 223 | 224 | --scrollbar: var(--sidebar-fg); 225 | 226 | --icons: #43484d; 227 | --icons-hover: #b3c0cc; 
228 | 229 | --links: #2b79a2; 230 | 231 | --inline-code-color: #c5c8c6;; 232 | 233 | --theme-popup-bg: #141617; 234 | --theme-popup-border: #43484d; 235 | --theme-hover: #1f2124; 236 | 237 | --quote-bg: hsl(234, 21%, 18%); 238 | --quote-border: hsl(234, 21%, 23%); 239 | 240 | --table-border-color: hsl(200, 7%, 13%); 241 | --table-header-bg: hsl(200, 7%, 28%); 242 | --table-alternate-bg: hsl(200, 7%, 11%); 243 | 244 | --searchbar-border-color: #aaa; 245 | --searchbar-bg: #b7b7b7; 246 | --searchbar-fg: #000; 247 | --searchbar-shadow-color: #aaa; 248 | --searchresults-header-fg: #666; 249 | --searchresults-border-color: #98a3ad; 250 | --searchresults-li-bg: #2b2b2f; 251 | --search-mark-bg: #355c7d; 252 | } 253 | } 254 | -------------------------------------------------------------------------------- /docs/chapters/cpu_vs_gpu/cpu_vs_gpu.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: default 3 | title: CPU vs GPU 4 | --- 5 | 6 | **How useful is using kmeans_pytorch if you have GPU?** 7 | 8 | Let's find out !! 
9 | 10 | 11 | ```python 12 | # installation 13 | !pip install kmeans-pytorch 14 | ``` 15 | 16 | Collecting kmeans-pytorch 17 | Downloading https://files.pythonhosted.org/packages/b5/c9/eb5b82e7e9741e61acf1aff70530a08810aa0c7e2272c534ff7a150fc5bd/kmeans_pytorch-0.3-py3-none-any.whl 18 | Installing collected packages: kmeans-pytorch 19 | Successfully installed kmeans-pytorch-0.3 20 | 21 | 22 | ## Import Packages 23 | 24 | 25 | ```python 26 | import torch 27 | import numpy as np 28 | import matplotlib.pyplot as plt 29 | from time import time 30 | from kmeans_pytorch import kmeans, kmeans_predict 31 | ``` 32 | 33 | ## Set Random Seed (For Reproducibilty) 34 | 35 | 36 | ```python 37 | # set random seed 38 | np.random.seed(123) 39 | ``` 40 | 41 | ## Set Number of Dimensions, Number of Clusters 42 | 43 | 44 | ```python 45 | # dimensions, num clusters 46 | dims, num_clusters = 2, 3 47 | ``` 48 | 49 | ## Set Data Sizes 50 | 51 | 52 | ```python 53 | # data sizes 54 | data_sizes = [100000, 1000000, 5000000, 10000000] 55 | ``` 56 | 57 | ## Compute CPU and GPU Times 58 | 59 | 60 | ```python 61 | gpu_times = [] 62 | cpu_times = [] 63 | 64 | for data_size in data_sizes: 65 | print(f'\ndata size: {data_size}') 66 | 67 | # data 68 | x = np.random.randn(data_size, dims) / 6 69 | x = torch.from_numpy(x) 70 | 71 | # gpu 72 | start_gpu = time() 73 | kmeans_gpu = kmeans(X=x, num_clusters=num_clusters, device=torch.device('cuda:0')) 74 | gpu_time = time() - start_gpu 75 | gpu_times.append(gpu_time) 76 | print(f'gpu time: {gpu_time}') 77 | 78 | # cpu 79 | start_cpu = time() 80 | kmeans_cpu = kmeans(X=x, num_clusters=num_clusters, device=torch.device('cpu')) 81 | cpu_time = time() - start_cpu 82 | cpu_times.append(cpu_time) 83 | print(f'cpu time: {cpu_time}') 84 | ``` 85 | 86 | 87 | data size: 100000 88 | running k-means on cuda:0.. 
89 | 90 | 91 | [running kmeans]: 6it [00:00, 13.96it/s, center_shift=0.000058, iteration=6, tol=0.000100] 92 | [running kmeans]: 2it [00:00, 16.60it/s, center_shift=0.003620, iteration=3, tol=0.000100] 93 | 94 | gpu time: 10.371965885162354 95 | running k-means on cpu.. 96 | 97 | 98 | [running kmeans]: 7it [00:00, 19.78it/s, center_shift=0.000048, iteration=7, tol=0.000100] 99 | [running kmeans]: 0it [00:00, ?it/s] 100 | 101 | cpu time: 0.36179161071777344 102 | 103 | data size: 1000000 104 | running k-means on cuda:0.. 105 | 106 | 107 | [running kmeans]: 7it [00:03, 2.31it/s, center_shift=0.000070, iteration=7, tol=0.000100] 108 | [running kmeans]: 0it [00:00, ?it/s] 109 | 110 | gpu time: 3.0890297889709473 111 | running k-means on cpu.. 112 | 113 | 114 | [running kmeans]: 6it [00:02, 2.31it/s, center_shift=0.000054, iteration=6, tol=0.000100] 115 | 116 | 117 | cpu time: 2.6320385932922363 118 | 119 | data size: 5000000 120 | 121 | 122 | [running kmeans]: 0it [00:00, ?it/s] 123 | 124 | running k-means on cuda:0.. 125 | 126 | 127 | [running kmeans]: 5it [00:10, 2.02s/it, center_shift=0.000037, iteration=5, tol=0.000100] 128 | [running kmeans]: 0it [00:00, ?it/s] 129 | 130 | gpu time: 10.312965869903564 131 | running k-means on cpu.. 
132 | 133 | 134 | 135 | [running kmeans]: 0it [00:01, ?it/s, center_shift=0.069426, iteration=1, tol=0.000100] 136 | [running kmeans]: 1it [00:01, 1.98s/it, center_shift=0.069426, iteration=1, tol=0.000100] 137 | [running kmeans]: 1it [00:03, 1.98s/it, center_shift=0.004168, iteration=2, tol=0.000100] 138 | [running kmeans]: 2it [00:03, 1.99s/it, center_shift=0.004168, iteration=2, tol=0.000100] 139 | [running kmeans]: 2it [00:06, 1.99s/it, center_shift=0.001386, iteration=3, tol=0.000100] 140 | [running kmeans]: 3it [00:06, 2.00s/it, center_shift=0.001386, iteration=3, tol=0.000100] 141 | [running kmeans]: 3it [00:07, 2.00s/it, center_shift=0.000462, iteration=4, tol=0.000100] 142 | [running kmeans]: 4it [00:07, 2.00s/it, center_shift=0.000462, iteration=4, tol=0.000100] 143 | [running kmeans]: 4it [00:10, 2.00s/it, center_shift=0.000153, iteration=5, tol=0.000100] 144 | [running kmeans]: 5it [00:10, 2.02s/it, center_shift=0.000153, iteration=5, tol=0.000100] 145 | [running kmeans]: 5it [00:12, 2.02s/it, center_shift=0.000051, iteration=6, tol=0.000100] 146 | [running kmeans]: 6it [00:12, 2.01s/it, center_shift=0.000051, iteration=6, tol=0.000100] 147 | 148 | cpu time: 12.246060371398926 149 | 150 | data size: 10000000 151 | running k-means on cuda:0.. 
152 | 153 | 154 | 155 | 156 | [running kmeans]: 0it [00:00, ?it/s] 157 | 158 | [running kmeans]: 0it [00:03, ?it/s, center_shift=0.108101, iteration=1, tol=0.000100] 159 | 160 | [running kmeans]: 1it [00:03, 3.98s/it, center_shift=0.108101, iteration=1, tol=0.000100] 161 | 162 | [running kmeans]: 1it [00:08, 3.98s/it, center_shift=0.007211, iteration=2, tol=0.000100] 163 | 164 | [running kmeans]: 2it [00:08, 4.03s/it, center_shift=0.007211, iteration=2, tol=0.000100] 165 | 166 | [running kmeans]: 2it [00:12, 4.03s/it, center_shift=0.001613, iteration=3, tol=0.000100] 167 | 168 | [running kmeans]: 3it [00:12, 4.04s/it, center_shift=0.001613, iteration=3, tol=0.000100] 169 | 170 | [running kmeans]: 3it [00:16, 4.04s/it, center_shift=0.000406, iteration=4, tol=0.000100] 171 | 172 | [running kmeans]: 4it [00:16, 4.01s/it, center_shift=0.000406, iteration=4, tol=0.000100] 173 | 174 | [running kmeans]: 4it [00:20, 4.01s/it, center_shift=0.000130, iteration=5, tol=0.000100] 175 | 176 | [running kmeans]: 5it [00:20, 3.99s/it, center_shift=0.000130, iteration=5, tol=0.000100] 177 | 178 | [running kmeans]: 5it [00:24, 3.99s/it, center_shift=0.000044, iteration=6, tol=0.000100] 179 | 180 | [running kmeans]: 6it [00:24, 4.00s/it, center_shift=0.000044, iteration=6, tol=0.000100] 181 | 182 | gpu time: 24.558437824249268 183 | running k-means on cpu.. 
184 | 185 | 186 | 187 | 188 | 189 | [running kmeans]: 0it [00:00, ?it/s] 190 | 191 | 192 | [running kmeans]: 0it [00:03, ?it/s, center_shift=0.170225, iteration=1, tol=0.000100] 193 | 194 | 195 | [running kmeans]: 1it [00:03, 3.97s/it, center_shift=0.170225, iteration=1, tol=0.000100] 196 | 197 | 198 | [running kmeans]: 1it [00:08, 3.97s/it, center_shift=0.013261, iteration=2, tol=0.000100] 199 | 200 | 201 | [running kmeans]: 2it [00:08, 4.03s/it, center_shift=0.013261, iteration=2, tol=0.000100] 202 | 203 | 204 | [running kmeans]: 2it [00:12, 4.03s/it, center_shift=0.003844, iteration=3, tol=0.000100] 205 | 206 | 207 | [running kmeans]: 3it [00:12, 4.04s/it, center_shift=0.003844, iteration=3, tol=0.000100] 208 | 209 | 210 | [running kmeans]: 3it [00:16, 4.04s/it, center_shift=0.001250, iteration=4, tol=0.000100] 211 | 212 | 213 | [running kmeans]: 4it [00:16, 4.03s/it, center_shift=0.001250, iteration=4, tol=0.000100] 214 | 215 | 216 | [running kmeans]: 4it [00:20, 4.03s/it, center_shift=0.000416, iteration=5, tol=0.000100] 217 | 218 | 219 | [running kmeans]: 5it [00:20, 4.01s/it, center_shift=0.000416, iteration=5, tol=0.000100] 220 | 221 | 222 | [running kmeans]: 5it [00:24, 4.01s/it, center_shift=0.000139, iteration=6, tol=0.000100] 223 | 224 | 225 | [running kmeans]: 6it [00:24, 4.00s/it, center_shift=0.000139, iteration=6, tol=0.000100] 226 | 227 | 228 | [running kmeans]: 6it [00:28, 4.00s/it, center_shift=0.000047, iteration=7, tol=0.000100] 229 | 230 | 231 | [running kmeans]: 7it [00:28, 4.00s/it, center_shift=0.000047, iteration=7, tol=0.000100] 232 | 233 | cpu time: 28.59460973739624 234 | 235 | 236 | ## Plot 237 | Plot the CPU and GPU times 238 | 239 | 240 | ```python 241 | # plot 242 | plt.figure(figsize=(6, 3), dpi=160) 243 | plt.plot(data_sizes, gpu_times, marker='o', label='gpu', color='xkcd:vermillion') 244 | plt.plot(data_sizes, cpu_times, marker='o', label='cpu', color='xkcd:navy') 245 | plt.xticks(data_sizes) 246 | plt.legend(fontsize=12) 247 | 
plt.grid(alpha=0.2) 248 | plt.xlabel('data size', fontsize=14) 249 | plt.ylabel('time (s)', fontsize=14) 250 | plt.show() 251 | ``` 252 | 253 | 254 | ![png](output_13_0.png) 255 | 256 | 257 | ## Key Takeaways 258 | 1. Using GPU is not always faster than using CPU for kmeans in PyTorch 259 | 2. Use GPU if the data size is large 260 | 261 | 262 | ```python 263 | 264 | ``` 265 | -------------------------------------------------------------------------------- /docs/static/clipboard.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * clipboard.js v2.0.4 3 | * https://zenorocha.github.io/clipboard.js 4 | * 5 | * Licensed MIT © Zeno Rocha 6 | */ 7 | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return function(n){var o={};function r(t){if(o[t])return o[t].exports;var e=o[t]={i:t,l:!1,exports:{}};return n[t].call(e.exports,e,e.exports,r),e.l=!0,e.exports}return r.m=n,r.c=o,r.d=function(t,e,n){r.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},r.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return r.d(e,"a",e),e},r.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},r.p="",r(r.s=0)}([function(t,e,n){"use strict";var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return 
t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},i=function(){function o(t,e){for(var n=0;n .hljs { 18 | color: var(--links); 19 | } 20 | 21 | /* Menu Bar */ 22 | 23 | #menu-bar { 24 | position: -webkit-sticky; 25 | position: sticky; 26 | top: 0; 27 | z-index: 101; 28 | margin: auto calc(0px - var(--page-padding)); 29 | } 30 | #menu-bar > #menu-bar-sticky-container { 31 | display: flex; 32 | flex-wrap: wrap; 33 | background-color: var(--bg); 34 | border-bottom-color: var(--bg); 35 | border-bottom-width: 1px; 36 | border-bottom-style: solid; 37 | } 38 | .js #menu-bar > #menu-bar-sticky-container { 39 | transition: transform 0.3s; 40 | } 41 | #menu-bar.bordered > #menu-bar-sticky-container { 42 | border-bottom-color: var(--table-border-color); 43 | } 44 | #menu-bar i, #menu-bar .icon-button { 45 | position: relative; 46 | padding: 0 8px; 47 | z-index: 10; 48 | line-height: var(--menu-bar-height); 49 | cursor: pointer; 50 | transition: color 0.5s; 51 | } 52 | @media only screen and (max-width: 420px) { 53 | #menu-bar i, #menu-bar .icon-button { 54 | padding: 0 5px; 55 | } 56 | } 57 | 58 | .icon-button { 59 | border: none; 60 | background: none; 61 | padding: 0; 62 | color: inherit; 63 | } 64 | .icon-button i { 65 | margin: 0; 66 | } 67 | 68 | .right-buttons { 69 | margin: 0 15px; 70 | } 71 | .right-buttons a { 72 | text-decoration: none; 73 | } 74 | 75 | html:not(.sidebar-visible) #menu-bar:not(:hover).folded > #menu-bar-sticky-container { 76 | transform: translateY(calc(-10px - var(--menu-bar-height))); 77 | } 78 | 79 | .left-buttons { 80 | display: flex; 81 | margin: 0 5px; 82 | } 83 | .no-js .left-buttons { 84 | display: none; 85 | } 86 | 87 | .menu-title { 88 | display: inline-block; 89 | font-weight: 200; 90 | font-size: 2rem; 91 | line-height: var(--menu-bar-height); 92 | text-align: center; 93 | margin: 0; 94 | flex: 1; 95 | white-space: nowrap; 96 | overflow: hidden; 97 | text-overflow: ellipsis; 98 | } 99 | .js 
.menu-title { 100 | cursor: pointer; 101 | } 102 | 103 | .menu-bar, 104 | .menu-bar:visited, 105 | .nav-chapters, 106 | .nav-chapters:visited, 107 | .mobile-nav-chapters, 108 | .mobile-nav-chapters:visited, 109 | .menu-bar .icon-button, 110 | .menu-bar a i { 111 | color: var(--icons); 112 | } 113 | 114 | .menu-bar i:hover, 115 | .menu-bar .icon-button:hover, 116 | .nav-chapters:hover, 117 | .mobile-nav-chapters i:hover { 118 | color: var(--icons-hover); 119 | } 120 | 121 | /* Nav Icons */ 122 | 123 | .nav-chapters { 124 | font-size: 2.5em; 125 | text-align: center; 126 | text-decoration: none; 127 | 128 | position: fixed; 129 | top: 0; 130 | bottom: 0; 131 | margin: 0; 132 | max-width: 150px; 133 | min-width: 90px; 134 | 135 | display: flex; 136 | justify-content: center; 137 | align-content: center; 138 | flex-direction: column; 139 | 140 | transition: color 0.5s, background-color 0.5s; 141 | } 142 | 143 | .nav-chapters:hover { 144 | text-decoration: none; 145 | background-color: var(--theme-hover); 146 | transition: background-color 0.15s, color 0.15s; 147 | } 148 | 149 | .nav-wrapper { 150 | margin-top: 50px; 151 | display: none; 152 | } 153 | 154 | .mobile-nav-chapters { 155 | font-size: 2.5em; 156 | text-align: center; 157 | text-decoration: none; 158 | width: 90px; 159 | border-radius: 5px; 160 | background-color: var(--sidebar-bg); 161 | } 162 | 163 | .previous { 164 | float: left; 165 | } 166 | 167 | .next { 168 | float: right; 169 | right: var(--page-padding); 170 | } 171 | 172 | @media only screen and (max-width: 1080px) { 173 | .nav-wide-wrapper { display: none; } 174 | .nav-wrapper { display: block; } 175 | } 176 | 177 | @media only screen and (max-width: 1380px) { 178 | .sidebar-visible .nav-wide-wrapper { display: none; } 179 | .sidebar-visible .nav-wrapper { display: block; } 180 | } 181 | 182 | /* Inline code */ 183 | 184 | :not(pre) > .hljs { 185 | display: inline; 186 | padding: 0.1em 0.3em; 187 | border-radius: 3px; 188 | } 189 | 190 | 
:not(pre):not(a) > .hljs { 191 | color: var(--inline-code-color); 192 | overflow-x: initial; 193 | } 194 | 195 | a:hover > .hljs { 196 | text-decoration: underline; 197 | } 198 | 199 | pre { 200 | position: relative; 201 | } 202 | pre > .buttons { 203 | position: absolute; 204 | z-index: 100; 205 | right: 5px; 206 | top: 5px; 207 | 208 | color: var(--sidebar-fg); 209 | cursor: pointer; 210 | } 211 | pre > .buttons :hover { 212 | color: var(--sidebar-active); 213 | } 214 | pre > .buttons i { 215 | margin-left: 8px; 216 | } 217 | pre > .buttons button { 218 | color: inherit; 219 | background: transparent; 220 | border: none; 221 | cursor: inherit; 222 | } 223 | pre > .result { 224 | margin-top: 10px; 225 | } 226 | 227 | /* Search */ 228 | 229 | #searchresults a { 230 | text-decoration: none; 231 | } 232 | 233 | mark { 234 | border-radius: 2px; 235 | padding: 0 3px 1px 3px; 236 | margin: 0 -3px -1px -3px; 237 | background-color: var(--search-mark-bg); 238 | transition: background-color 300ms linear; 239 | cursor: pointer; 240 | } 241 | 242 | mark.fade-out { 243 | background-color: rgba(0,0,0,0) !important; 244 | cursor: auto; 245 | } 246 | 247 | .searchbar-outer { 248 | margin-left: auto; 249 | margin-right: auto; 250 | max-width: var(--content-max-width); 251 | } 252 | 253 | #searchbar { 254 | width: 100%; 255 | margin: 5px auto 0px auto; 256 | padding: 10px 16px; 257 | transition: box-shadow 300ms ease-in-out; 258 | border: 1px solid var(--searchbar-border-color); 259 | border-radius: 3px; 260 | background-color: var(--searchbar-bg); 261 | color: var(--searchbar-fg); 262 | } 263 | #searchbar:focus, 264 | #searchbar.active { 265 | box-shadow: 0 0 3px var(--searchbar-shadow-color); 266 | } 267 | 268 | .searchresults-header { 269 | font-weight: bold; 270 | font-size: 1em; 271 | padding: 18px 0 0 5px; 272 | color: var(--searchresults-header-fg); 273 | } 274 | 275 | .searchresults-outer { 276 | margin-left: auto; 277 | margin-right: auto; 278 | max-width: 
var(--content-max-width); 279 | border-bottom: 1px dashed var(--searchresults-border-color); 280 | } 281 | 282 | ul#searchresults { 283 | list-style: none; 284 | padding-left: 20px; 285 | } 286 | ul#searchresults li { 287 | margin: 10px 0px; 288 | padding: 2px; 289 | border-radius: 2px; 290 | } 291 | ul#searchresults li.focus { 292 | background-color: var(--searchresults-li-bg); 293 | } 294 | ul#searchresults span.teaser { 295 | display: block; 296 | clear: both; 297 | margin: 5px 0 0 20px; 298 | font-size: 0.8em; 299 | } 300 | ul#searchresults span.teaser em { 301 | font-weight: bold; 302 | font-style: normal; 303 | } 304 | 305 | /* Sidebar */ 306 | 307 | .sidebar { 308 | position: fixed; 309 | left: 0; 310 | top: 0; 311 | bottom: 0; 312 | width: var(--sidebar-width); 313 | font-size: 0.875em; 314 | box-sizing: border-box; 315 | -webkit-overflow-scrolling: touch; 316 | overscroll-behavior-y: contain; 317 | background-color: var(--sidebar-bg); 318 | color: var(--sidebar-fg); 319 | } 320 | .sidebar-resizing { 321 | -moz-user-select: none; 322 | -webkit-user-select: none; 323 | -ms-user-select: none; 324 | user-select: none; 325 | } 326 | .js:not(.sidebar-resizing) .sidebar { 327 | transition: transform 0.3s; /* Animation: slide away */ 328 | } 329 | .sidebar code { 330 | line-height: 2em; 331 | } 332 | .sidebar .sidebar-scrollbox { 333 | overflow-y: auto; 334 | position: absolute; 335 | top: 0; 336 | bottom: 0; 337 | left: 0; 338 | right: 0; 339 | padding: 10px 10px; 340 | } 341 | .sidebar .sidebar-resize-handle { 342 | position: absolute; 343 | cursor: col-resize; 344 | width: 0; 345 | right: 0; 346 | top: 0; 347 | bottom: 0; 348 | } 349 | .js .sidebar .sidebar-resize-handle { 350 | cursor: col-resize; 351 | width: 5px; 352 | } 353 | .sidebar-hidden .sidebar { 354 | transform: translateX(calc(0px - var(--sidebar-width))); 355 | } 356 | .sidebar::-webkit-scrollbar { 357 | background: var(--sidebar-bg); 358 | } 359 | .sidebar::-webkit-scrollbar-thumb { 360 | 
background: var(--scrollbar); 361 | } 362 | 363 | .sidebar-visible .page-wrapper { 364 | transform: translateX(var(--sidebar-width)); 365 | } 366 | @media only screen and (min-width: 620px) { 367 | .sidebar-visible .page-wrapper { 368 | transform: none; 369 | margin-left: var(--sidebar-width); 370 | } 371 | } 372 | 373 | .chapter { 374 | list-style: none outside none; 375 | padding-left: 0; 376 | line-height: 2.2em; 377 | } 378 | 379 | .chapter ol { 380 | width: 100%; 381 | } 382 | 383 | .chapter li { 384 | display: flex; 385 | color: var(--sidebar-non-existant); 386 | } 387 | .chapter li a { 388 | display: block; 389 | padding: 0; 390 | text-decoration: none; 391 | color: var(--sidebar-fg); 392 | } 393 | 394 | .chapter li a:hover { 395 | color: var(--sidebar-active); 396 | } 397 | 398 | .chapter li a.active { 399 | color: var(--sidebar-active); 400 | } 401 | 402 | .chapter li > a.toggle { 403 | cursor: pointer; 404 | display: block; 405 | margin-left: auto; 406 | padding: 0 10px; 407 | user-select: none; 408 | opacity: 0.68; 409 | } 410 | 411 | .chapter li > a.toggle div { 412 | transition: transform 0.5s; 413 | } 414 | 415 | /* collapse the section */ 416 | .chapter li:not(.expanded) + li > ol { 417 | display: none; 418 | } 419 | 420 | .chapter li.expanded > a.toggle div { 421 | transform: rotate(90deg); 422 | } 423 | 424 | .spacer { 425 | width: 100%; 426 | height: 3px; 427 | margin: 5px 0px; 428 | } 429 | .chapter .spacer { 430 | background-color: var(--sidebar-spacer); 431 | } 432 | 433 | @media (-moz-touch-enabled: 1), (pointer: coarse) { 434 | .chapter li a { padding: 5px 0; } 435 | .spacer { margin: 10px 0; } 436 | } 437 | 438 | .section { 439 | list-style: none outside none; 440 | padding-left: 20px; 441 | line-height: 1.9em; 442 | } 443 | 444 | /* Theme Menu Popup */ 445 | 446 | .theme-popup { 447 | position: absolute; 448 | left: 10px; 449 | top: var(--menu-bar-height); 450 | z-index: 1000; 451 | border-radius: 4px; 452 | font-size: 0.7em; 453 | color: 
from functools import partial

import numpy as np
import torch
import pickle


def auction_lap(job_and_worker_to_score, return_token_to_worker=True):
    """
    Solve the balanced linear assignment problem with the auction algorithm.

    Arguments:
        job_and_worker_to_score: N x M tensor of scores between N jobs
            (data points) and M workers (cluster centers).  Higher is
            better, so callers pass *negated* distances.
        return_token_to_worker: if True, return for each job the id of the
            worker that won it; otherwise return the job permutation.
    Returns:
        1-D tensor describing the balanced assignment between jobs and workers.
    """
    # Minimum bid increment: 1/50 of the score range, bounded away from
    # zero so every round makes progress.
    eps = (job_and_worker_to_score.max() - job_and_worker_to_score.min()) / 50
    eps.clamp_min_(1e-04)
    if torch.isnan(job_and_worker_to_score).any():
        raise Exception("NaN distance")
    worker_and_job_to_score = job_and_worker_to_score.detach().transpose(0, 1).contiguous()
    num_workers, num_jobs = worker_and_job_to_score.size()
    jobs_per_worker = num_jobs // num_workers
    value = torch.clone(worker_and_job_to_score)
    bids = torch.zeros((num_workers, num_jobs),
                       dtype=worker_and_job_to_score.dtype,
                       device=worker_and_job_to_score.device,
                       requires_grad=False)
    counter = 0
    index = None
    cost = torch.zeros((1, num_jobs),
                       dtype=worker_and_job_to_score.dtype,
                       device=worker_and_job_to_score.device,
                       requires_grad=False)
    while True:
        top_values, top_index = value.topk(jobs_per_worker + 1, dim=1)
        # Each worker bids the difference in value between that job and
        # the (k+1)-th best job.
        bid_increments = top_values[:, :-1] - top_values[:, -1:] + eps
        assert bid_increments.size() == (num_workers, jobs_per_worker)
        bids.zero_()
        bids.scatter_(dim=1, index=top_index[:, :-1], src=bid_increments)

        if counter < 100 and index is not None:
            # If we were successful on the last round, put in a minimal bid
            # to retain the job only if no one else bids.  After enough
            # iterations, keep it anyway.
            bids.view(-1)[index] = eps
        if counter > 1000:
            # Safety valve: force bids on unwanted jobs so the loop terminates.
            bids.view(-1)[jobs_without_bidder] = eps

        # Jobs that were a top choice for some worker / for no worker.
        jobs_with_bidder = (bids > 0).any(0).nonzero(as_tuple=False).squeeze(1)
        jobs_without_bidder = (bids == 0).all(0).nonzero(as_tuple=False).squeeze(1)

        # Find the highest bidding worker per job.
        high_bids, high_bidders = bids[:, jobs_with_bidder].max(dim=0)
        if high_bidders.size(0) == num_jobs:
            # All jobs were bid for.
            break

        # Make popular jobs more expensive.
        cost[:, jobs_with_bidder] += high_bids
        value = worker_and_job_to_score - cost

        # Hack to make sure that this job stays in the winning worker's
        # top-k next round.
        index = (high_bidders * num_jobs) + jobs_with_bidder
        value.view(-1)[index] = worker_and_job_to_score.view(-1)[index]
        counter += 1

    if return_token_to_worker:
        return high_bidders
    _, sorting = torch.sort(high_bidders)
    assignment = jobs_with_bidder[sorting]
    assert len(assignment.unique()) == num_jobs
    return assignment.view(-1)


def batchify(a, n=2):
    """Yield ``a`` split into ``n`` roughly equal batches along axis 0."""
    for batch in np.array_split(a, n, axis=0):
        yield batch


def percentile(t, q):
    """Return the ``q``-th percentile (0-100) of each column of tensor ``t``."""
    k = 1 + round(.01 * float(q) * (t.shape[0] - 1))
    return t.kthvalue(k, dim=0).values


class KMeans(object):
    """K-means clustering on torch tensors, optionally balanced
    (equal-sized clusters via the auction algorithm)."""

    def __init__(self, n_clusters=None, cluster_centers=None, device=torch.device('cpu'), balanced=False):
        self.n_clusters = n_clusters
        self.cluster_centers = cluster_centers
        self.device = device
        self.balanced = balanced

    @classmethod
    def load(cls, path_to_file):
        """Restore a pickled KMeans object (always onto the CPU)."""
        with open(path_to_file, 'rb') as f:
            saved = pickle.load(f)
        return cls(saved['n_clusters'], saved['cluster_centers'], torch.device('cpu'), saved['balanced'])

    def save(self, path_to_file):
        """Pickle this object's attributes to ``path_to_file``."""
        with open(path_to_file, 'wb+') as f:
            pickle.dump(self.__dict__, f)

    def initialize(self, X):
        """
        Initialize cluster centers by sampling ``n_clusters`` distinct rows of X.
        :param X: (torch.tensor) matrix
        :return: (torch.tensor) initial cluster centers
        """
        num_samples = len(X)
        indices = np.random.choice(num_samples, self.n_clusters, replace=False)
        return X[indices]

    def _distance_function(self, distance, tqdm_flag, gamma_for_soft_dtw):
        """Resolve the pairwise-distance callable for ``distance``
        ('euclidean', 'cosine' or 'soft_dtw'); raises NotImplementedError otherwise."""
        if distance == 'euclidean':
            return partial(pairwise_distance, device=self.device, tqdm_flag=tqdm_flag)
        if distance == 'cosine':
            return partial(pairwise_cosine, device=self.device)
        if distance == 'soft_dtw':
            # Imported lazily so the optional numba dependency is only
            # required when soft-DTW is actually requested.
            from .soft_dtw_cuda import SoftDTW
            # BUGFIX: the original referenced an undefined global ``device``.
            sdtw = SoftDTW(use_cuda=self.device.type == 'cuda', gamma=gamma_for_soft_dtw)
            return partial(pairwise_soft_dtw, sdtw=sdtw, device=self.device)
        raise NotImplementedError

    def fit(
        self,
        X,
        distance='euclidean',
        tol=1e-3,
        tqdm_flag=True,
        iter_limit=0,
        gamma_for_soft_dtw=0.001,
        online=False,
        iter_k=None
    ):
        """
        Perform kmeans.
        :param X: (torch.tensor) matrix
        :param distance: (str) distance [options: 'euclidean', 'cosine', 'soft_dtw'] [default: 'euclidean']
        :param tol: (float) convergence threshold on squared center shift [default: 1e-3]
        :param tqdm_flag: allows to turn progress logs on and off
        :param iter_limit: hard limit for max number of iterations (0 = no limit)
        :param gamma_for_soft_dtw: approaches (hard) DTW as gamma -> 0
        :param online: resume from previous centers instead of re-initializing
        :param iter_k: online iteration counter; centers are initialized when it is 0
        :return: (torch.tensor) cluster ids on the CPU
        """
        if tqdm_flag:
            print(f'running k-means on {self.device}..')

        pairwise_distance_function = self._distance_function(distance, tqdm_flag, gamma_for_soft_dtw)

        # Convert to float and transfer to device.
        X = X.float().to(self.device)

        # (Re-)initialize the centers unless we are resuming an online run.
        if not online or iter_k == 0:
            self.cluster_centers = self.initialize(X)

        tqdm_meter = None
        if tqdm_flag:
            from tqdm.auto import tqdm  # lazy: only needed for progress display
            tqdm_meter = tqdm(desc='[running kmeans]')

        iteration = 0
        while True:
            dis = pairwise_distance_function(X, self.cluster_centers)
            if self.balanced:
                # Auction maximizes score, so negate distances.
                cluster_assignments = auction_lap(-dis)
            else:
                cluster_assignments = torch.argmin(dis, dim=1)

            initial_state_pre = self.cluster_centers.clone()
            for index in range(self.n_clusters):
                # view(-1) (rather than squeeze()) keeps a 1-D index even
                # when the cluster holds exactly one point.
                selected = torch.nonzero(cluster_assignments == index).view(-1).to(self.device)
                selected = torch.index_select(X, 0, selected)

                # Reseed empty clusters with a random point
                # (https://github.com/subhadarship/kmeans_pytorch/issues/16).
                if selected.shape[0] == 0:
                    selected = X[torch.randint(len(X), (1,))]

                self.cluster_centers[index] = selected.mean(dim=0)

            center_shift = torch.sum(
                torch.sqrt(
                    torch.sum((self.cluster_centers - initial_state_pre) ** 2, dim=1)
                ))

            iteration += 1

            if tqdm_meter is not None:
                tqdm_meter.set_postfix(
                    iteration=f'{iteration}',
                    center_shift=f'{center_shift ** 2:0.6f}',
                    tol=f'{tol:0.6f}'
                )
                tqdm_meter.update()
            if center_shift ** 2 < tol:
                break
            if iter_limit != 0 and iteration >= iter_limit:
                break

        if tqdm_meter is not None:
            tqdm_meter.close()
        return cluster_assignments.cpu()

    def plot(self, data, labels, plot_file):
        """Project data and cluster centers to 2-D with PCA and save a scatter plot."""
        if self.cluster_centers is None:
            raise Exception("Fit the KMeans object first before plotting!")
        # Lazy imports: plotting dependencies aren't needed for clustering.
        # BUGFIX: matplotlib was never imported in the original module.
        import matplotlib.pyplot as plt
        from sklearn.decomposition import PCA

        centers = self.cluster_centers
        if torch.is_tensor(centers):
            centers = centers.detach().cpu().numpy()
        data = np.asarray(data)

        plt.figure(figsize=(4, 3), dpi=160)
        pca = PCA(n_components=2).fit(np.concatenate([data, centers], 0))
        data_2d = pca.transform(data)
        plt.scatter(data_2d[:, 0], data_2d[:, 1], c=labels)
        # BUGFIX: the original assigned to an undefined local and then
        # plotted the *untransformed* centers; project them with the same
        # PCA so centers and points share a coordinate system.
        centers_2d = pca.transform(centers)
        plt.scatter(
            centers_2d[:, 0], centers_2d[:, 1],
            c='white',
            alpha=0.6,
            edgecolors='black',
            linewidths=2
        )
        plt.tight_layout()
        plt.savefig(plot_file, dpi=300)

    def predict(
        self,
        X,
        distance='euclidean',
        gamma_for_soft_dtw=0.001,
        tqdm_flag=False,
        return_distances=False,
        balanced=False
    ):
        """
        Predict using cluster centers.
        :param X: (torch.tensor) matrix
        :param distance: (str) distance [options: 'euclidean', 'cosine', 'soft_dtw'] [default: 'euclidean']
        :param gamma_for_soft_dtw: approaches (hard) DTW as gamma -> 0
        :param tqdm_flag: allows to turn logs on and off
        :param return_distances: also return the full distance matrix
        :param balanced: use the balanced (auction) assignment
        :return: (torch.tensor) cluster ids, optionally with distances
        """
        pairwise_distance_function = self._distance_function(distance, tqdm_flag, gamma_for_soft_dtw)

        # Convert to float and transfer to device.
        X = X.float()
        if self.device != torch.device('cpu'):
            X = X.to(self.device)

        distance_matrix = pairwise_distance_function(X, self.cluster_centers)
        if balanced:
            cluster_assignments = auction_lap(-distance_matrix)
        else:
            cluster_assignments = torch.argmin(distance_matrix, dim=1 if len(distance_matrix.shape) > 1 else 0)
            if len(distance_matrix.shape) == 1:
                # pairwise_distance squeezes a single-sample batch to 1-D;
                # restore the batch dimension.
                cluster_assignments = cluster_assignments.unsqueeze(0)
        if return_distances:
            return cluster_assignments.cpu(), distance_matrix
        return cluster_assignments.cpu()


def pairwise_distance(data1, data2, device=torch.device('cpu'), tqdm_flag=True):
    """Squared-euclidean distance between every row of data1 and data2.
    NOTE: the trailing squeeze() drops singleton dims, so a 1xM input
    yields a 1-D result (callers such as KMeans.predict compensate)."""
    # Transfer to device.
    if device != torch.device('cpu'):
        data1, data2 = data1.to(device), data2.to(device)

    # N*1*M
    A = data1.unsqueeze(dim=1)
    # 1*N*M
    B = data2.unsqueeze(dim=0)

    dis = (A - B) ** 2.0
    # Return N*N matrix for pairwise distance.
    return dis.sum(dim=-1).squeeze()


def pairwise_cosine(data1, data2, device=torch.device('cpu')):
    """Cosine distance (1 - cosine similarity) between every row of data1 and data2."""
    # Transfer to device.
    data1, data2 = data1.to(device), data2.to(device)

    # N*1*M
    A = data1.unsqueeze(dim=1)
    # 1*N*M
    B = data2.unsqueeze(dim=0)

    # Normalize the points: [0.3, 0.4] -> [0.3/0.5, 0.4/0.5]
    A_normalized = A / A.norm(dim=-1, keepdim=True)
    B_normalized = B / B.norm(dim=-1, keepdim=True)

    cosine = A_normalized * B_normalized

    # Return N*N matrix for pairwise distance.
    return 1 - cosine.sum(dim=-1).squeeze()


def pairwise_soft_dtw(data1, data2, sdtw=None, device=torch.device('cpu')):
    """Soft-DTW distance between each sequence in data1 and each in data2.
    ``sdtw`` must be a pre-constructed SoftDTW instance."""
    if sdtw is None:
        raise ValueError('sdtw is None - initialize it with SoftDTW')

    # Transfer to device.
    data1, data2 = data1.to(device), data2.to(device)

    # (batch_size, seq_len, feature_dim=1)
    A = data1.unsqueeze(dim=2)
    # (cluster_size, seq_len, feature_dim=1)
    B = data2.unsqueeze(dim=2)

    distances = []
    for b in B:
        # (1, seq_len, 1) broadcast against the whole batch.
        b = b.unsqueeze(dim=0)
        A, b = torch.broadcast_tensors(A, b)
        # (batch_size, 1)
        sdtw_distance = sdtw(b, A).view(-1, 1)
        distances.append(sdtw_distance)

    # (batch_size, cluster_size)
    return torch.cat(distances, dim=1)
181 |
182 | 210 | 211 | 212 | 219 | 220 |
221 |
222 |
223 |

{{ page.title }}

224 | {{ content }} 225 |
226 |
227 | 228 | 240 |
241 |
242 | 243 | 252 | 253 |
254 | 255 | 256 | 257 | 258 | 259 | 272 | 273 | 274 | 275 | -------------------------------------------------------------------------------- /kmeans_pytorch/soft_dtw_cuda.py: -------------------------------------------------------------------------------- 1 | # Code by Maghoumi/pytorch-softdtw-cuda 2 | # https://github.com/Maghoumi/pytorch-softdtw-cuda 3 | 4 | # MIT License 5 | # 6 | # Copyright (c) 2020 Mehran Maghoumi 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy 9 | # of this software and associated documentation files (the "Software"), to deal 10 | # in the Software without restriction, including without limitation the rights 11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | # copies of the Software, and to permit persons to whom the Software is 13 | # furnished to do so, subject to the following conditions: 14 | # 15 | # The above copyright notice and this permission notice shall be included in all 16 | # copies or substantial portions of the Software. 17 | # 18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 24 | # SOFTWARE. 
# ----------------------------------------------------------------------------------------------------------------------

import math

import numpy as np
import torch
import torch.cuda
from torch.autograd import Function

# numba is only required for the CUDA path and for JIT-compiling the CPU
# kernels.  Fall back to plain (slower) Python when it is unavailable so the
# module can still be imported and used on the CPU.
try:
    from numba import jit
    from numba import cuda
    _HAS_NUMBA = True
except ImportError:
    _HAS_NUMBA = False

    def jit(*args, **kwargs):
        """No-op stand-in for ``numba.jit``: runs the kernels as pure Python."""
        def decorator(func):
            return func
        return decorator


if _HAS_NUMBA:
    # ------------------------------------------------------------------------------------------------------------------
    @cuda.jit
    def compute_softdtw_cuda(D, gamma, bandwidth, max_i, max_j, n_passes, R):
        """
        Forward soft-DTW dynamic program on the GPU, one anti-diagonal at a time.
        :param max_i, max_j: lengths of the two sequences
        :param n_passes: 2 * seq_len - 1 (the number of anti-diagonals)
        """
        # Each block processes one pair of examples.
        b = cuda.blockIdx.x
        # We have as many threads as seq_len, because the most threads we
        # need equals the number of elements on the largest anti-diagonal.
        tid = cuda.threadIdx.x

        # The row index is always the same as tid.
        I = tid

        inv_gamma = 1.0 / gamma

        # Go over each anti-diagonal.  Only process threads that fall on the
        # current anti-diagonal.
        for p in range(n_passes):

            # The column index is 'p - tid', forced in-bounds.
            J = max(0, min(p - tid, max_j - 1))

            # For simplicity, i and j start from 1 (offset from I, J).
            i = I + 1
            j = J + 1

            # Only compute if element [i, j] is on the current anti-diagonal
            # and within bounds.
            if I + J == p and (I < max_i and J < max_j):
                # Don't compute if outside the Sakoe-Chiba bandwidth.
                if not (abs(i - j) > bandwidth > 0):
                    # Numerically-stable soft-min over the three predecessors.
                    r0 = -R[b, i - 1, j - 1] * inv_gamma
                    r1 = -R[b, i - 1, j] * inv_gamma
                    r2 = -R[b, i, j - 1] * inv_gamma
                    rmax = max(max(r0, r1), r2)
                    rsum = math.exp(r0 - rmax) + math.exp(r1 - rmax) + math.exp(r2 - rmax)
                    softmin = -gamma * (math.log(rsum) + rmax)
                    R[b, i, j] = D[b, i - 1, j - 1] + softmin

            # Wait for other threads in this block before the next diagonal.
            cuda.syncthreads()

    # ------------------------------------------------------------------------------------------------------------------
    @cuda.jit
    def compute_softdtw_backward_cuda(D, R, inv_gamma, bandwidth, max_i, max_j, n_passes, E):
        """Backward soft-DTW pass on the GPU; anti-diagonals are walked in reverse."""
        k = cuda.blockIdx.x
        tid = cuda.threadIdx.x

        # Indexing logic is the same as the forward kernel, but the
        # anti-diagonal progresses backwards.
        I = tid

        for p in range(n_passes):
            # Reverse the order to make the loop go backward.
            rev_p = n_passes - p - 1

            # Convert tid to I, J, then i, j.
            J = max(0, min(rev_p - tid, max_j - 1))

            i = I + 1
            j = J + 1

            # Only compute if element [i, j] is on the current anti-diagonal
            # and within bounds.
            if I + J == rev_p and (I < max_i and J < max_j):

                if math.isinf(R[k, i, j]):
                    R[k, i, j] = -math.inf

                # Don't compute if outside the bandwidth.
                if not (abs(i - j) > bandwidth > 0):
                    a = math.exp((R[k, i + 1, j] - R[k, i, j] - D[k, i + 1, j]) * inv_gamma)
                    b = math.exp((R[k, i, j + 1] - R[k, i, j] - D[k, i, j + 1]) * inv_gamma)
                    c = math.exp((R[k, i + 1, j + 1] - R[k, i, j] - D[k, i + 1, j + 1]) * inv_gamma)
                    E[k, i, j] = E[k, i + 1, j] * a + E[k, i, j + 1] * b + E[k, i + 1, j + 1] * c

            # Wait for other threads in this block.
            cuda.syncthreads()


# ----------------------------------------------------------------------------------------------------------------------
class _SoftDTWCUDA(Function):
    """
    CUDA implementation inspired by the diagonal one proposed in https://ieeexplore.ieee.org/document/8400444:
    "Developing a pattern discovery method in time series data and its GPU acceleration"
    """

    @staticmethod
    def forward(ctx, D, gamma, bandwidth):
        dev = D.device
        dtype = D.dtype
        # Modernized: torch.cuda.FloatTensor(...) is deprecated; build the
        # tensors on the input's device instead.
        gamma = torch.tensor([gamma], dtype=torch.float32, device=dev)
        bandwidth = torch.tensor([bandwidth], dtype=torch.float32, device=dev)

        B = D.shape[0]
        N = D.shape[1]
        M = D.shape[2]
        threads_per_block = max(N, M)
        n_passes = 2 * threads_per_block - 1

        # Prepare the output array.
        R = torch.ones((B, N + 2, M + 2), device=dev, dtype=dtype) * math.inf
        R[:, 0, 0] = 0

        # Run the CUDA kernel.
        # Grid size = batch size (every CUDA block processes one sample pair);
        # block size = length of the longer sequence (= largest diagonal).
        compute_softdtw_cuda[B, threads_per_block](cuda.as_cuda_array(D.detach()),
                                                   gamma.item(), bandwidth.item(), N, M, n_passes,
                                                   cuda.as_cuda_array(R))
        ctx.save_for_backward(D, R, gamma, bandwidth)
        return R[:, -2, -2]

    @staticmethod
    def backward(ctx, grad_output):
        dev = grad_output.device
        dtype = grad_output.dtype
        D, R, gamma, bandwidth = ctx.saved_tensors

        B = D.shape[0]
        N = D.shape[1]
        M = D.shape[2]
        threads_per_block = max(N, M)
        n_passes = 2 * threads_per_block - 1

        D_ = torch.zeros((B, N + 2, M + 2), dtype=dtype, device=dev)
        D_[:, 1:N + 1, 1:M + 1] = D

        R[:, :, -1] = -math.inf
        R[:, -1, :] = -math.inf
        R[:, -1, -1] = R[:, -2, -2]

        E = torch.zeros((B, N + 2, M + 2), dtype=dtype, device=dev)
        E[:, -1, -1] = 1

        # Grid and block sizes are set the same as for the forward() call.
        compute_softdtw_backward_cuda[B, threads_per_block](cuda.as_cuda_array(D_),
                                                            cuda.as_cuda_array(R),
                                                            1.0 / gamma.item(), bandwidth.item(), N, M, n_passes,
                                                            cuda.as_cuda_array(E))
        E = E[:, 1:N + 1, 1:M + 1]
        return grad_output.view(-1, 1, 1).expand_as(E) * E, None, None


# ----------------------------------------------------------------------------------------------------------------------
#
# The following is the CPU implementation based on https://github.com/Sleepwalking/pytorch-softdtw
# Credit goes to Kanru Hua.
# Support for batching and pruning was added upstream.
#
# ----------------------------------------------------------------------------------------------------------------------
@jit(nopython=True)
def compute_softdtw(D, gamma, bandwidth):
    """Forward soft-DTW dynamic program on the CPU; returns the full R matrix."""
    B = D.shape[0]
    N = D.shape[1]
    M = D.shape[2]
    R = np.ones((B, N + 2, M + 2)) * np.inf
    R[:, 0, 0] = 0
    for b in range(B):
        for j in range(1, M + 1):
            for i in range(1, N + 1):

                # Check the pruning condition.
                if 0 < bandwidth < np.abs(i - j):
                    continue

                # Numerically-stable soft-min over the three predecessors.
                r0 = -R[b, i - 1, j - 1] / gamma
                r1 = -R[b, i - 1, j] / gamma
                r2 = -R[b, i, j - 1] / gamma
                rmax = max(max(r0, r1), r2)
                rsum = np.exp(r0 - rmax) + np.exp(r1 - rmax) + np.exp(r2 - rmax)
                softmin = - gamma * (np.log(rsum) + rmax)
                R[b, i, j] = D[b, i - 1, j - 1] + softmin
    return R

# ----------------------------------------------------------------------------------------------------------------------
@jit(nopython=True)
def compute_softdtw_backward(D_, R, gamma, bandwidth):
    """Backward soft-DTW pass on the CPU; returns E, the gradient w.r.t. D."""
    B = D_.shape[0]
    N = D_.shape[1]
    M = D_.shape[2]
    D = np.zeros((B, N + 2, M + 2))
    E = np.zeros((B, N + 2, M + 2))
    D[:, 1:N + 1, 1:M + 1] = D_
    E[:, -1, -1] = 1
    R[:, :, -1] = -np.inf
    R[:, -1, :] = -np.inf
    R[:, -1, -1] = R[:, -2, -2]
    for k in range(B):
        for j in range(M, 0, -1):
            for i in range(N, 0, -1):

                if np.isinf(R[k, i, j]):
                    R[k, i, j] = -np.inf

                # Check the pruning condition.
                if 0 < bandwidth < np.abs(i - j):
                    continue

                a0 = (R[k, i + 1, j] - R[k, i, j] - D[k, i + 1, j]) / gamma
                b0 = (R[k, i, j + 1] - R[k, i, j] - D[k, i, j + 1]) / gamma
                c0 = (R[k, i + 1, j + 1] - R[k, i, j] - D[k, i + 1, j + 1]) / gamma
                a = np.exp(a0)
                b = np.exp(b0)
                c = np.exp(c0)
                E[k, i, j] = E[k, i + 1, j] * a + E[k, i, j + 1] * b + E[k, i + 1, j + 1] * c
    return E[:, 1:N + 1, 1:M + 1]

# ----------------------------------------------------------------------------------------------------------------------
class _SoftDTW(Function):
    """
    CPU implementation based on https://github.com/Sleepwalking/pytorch-softdtw
    """

    @staticmethod
    def forward(ctx, D, gamma, bandwidth):
        dev = D.device
        dtype = D.dtype
        gamma = torch.Tensor([gamma]).to(dev).type(dtype)  # dtype fixed
        bandwidth = torch.Tensor([bandwidth]).to(dev).type(dtype)
        D_ = D.detach().cpu().numpy()
        g_ = gamma.item()
        b_ = bandwidth.item()
        R = torch.Tensor(compute_softdtw(D_, g_, b_)).to(dev).type(dtype)
        ctx.save_for_backward(D, R, gamma, bandwidth)
        return R[:, -2, -2]

    @staticmethod
    def backward(ctx, grad_output):
        dev = grad_output.device
        dtype = grad_output.dtype
        D, R, gamma, bandwidth = ctx.saved_tensors
        D_ = D.detach().cpu().numpy()
        R_ = R.detach().cpu().numpy()
        g_ = gamma.item()
        b_ = bandwidth.item()
        E = torch.Tensor(compute_softdtw_backward(D_, R_, g_, b_)).to(dev).type(dtype)
        return grad_output.view(-1, 1, 1).expand_as(E) * E, None, None

# ----------------------------------------------------------------------------------------------------------------------
class SoftDTW(torch.nn.Module):
    """
    The soft DTW implementation that optionally supports CUDA
    """

    def __init__(self, use_cuda, gamma=1.0, normalize=False, bandwidth=None, dist_func=None):
        """
        Initializes a new instance using the supplied parameters
        :param use_cuda: Flag indicating whether the CUDA implementation should be used
        :param gamma: sDTW's gamma parameter
        :param normalize: Flag indicating whether to perform normalization
                          (as discussed in https://github.com/mblondel/soft-dtw/issues/10#issuecomment-383564790)
        :param bandwidth: Sakoe-Chiba bandwidth for pruning. Passing 'None' will disable pruning.
        :param dist_func: Optional point-wise distance function to use. If 'None', then a default Euclidean distance function will be used.
        """
        super(SoftDTW, self).__init__()
        self.normalize = normalize
        self.gamma = gamma
        self.bandwidth = 0 if bandwidth is None else float(bandwidth)
        self.use_cuda = use_cuda

        # Set the distance function.
        if dist_func is not None:
            self.dist_func = dist_func
        else:
            self.dist_func = SoftDTW._euclidean_dist_func

    def _get_func_dtw(self, x, y):
        """
        Checks the inputs and selects the proper implementation to use.
        """
        bx, lx, dx = x.shape
        by, ly, dy = y.shape
        # Make sure the dimensions match.
        assert bx == by  # Equal batch sizes
        assert dx == dy  # Equal feature dimensions

        use_cuda = self.use_cuda

        # ROBUSTNESS: fall back to the CPU path instead of crashing when the
        # CUDA implementation can't actually run (no numba, or no GPU).
        if use_cuda and (not _HAS_NUMBA or not torch.cuda.is_available()):
            print("SoftDTW: CUDA requested but unavailable; using the CPU implementation")
            use_cuda = False

        if use_cuda and (lx > 1024 or ly > 1024):  # We should be able to spawn enough threads in CUDA
            print("SoftDTW: Cannot use CUDA because the sequence length > 1024 (the maximum block size supported by CUDA)")
            use_cuda = False

        # Finally, return the correct function.
        return _SoftDTWCUDA.apply if use_cuda else _SoftDTW.apply

    @staticmethod
    def _euclidean_dist_func(x, y):
        """
        Calculates the Euclidean distance between each element in x and y per timestep
        """
        n = x.size(1)
        m = y.size(1)
        d = x.size(2)
        x = x.unsqueeze(2).expand(-1, n, m, d)
        y = y.unsqueeze(1).expand(-1, n, m, d)
        return torch.pow(x - y, 2).sum(3)

    def forward(self, X, Y):
        """
        Compute the soft-DTW value between X and Y
        :param X: One batch of examples, batch_size x seq_len x dims
        :param Y: The other batch of examples, batch_size x seq_len x dims
        :return: The computed results
        """

        # Check the inputs and get the correct implementation.
        func_dtw = self._get_func_dtw(X, Y)

        if self.normalize:
            # Stack everything up and run in one pass:
            # sdtw(x, y) - (sdtw(x, x) + sdtw(y, y)) / 2
            x = torch.cat([X, X, Y])
            y = torch.cat([Y, X, Y])
            D = self.dist_func(x, y)
            out = func_dtw(D, self.gamma, self.bandwidth)
            out_xy, out_xx, out_yy = torch.split(out, X.shape[0])
            return out_xy - 1 / 2 * (out_xx + out_yy)
        else:
            D_xy = self.dist_func(X, Y)
            return func_dtw(D_xy, self.gamma, self.bandwidth)

# ----------------------------------------------------------------------------------------------------------------------
def timed_run(a, b, sdtw):
    """
    Runs a and b through sdtw, and times the forward and backward passes.
    Assumes that a requires gradients.
    :return: timing, forward result, backward result
    """
    from timeit import default_timer as timer

    # Forward pass.
    start = timer()
    forward = sdtw(a, b)
    end = timer()
    t = end - start

    grad_outputs = torch.ones_like(forward)

    # Backward pass.
    start = timer()
    grads = torch.autograd.grad(forward, a, grad_outputs=grad_outputs)[0]
    end = timer()

    # Total time.
    t += end - start

    return t, forward, grads

# ----------------------------------------------------------------------------------------------------------------------
def profile(batch_size, seq_len_a, seq_len_b, dims, tol_backward):
    """Benchmark the CPU vs. CUDA implementations and verify they agree."""
    sdtw = SoftDTW(False, gamma=1.0, normalize=False)
    sdtw_cuda = SoftDTW(True, gamma=1.0, normalize=False)
    n_iters = 6

    print("Profiling forward() + backward() times for batch_size={}, seq_len_a={}, seq_len_b={}, dims={}...".format(batch_size, seq_len_a, seq_len_b, dims))

    times_cpu = []
    times_gpu = []

    for i in range(n_iters):
        a_cpu = torch.rand((batch_size, seq_len_a, dims), requires_grad=True)
        b_cpu = torch.rand((batch_size, seq_len_b, dims))
        a_gpu = a_cpu.cuda()
        b_gpu = b_cpu.cuda()

        # GPU
        t_gpu, forward_gpu, backward_gpu = timed_run(a_gpu, b_gpu, sdtw_cuda)

        # CPU
        t_cpu, forward_cpu, backward_cpu = timed_run(a_cpu, b_cpu, sdtw)

        # Verify the results agree.
        assert torch.allclose(forward_cpu, forward_gpu.cpu())
        assert torch.allclose(backward_cpu, backward_gpu.cpu(), atol=tol_backward)

        if i > 0:  # Ignore the first run in case of a cold start (timings are off at a cold start of the script)
            times_cpu += [t_cpu]
            times_gpu += [t_gpu]

    # Average and log.
    avg_cpu = np.mean(times_cpu)
    avg_gpu = np.mean(times_gpu)
    print("\tCPU:     ", avg_cpu)
    print("\tGPU:     ", avg_gpu)
    print("\tSpeedup: ", avg_cpu / avg_gpu)
    print()

# ----------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    from timeit import default_timer as timer

    torch.manual_seed(1234)

    profile(128, 17, 15, 2, tol_backward=1e-6)
    profile(512, 64, 64, 2, tol_backward=1e-4)
    profile(512, 256, 256, 2, tol_backward=1e-3)
options), 22 | new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), timeout)) 23 | ]); 24 | } 25 | 26 | var playpens = Array.from(document.querySelectorAll(".playpen")); 27 | if (playpens.length > 0) { 28 | fetch_with_timeout("https://play.rust-lang.org/meta/crates", { 29 | headers: { 30 | 'Content-Type': "application/json", 31 | }, 32 | method: 'POST', 33 | mode: 'cors', 34 | }) 35 | .then(response => response.json()) 36 | .then(response => { 37 | // get list of crates available in the rust playground 38 | let playground_crates = response.crates.map(item => item["id"]); 39 | playpens.forEach(block => handle_crate_list_update(block, playground_crates)); 40 | }); 41 | } 42 | 43 | function handle_crate_list_update(playpen_block, playground_crates) { 44 | // update the play buttons after receiving the response 45 | update_play_button(playpen_block, playground_crates); 46 | 47 | // and install on change listener to dynamically update ACE editors 48 | if (window.ace) { 49 | let code_block = playpen_block.querySelector("code"); 50 | if (code_block.classList.contains("editable")) { 51 | let editor = window.ace.edit(code_block); 52 | editor.addEventListener("change", function (e) { 53 | update_play_button(playpen_block, playground_crates); 54 | }); 55 | // add Ctrl-Enter command to execute rust code 56 | editor.commands.addCommand({ 57 | name: "run", 58 | bindKey: { 59 | win: "Ctrl-Enter", 60 | mac: "Ctrl-Enter" 61 | }, 62 | exec: _editor => run_rust_code(playpen_block) 63 | }); 64 | } 65 | } 66 | } 67 | 68 | // updates the visibility of play button based on `no_run` class and 69 | // used crates vs ones available on http://play.rust-lang.org 70 | function update_play_button(pre_block, playground_crates) { 71 | var play_button = pre_block.querySelector(".play-button"); 72 | 73 | // skip if code is `no_run` 74 | if (pre_block.querySelector('code').classList.contains("no_run")) { 75 | play_button.classList.add("hidden"); 76 | return; 77 | } 78 | 79 | // 
get list of `extern crate`'s from snippet 80 | var txt = playpen_text(pre_block); 81 | var re = /extern\s+crate\s+([a-zA-Z_0-9]+)\s*;/g; 82 | var snippet_crates = []; 83 | var item; 84 | while (item = re.exec(txt)) { 85 | snippet_crates.push(item[1]); 86 | } 87 | 88 | // check if all used crates are available on play.rust-lang.org 89 | var all_available = snippet_crates.every(function (elem) { 90 | return playground_crates.indexOf(elem) > -1; 91 | }); 92 | 93 | if (all_available) { 94 | play_button.classList.remove("hidden"); 95 | } else { 96 | play_button.classList.add("hidden"); 97 | } 98 | } 99 | 100 | function run_rust_code(code_block) { 101 | var result_block = code_block.querySelector(".result"); 102 | if (!result_block) { 103 | result_block = document.createElement('code'); 104 | result_block.className = 'result hljs language-bash'; 105 | 106 | code_block.append(result_block); 107 | } 108 | 109 | let text = playpen_text(code_block); 110 | let classes = code_block.querySelector('code').classList; 111 | let has_2018 = classes.contains("edition2018"); 112 | let edition = has_2018 ? 
"2018" : "2015"; 113 | 114 | var params = { 115 | version: "stable", 116 | optimize: "0", 117 | code: text, 118 | edition: edition 119 | }; 120 | 121 | if (text.indexOf("#![feature") !== -1) { 122 | params.version = "nightly"; 123 | } 124 | 125 | result_block.innerText = "Running..."; 126 | 127 | fetch_with_timeout("https://play.rust-lang.org/evaluate.json", { 128 | headers: { 129 | 'Content-Type': "application/json", 130 | }, 131 | method: 'POST', 132 | mode: 'cors', 133 | body: JSON.stringify(params) 134 | }) 135 | .then(response => response.json()) 136 | .then(response => result_block.innerText = response.result) 137 | .catch(error => result_block.innerText = "Playground Communication: " + error.message); 138 | } 139 | 140 | // Syntax highlighting Configuration 141 | hljs.configure({ 142 | tabReplace: ' ', // 4 spaces 143 | languages: [], // Languages used for auto-detection 144 | }); 145 | 146 | if (window.ace) { 147 | // language-rust class needs to be removed for editable 148 | // blocks or highlightjs will capture events 149 | Array 150 | .from(document.querySelectorAll('code.editable')) 151 | .forEach(function (block) { block.classList.remove('language-rust'); }); 152 | 153 | Array 154 | .from(document.querySelectorAll('code:not(.editable)')) 155 | .forEach(function (block) { hljs.highlightBlock(block); }); 156 | } else { 157 | Array 158 | .from(document.querySelectorAll('code')) 159 | .forEach(function (block) { hljs.highlightBlock(block); }); 160 | } 161 | 162 | // Adding the hljs class gives code blocks the color css 163 | // even if highlighting doesn't apply 164 | Array 165 | .from(document.querySelectorAll('code')) 166 | .forEach(function (block) { block.classList.add('hljs'); }); 167 | 168 | Array.from(document.querySelectorAll("code.language-rust")).forEach(function (block) { 169 | 170 | var lines = Array.from(block.querySelectorAll('.boring')); 171 | // If no lines were hidden, return 172 | if (!lines.length) { return; } 173 | 
block.classList.add("hide-boring"); 174 | 175 | var buttons = document.createElement('div'); 176 | buttons.className = 'buttons'; 177 | buttons.innerHTML = ""; 178 | 179 | // add expand button 180 | var pre_block = block.parentNode; 181 | pre_block.insertBefore(buttons, pre_block.firstChild); 182 | 183 | pre_block.querySelector('.buttons').addEventListener('click', function (e) { 184 | if (e.target.classList.contains('fa-expand')) { 185 | e.target.classList.remove('fa-expand'); 186 | e.target.classList.add('fa-compress'); 187 | e.target.title = 'Hide lines'; 188 | e.target.setAttribute('aria-label', e.target.title); 189 | 190 | block.classList.remove('hide-boring'); 191 | } else if (e.target.classList.contains('fa-compress')) { 192 | e.target.classList.remove('fa-compress'); 193 | e.target.classList.add('fa-expand'); 194 | e.target.title = 'Show hidden lines'; 195 | e.target.setAttribute('aria-label', e.target.title); 196 | 197 | block.classList.add('hide-boring'); 198 | } 199 | }); 200 | }); 201 | 202 | if (window.playpen_copyable) { 203 | Array.from(document.querySelectorAll('pre code')).forEach(function (block) { 204 | var pre_block = block.parentNode; 205 | if (!pre_block.classList.contains('playpen')) { 206 | var buttons = pre_block.querySelector(".buttons"); 207 | if (!buttons) { 208 | buttons = document.createElement('div'); 209 | buttons.className = 'buttons'; 210 | pre_block.insertBefore(buttons, pre_block.firstChild); 211 | } 212 | 213 | var clipButton = document.createElement('button'); 214 | clipButton.className = 'fa fa-copy clip-button'; 215 | clipButton.title = 'Copy to clipboard'; 216 | clipButton.setAttribute('aria-label', clipButton.title); 217 | clipButton.innerHTML = ''; 218 | 219 | buttons.insertBefore(clipButton, buttons.firstChild); 220 | } 221 | }); 222 | } 223 | 224 | // Process playpen code blocks 225 | Array.from(document.querySelectorAll(".playpen")).forEach(function (pre_block) { 226 | // Add play button 227 | var buttons = 
pre_block.querySelector(".buttons"); 228 | if (!buttons) { 229 | buttons = document.createElement('div'); 230 | buttons.className = 'buttons'; 231 | pre_block.insertBefore(buttons, pre_block.firstChild); 232 | } 233 | 234 | var runCodeButton = document.createElement('button'); 235 | runCodeButton.className = 'fa fa-play play-button'; 236 | runCodeButton.hidden = true; 237 | runCodeButton.title = 'Run this code'; 238 | runCodeButton.setAttribute('aria-label', runCodeButton.title); 239 | 240 | buttons.insertBefore(runCodeButton, buttons.firstChild); 241 | runCodeButton.addEventListener('click', function (e) { 242 | run_rust_code(pre_block); 243 | }); 244 | 245 | if (window.playpen_copyable) { 246 | var copyCodeClipboardButton = document.createElement('button'); 247 | copyCodeClipboardButton.className = 'fa fa-copy clip-button'; 248 | copyCodeClipboardButton.innerHTML = ''; 249 | copyCodeClipboardButton.title = 'Copy to clipboard'; 250 | copyCodeClipboardButton.setAttribute('aria-label', copyCodeClipboardButton.title); 251 | 252 | buttons.insertBefore(copyCodeClipboardButton, buttons.firstChild); 253 | } 254 | 255 | let code_block = pre_block.querySelector("code"); 256 | if (window.ace && code_block.classList.contains("editable")) { 257 | var undoChangesButton = document.createElement('button'); 258 | undoChangesButton.className = 'fa fa-history reset-button'; 259 | undoChangesButton.title = 'Undo changes'; 260 | undoChangesButton.setAttribute('aria-label', undoChangesButton.title); 261 | 262 | buttons.insertBefore(undoChangesButton, buttons.firstChild); 263 | 264 | undoChangesButton.addEventListener('click', function () { 265 | let editor = window.ace.edit(code_block); 266 | editor.setValue(editor.originalCode); 267 | editor.clearSelection(); 268 | }); 269 | } 270 | }); 271 | })(); 272 | 273 | (function themes() { 274 | var html = document.querySelector('html'); 275 | var themeToggleButton = document.getElementById('theme-toggle'); 276 | var themePopup = 
document.getElementById('theme-list'); 277 | var themeColorMetaTag = document.querySelector('meta[name="theme-color"]'); 278 | var stylesheets = { 279 | ayuHighlight: document.querySelector("[href$='ayu-highlight.css']"), 280 | tomorrowNight: document.querySelector("[href$='tomorrow-night.css']"), 281 | highlight: document.querySelector("[href$='highlight.css']"), 282 | }; 283 | 284 | function showThemes() { 285 | themePopup.style.display = 'block'; 286 | themeToggleButton.setAttribute('aria-expanded', true); 287 | themePopup.querySelector("button#" + document.body.className).focus(); 288 | } 289 | 290 | function hideThemes() { 291 | themePopup.style.display = 'none'; 292 | themeToggleButton.setAttribute('aria-expanded', false); 293 | themeToggleButton.focus(); 294 | } 295 | 296 | function set_theme(theme, store = true) { 297 | let ace_theme; 298 | 299 | if (theme == 'coal' || theme == 'navy') { 300 | stylesheets.ayuHighlight.disabled = true; 301 | stylesheets.tomorrowNight.disabled = false; 302 | stylesheets.highlight.disabled = true; 303 | 304 | ace_theme = "ace/theme/tomorrow_night"; 305 | } else if (theme == 'ayu') { 306 | stylesheets.ayuHighlight.disabled = false; 307 | stylesheets.tomorrowNight.disabled = true; 308 | stylesheets.highlight.disabled = true; 309 | ace_theme = "ace/theme/tomorrow_night"; 310 | } else { 311 | stylesheets.ayuHighlight.disabled = true; 312 | stylesheets.tomorrowNight.disabled = true; 313 | stylesheets.highlight.disabled = false; 314 | ace_theme = "ace/theme/dawn"; 315 | } 316 | 317 | setTimeout(function () { 318 | themeColorMetaTag.content = getComputedStyle(document.body).backgroundColor; 319 | }, 1); 320 | 321 | if (window.ace && window.editors) { 322 | window.editors.forEach(function (editor) { 323 | editor.setTheme(ace_theme); 324 | }); 325 | } 326 | 327 | var previousTheme; 328 | try { previousTheme = localStorage.getItem('mdbook-theme'); } catch (e) { } 329 | if (previousTheme === null || previousTheme === undefined) { 
previousTheme = default_theme; } 330 | 331 | if (store) { 332 | try { localStorage.setItem('mdbook-theme', theme); } catch (e) { } 333 | } 334 | 335 | html.classList.remove(previousTheme); 336 | html.classList.add(theme); 337 | } 338 | 339 | // Set theme 340 | var theme; 341 | try { theme = localStorage.getItem('mdbook-theme'); } catch(e) { } 342 | if (theme === null || theme === undefined) { theme = default_theme; } 343 | 344 | set_theme(theme, false); 345 | 346 | themeToggleButton.addEventListener('click', function () { 347 | if (themePopup.style.display === 'block') { 348 | hideThemes(); 349 | } else { 350 | showThemes(); 351 | } 352 | }); 353 | 354 | themePopup.addEventListener('click', function (e) { 355 | var theme = e.target.id || e.target.parentElement.id; 356 | set_theme(theme); 357 | }); 358 | 359 | themePopup.addEventListener('focusout', function(e) { 360 | // e.relatedTarget is null in Safari and Firefox on macOS (see workaround below) 361 | if (!!e.relatedTarget && !themeToggleButton.contains(e.relatedTarget) && !themePopup.contains(e.relatedTarget)) { 362 | hideThemes(); 363 | } 364 | }); 365 | 366 | // Should not be needed, but it works around an issue on macOS & iOS: https://github.com/rust-lang/mdBook/issues/628 367 | document.addEventListener('click', function(e) { 368 | if (themePopup.style.display === 'block' && !themeToggleButton.contains(e.target) && !themePopup.contains(e.target)) { 369 | hideThemes(); 370 | } 371 | }); 372 | 373 | document.addEventListener('keydown', function (e) { 374 | if (e.altKey || e.ctrlKey || e.metaKey || e.shiftKey) { return; } 375 | if (!themePopup.contains(e.target)) { return; } 376 | 377 | switch (e.key) { 378 | case 'Escape': 379 | e.preventDefault(); 380 | hideThemes(); 381 | break; 382 | case 'ArrowUp': 383 | e.preventDefault(); 384 | var li = document.activeElement.parentElement; 385 | if (li && li.previousElementSibling) { 386 | li.previousElementSibling.querySelector('button').focus(); 387 | } 388 | break; 
389 | case 'ArrowDown': 390 | e.preventDefault(); 391 | var li = document.activeElement.parentElement; 392 | if (li && li.nextElementSibling) { 393 | li.nextElementSibling.querySelector('button').focus(); 394 | } 395 | break; 396 | case 'Home': 397 | e.preventDefault(); 398 | themePopup.querySelector('li:first-child button').focus(); 399 | break; 400 | case 'End': 401 | e.preventDefault(); 402 | themePopup.querySelector('li:last-child button').focus(); 403 | break; 404 | } 405 | }); 406 | })(); 407 | 408 | (function sidebar() { 409 | var html = document.querySelector("html"); 410 | var sidebar = document.getElementById("sidebar"); 411 | var sidebarScrollBox = document.querySelector(".sidebar-scrollbox"); 412 | var sidebarLinks = document.querySelectorAll('#sidebar a'); 413 | var sidebarToggleButton = document.getElementById("sidebar-toggle"); 414 | var sidebarResizeHandle = document.getElementById("sidebar-resize-handle"); 415 | var firstContact = null; 416 | 417 | function showSidebar() { 418 | html.classList.remove('sidebar-hidden') 419 | html.classList.add('sidebar-visible'); 420 | Array.from(sidebarLinks).forEach(function (link) { 421 | link.setAttribute('tabIndex', 0); 422 | }); 423 | sidebarToggleButton.setAttribute('aria-expanded', true); 424 | sidebar.setAttribute('aria-hidden', false); 425 | try { localStorage.setItem('mdbook-sidebar', 'visible'); } catch (e) { } 426 | } 427 | 428 | 429 | var sidebarAnchorToggles = document.querySelectorAll('#sidebar a.toggle'); 430 | 431 | function toggleSection(ev) { 432 | ev.currentTarget.parentElement.classList.toggle('expanded'); 433 | } 434 | 435 | Array.from(sidebarAnchorToggles).forEach(function (el) { 436 | el.addEventListener('click', toggleSection); 437 | }); 438 | 439 | function hideSidebar() { 440 | html.classList.remove('sidebar-visible') 441 | html.classList.add('sidebar-hidden'); 442 | Array.from(sidebarLinks).forEach(function (link) { 443 | link.setAttribute('tabIndex', -1); 444 | }); 445 | 
sidebarToggleButton.setAttribute('aria-expanded', false); 446 | sidebar.setAttribute('aria-hidden', true); 447 | try { localStorage.setItem('mdbook-sidebar', 'hidden'); } catch (e) { } 448 | } 449 | 450 | // Toggle sidebar 451 | sidebarToggleButton.addEventListener('click', function sidebarToggle() { 452 | if (html.classList.contains("sidebar-hidden")) { 453 | showSidebar(); 454 | } else if (html.classList.contains("sidebar-visible")) { 455 | hideSidebar(); 456 | } else { 457 | if (getComputedStyle(sidebar)['transform'] === 'none') { 458 | hideSidebar(); 459 | } else { 460 | showSidebar(); 461 | } 462 | } 463 | }); 464 | 465 | sidebarResizeHandle.addEventListener('mousedown', initResize, false); 466 | 467 | function initResize(e) { 468 | window.addEventListener('mousemove', resize, false); 469 | window.addEventListener('mouseup', stopResize, false); 470 | html.classList.add('sidebar-resizing'); 471 | } 472 | function resize(e) { 473 | document.documentElement.style.setProperty('--sidebar-width', (e.clientX - sidebar.offsetLeft) + 'px'); 474 | } 475 | //on mouseup remove windows functions mousemove & mouseup 476 | function stopResize(e) { 477 | html.classList.remove('sidebar-resizing'); 478 | window.removeEventListener('mousemove', resize, false); 479 | window.removeEventListener('mouseup', stopResize, false); 480 | } 481 | 482 | document.addEventListener('touchstart', function (e) { 483 | firstContact = { 484 | x: e.touches[0].clientX, 485 | time: Date.now() 486 | }; 487 | }, { passive: true }); 488 | 489 | document.addEventListener('touchmove', function (e) { 490 | if (!firstContact) 491 | return; 492 | 493 | var curX = e.touches[0].clientX; 494 | var xDiff = curX - firstContact.x, 495 | tDiff = Date.now() - firstContact.time; 496 | 497 | if (tDiff < 250 && Math.abs(xDiff) >= 150) { 498 | if (xDiff >= 0 && firstContact.x < Math.min(document.body.clientWidth * 0.25, 300)) 499 | showSidebar(); 500 | else if (xDiff < 0 && curX < 300) 501 | hideSidebar(); 502 | 503 | 
firstContact = null; 504 | } 505 | }, { passive: true }); 506 | 507 | // Scroll sidebar to current active section 508 | var activeSection = document.getElementById("sidebar").querySelector(".active"); 509 | if (activeSection) { 510 | sidebarScrollBox.scrollTop = activeSection.offsetTop; 511 | } 512 | })(); 513 | 514 | (function chapterNavigation() { 515 | document.addEventListener('keydown', function (e) { 516 | if (e.altKey || e.ctrlKey || e.metaKey || e.shiftKey) { return; } 517 | if (window.search && window.search.hasFocus()) { return; } 518 | 519 | switch (e.key) { 520 | case 'ArrowRight': 521 | e.preventDefault(); 522 | var nextButton = document.querySelector('.nav-chapters.next'); 523 | if (nextButton) { 524 | window.location.href = nextButton.href; 525 | } 526 | break; 527 | case 'ArrowLeft': 528 | e.preventDefault(); 529 | var previousButton = document.querySelector('.nav-chapters.previous'); 530 | if (previousButton) { 531 | window.location.href = previousButton.href; 532 | } 533 | break; 534 | } 535 | }); 536 | })(); 537 | 538 | (function clipboard() { 539 | var clipButtons = document.querySelectorAll('.clip-button'); 540 | 541 | function hideTooltip(elem) { 542 | elem.firstChild.innerText = ""; 543 | elem.className = 'fa fa-copy clip-button'; 544 | } 545 | 546 | function showTooltip(elem, msg) { 547 | elem.firstChild.innerText = msg; 548 | elem.className = 'fa fa-copy tooltipped'; 549 | } 550 | 551 | var clipboardSnippets = new ClipboardJS('.clip-button', { 552 | text: function (trigger) { 553 | hideTooltip(trigger); 554 | let playpen = trigger.closest("pre"); 555 | return playpen_text(playpen); 556 | } 557 | }); 558 | 559 | Array.from(clipButtons).forEach(function (clipButton) { 560 | clipButton.addEventListener('mouseout', function (e) { 561 | hideTooltip(e.currentTarget); 562 | }); 563 | }); 564 | 565 | clipboardSnippets.on('success', function (e) { 566 | e.clearSelection(); 567 | showTooltip(e.trigger, "Copied!"); 568 | }); 569 | 570 | 
clipboardSnippets.on('error', function (e) { 571 | showTooltip(e.trigger, "Clipboard error!"); 572 | }); 573 | })(); 574 | 575 | (function scrollToTop () { 576 | var menuTitle = document.querySelector('.menu-title'); 577 | 578 | menuTitle.addEventListener('click', function () { 579 | document.scrollingElement.scrollTo({ top: 0, behavior: 'smooth' }); 580 | }); 581 | })(); 582 | 583 | (function autoHideMenu() { 584 | var menu = document.getElementById('menu-bar'); 585 | 586 | var previousScrollTop = document.scrollingElement.scrollTop; 587 | 588 | document.addEventListener('scroll', function () { 589 | if (menu.classList.contains('folded') && document.scrollingElement.scrollTop < previousScrollTop) { 590 | menu.classList.remove('folded'); 591 | } else if (!menu.classList.contains('folded') && document.scrollingElement.scrollTop > previousScrollTop) { 592 | menu.classList.add('folded'); 593 | } 594 | 595 | if (!menu.classList.contains('bordered') && document.scrollingElement.scrollTop > 0) { 596 | menu.classList.add('bordered'); 597 | } 598 | 599 | if (menu.classList.contains('bordered') && document.scrollingElement.scrollTop === 0) { 600 | menu.classList.remove('bordered'); 601 | } 602 | 603 | previousScrollTop = Math.max(document.scrollingElement.scrollTop, 0); 604 | }, { passive: true }); 605 | })(); 606 | -------------------------------------------------------------------------------- /docs/static/FontAwesome/css/font-awesome.min.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome 3 | * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) 4 | */@font-face{font-family:'FontAwesome';src:url('../fonts/fontawesome-webfont.eot?v=4.7.0');src:url('../fonts/fontawesome-webfont.eot?#iefix&v=4.7.0') format('embedded-opentype'),url('../fonts/fontawesome-webfont.woff2?v=4.7.0') format('woff2'),url('../fonts/fontawesome-webfont.woff?v=4.7.0') format('woff'),url('../fonts/fontawesome-webfont.ttf?v=4.7.0') format('truetype'),url('../fonts/fontawesome-webfont.svg?v=4.7.0#fontawesomeregular') format('svg');font-weight:normal;font-style:normal}.fa{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571429em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14285714em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14285714em;width:2.14285714em;top:.14285714em;text-align:center}.fa-li.fa-lg{left:-1.85714286em}.fa-border{padding:.2em .25em .15em;border:solid .08em #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left{margin-right:.3em}.fa.fa-pull-right{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left{margin-right:.3em}.fa.pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}.fa-pulse{-webkit-animation:fa-spin 1s infinite steps(8);animation:fa-spin 1s infinite steps(8)}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scale(-1, 1);-ms-transform:scale(-1, 1);transform:scale(-1, 1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(1, -1);-ms-transform:scale(1, -1);transform:scale(1, -1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root 
.fa-flip-vertical{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:"\f000"}.fa-music:before{content:"\f001"}.fa-search:before{content:"\f002"}.fa-envelope-o:before{content:"\f003"}.fa-heart:before{content:"\f004"}.fa-star:before{content:"\f005"}.fa-star-o:before{content:"\f006"}.fa-user:before{content:"\f007"}.fa-film:before{content:"\f008"}.fa-th-large:before{content:"\f009"}.fa-th:before{content:"\f00a"}.fa-th-list:before{content:"\f00b"}.fa-check:before{content:"\f00c"}.fa-remove:before,.fa-close:before,.fa-times:before{content:"\f00d"}.fa-search-plus:before{content:"\f00e"}.fa-search-minus:before{content:"\f010"}.fa-power-off:before{content:"\f011"}.fa-signal:before{content:"\f012"}.fa-gear:before,.fa-cog:before{content:"\f013"}.fa-trash-o:before{content:"\f014"}.fa-home:before{content:"\f015"}.fa-file-o:before{content:"\f016"}.fa-clock-o:before{content:"\f017"}.fa-road:before{content:"\f018"}.fa-download:before{content:"\f019"}.fa-arrow-circle-o-down:before{content:"\f01a"}.fa-arrow-circle-o-up:before{content:"\f01b"}.fa-inbox:before{content:"\f01c"}.fa-play-circle-o:before{content:"\f01d"}.fa-rotate-right:before,.fa-repeat:before{content:"\f01e"}.fa-refresh:before{content:"\f021"}.fa-list-alt:before{content:"\f022"}.fa-lock:before{content:"\f023"}.fa-flag:before{content:"\f024"}.fa-headphones:before{content:"\f025"}.fa-volume-off:before{content:"\f026"}.fa-volume-down:before{content:"\f027"}.fa-volume-up:before{content:"\f028"}.fa-qrcode:before{content:"\f029"}.fa-barcode:before{content:"\f02a"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-book:before{content:"\f02d"}.fa-bookmark:before{content:"\f02e"}.fa-print:before{content:"\f02f"}.fa-camera:before{content:"\f030"}.fa-font:before{c
ontent:"\f031"}.fa-bold:before{content:"\f032"}.fa-italic:before{content:"\f033"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-align-left:before{content:"\f036"}.fa-align-center:before{content:"\f037"}.fa-align-right:before{content:"\f038"}.fa-align-justify:before{content:"\f039"}.fa-list:before{content:"\f03a"}.fa-dedent:before,.fa-outdent:before{content:"\f03b"}.fa-indent:before{content:"\f03c"}.fa-video-camera:before{content:"\f03d"}.fa-photo:before,.fa-image:before,.fa-picture-o:before{content:"\f03e"}.fa-pencil:before{content:"\f040"}.fa-map-marker:before{content:"\f041"}.fa-adjust:before{content:"\f042"}.fa-tint:before{content:"\f043"}.fa-edit:before,.fa-pencil-square-o:before{content:"\f044"}.fa-share-square-o:before{content:"\f045"}.fa-check-square-o:before{content:"\f046"}.fa-arrows:before{content:"\f047"}.fa-step-backward:before{content:"\f048"}.fa-fast-backward:before{content:"\f049"}.fa-backward:before{content:"\f04a"}.fa-play:before{content:"\f04b"}.fa-pause:before{content:"\f04c"}.fa-stop:before{content:"\f04d"}.fa-forward:before{content:"\f04e"}.fa-fast-forward:before{content:"\f050"}.fa-step-forward:before{content:"\f051"}.fa-eject:before{content:"\f052"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-plus-circle:before{content:"\f055"}.fa-minus-circle:before{content:"\f056"}.fa-times-circle:before{content:"\f057"}.fa-check-circle:before{content:"\f058"}.fa-question-circle:before{content:"\f059"}.fa-info-circle:before{content:"\f05a"}.fa-crosshairs:before{content:"\f05b"}.fa-times-circle-o:before{content:"\f05c"}.fa-check-circle-o:before{content:"\f05d"}.fa-ban:before{content:"\f05e"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrow-down:before{content:"\f063"}.fa-mail-forward:before,.fa-share:before{content:"\f064"}.fa-expand:before{content:"\f065"}.fa-compress:before{content:"\f066"}.fa-plus:before{content
:"\f067"}.fa-minus:before{content:"\f068"}.fa-asterisk:before{content:"\f069"}.fa-exclamation-circle:before{content:"\f06a"}.fa-gift:before{content:"\f06b"}.fa-leaf:before{content:"\f06c"}.fa-fire:before{content:"\f06d"}.fa-eye:before{content:"\f06e"}.fa-eye-slash:before{content:"\f070"}.fa-warning:before,.fa-exclamation-triangle:before{content:"\f071"}.fa-plane:before{content:"\f072"}.fa-calendar:before{content:"\f073"}.fa-random:before{content:"\f074"}.fa-comment:before{content:"\f075"}.fa-magnet:before{content:"\f076"}.fa-chevron-up:before{content:"\f077"}.fa-chevron-down:before{content:"\f078"}.fa-retweet:before{content:"\f079"}.fa-shopping-cart:before{content:"\f07a"}.fa-folder:before{content:"\f07b"}.fa-folder-open:before{content:"\f07c"}.fa-arrows-v:before{content:"\f07d"}.fa-arrows-h:before{content:"\f07e"}.fa-bar-chart-o:before,.fa-bar-chart:before{content:"\f080"}.fa-twitter-square:before{content:"\f081"}.fa-facebook-square:before{content:"\f082"}.fa-camera-retro:before{content:"\f083"}.fa-key:before{content:"\f084"}.fa-gears:before,.fa-cogs:before{content:"\f085"}.fa-comments:before{content:"\f086"}.fa-thumbs-o-up:before{content:"\f087"}.fa-thumbs-o-down:before{content:"\f088"}.fa-star-half:before{content:"\f089"}.fa-heart-o:before{content:"\f08a"}.fa-sign-out:before{content:"\f08b"}.fa-linkedin-square:before{content:"\f08c"}.fa-thumb-tack:before{content:"\f08d"}.fa-external-link:before{content:"\f08e"}.fa-sign-in:before{content:"\f090"}.fa-trophy:before{content:"\f091"}.fa-github-square:before{content:"\f092"}.fa-upload:before{content:"\f093"}.fa-lemon-o:before{content:"\f094"}.fa-phone:before{content:"\f095"}.fa-square-o:before{content:"\f096"}.fa-bookmark-o:before{content:"\f097"}.fa-phone-square:before{content:"\f098"}.fa-twitter:before{content:"\f099"}.fa-facebook-f:before,.fa-facebook:before{content:"\f09a"}.fa-github:before{content:"\f09b"}.fa-unlock:before{content:"\f09c"}.fa-credit-card:before{content:"\f09d"}.fa-feed:before,.fa-rss:before{conten
t:"\f09e"}.fa-hdd-o:before{content:"\f0a0"}.fa-bullhorn:before{content:"\f0a1"}.fa-bell:before{content:"\f0f3"}.fa-certificate:before{content:"\f0a3"}.fa-hand-o-right:before{content:"\f0a4"}.fa-hand-o-left:before{content:"\f0a5"}.fa-hand-o-up:before{content:"\f0a6"}.fa-hand-o-down:before{content:"\f0a7"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-globe:before{content:"\f0ac"}.fa-wrench:before{content:"\f0ad"}.fa-tasks:before{content:"\f0ae"}.fa-filter:before{content:"\f0b0"}.fa-briefcase:before{content:"\f0b1"}.fa-arrows-alt:before{content:"\f0b2"}.fa-group:before,.fa-users:before{content:"\f0c0"}.fa-chain:before,.fa-link:before{content:"\f0c1"}.fa-cloud:before{content:"\f0c2"}.fa-flask:before{content:"\f0c3"}.fa-cut:before,.fa-scissors:before{content:"\f0c4"}.fa-copy:before,.fa-files-o:before{content:"\f0c5"}.fa-paperclip:before{content:"\f0c6"}.fa-save:before,.fa-floppy-o:before{content:"\f0c7"}.fa-square:before{content:"\f0c8"}.fa-navicon:before,.fa-reorder:before,.fa-bars:before{content:"\f0c9"}.fa-list-ul:before{content:"\f0ca"}.fa-list-ol:before{content:"\f0cb"}.fa-strikethrough:before{content:"\f0cc"}.fa-underline:before{content:"\f0cd"}.fa-table:before{content:"\f0ce"}.fa-magic:before{content:"\f0d0"}.fa-truck:before{content:"\f0d1"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-square:before{content:"\f0d3"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-plus:before{content:"\f0d5"}.fa-money:before{content:"\f0d6"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-up:before{content:"\f0d8"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-columns:before{content:"\f0db"}.fa-unsorted:before,.fa-sort:before{content:"\f0dc"}.fa-sort-down:before,.fa-sort-desc:before{content:"\f0dd"}.fa-sort-up:before,.fa-sort-asc:before{content:"\f0de"}.fa-envelope:before{content:"\f0e0"}.fa-linkedin:b
efore{content:"\f0e1"}.fa-rotate-left:before,.fa-undo:before{content:"\f0e2"}.fa-legal:before,.fa-gavel:before{content:"\f0e3"}.fa-dashboard:before,.fa-tachometer:before{content:"\f0e4"}.fa-comment-o:before{content:"\f0e5"}.fa-comments-o:before{content:"\f0e6"}.fa-flash:before,.fa-bolt:before{content:"\f0e7"}.fa-sitemap:before{content:"\f0e8"}.fa-umbrella:before{content:"\f0e9"}.fa-paste:before,.fa-clipboard:before{content:"\f0ea"}.fa-lightbulb-o:before{content:"\f0eb"}.fa-exchange:before{content:"\f0ec"}.fa-cloud-download:before{content:"\f0ed"}.fa-cloud-upload:before{content:"\f0ee"}.fa-user-md:before{content:"\f0f0"}.fa-stethoscope:before{content:"\f0f1"}.fa-suitcase:before{content:"\f0f2"}.fa-bell-o:before{content:"\f0a2"}.fa-coffee:before{content:"\f0f4"}.fa-cutlery:before{content:"\f0f5"}.fa-file-text-o:before{content:"\f0f6"}.fa-building-o:before{content:"\f0f7"}.fa-hospital-o:before{content:"\f0f8"}.fa-ambulance:before{content:"\f0f9"}.fa-medkit:before{content:"\f0fa"}.fa-fighter-jet:before{content:"\f0fb"}.fa-beer:before{content:"\f0fc"}.fa-h-square:before{content:"\f0fd"}.fa-plus-square:before{content:"\f0fe"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angle-down:before{content:"\f107"}.fa-desktop:before{content:"\f108"}.fa-laptop:before{content:"\f109"}.fa-tablet:before{content:"\f10a"}.fa-mobile-phone:before,.fa-mobile:before{content:"\f10b"}.fa-circle-o:before{content:"\f10c"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-spinner:before{content:"\f110"}.fa-circle:before{content:"\f111"}.fa-mail-reply:before,.fa-reply:before{content:"\f112"}.fa-github-alt:before{content:"\f113"}.fa-folder-o:before{content:"\f114"}.fa-folder-open-o:before{content:"\f115"}.fa-smile-o:before{c
ontent:"\f118"}.fa-frown-o:before{content:"\f119"}.fa-meh-o:before{content:"\f11a"}.fa-gamepad:before{content:"\f11b"}.fa-keyboard-o:before{content:"\f11c"}.fa-flag-o:before{content:"\f11d"}.fa-flag-checkered:before{content:"\f11e"}.fa-terminal:before{content:"\f120"}.fa-code:before{content:"\f121"}.fa-mail-reply-all:before,.fa-reply-all:before{content:"\f122"}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:"\f123"}.fa-location-arrow:before{content:"\f124"}.fa-crop:before{content:"\f125"}.fa-code-fork:before{content:"\f126"}.fa-unlink:before,.fa-chain-broken:before{content:"\f127"}.fa-question:before{content:"\f128"}.fa-info:before{content:"\f129"}.fa-exclamation:before{content:"\f12a"}.fa-superscript:before{content:"\f12b"}.fa-subscript:before{content:"\f12c"}.fa-eraser:before{content:"\f12d"}.fa-puzzle-piece:before{content:"\f12e"}.fa-microphone:before{content:"\f130"}.fa-microphone-slash:before{content:"\f131"}.fa-shield:before{content:"\f132"}.fa-calendar-o:before{content:"\f133"}.fa-fire-extinguisher:before{content:"\f134"}.fa-rocket:before{content:"\f135"}.fa-maxcdn:before{content:"\f136"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-html5:before{content:"\f13b"}.fa-css3:before{content:"\f13c"}.fa-anchor:before{content:"\f13d"}.fa-unlock-alt:before{content:"\f13e"}.fa-bullseye:before{content:"\f140"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-rss-square:before{content:"\f143"}.fa-play-circle:before{content:"\f144"}.fa-ticket:before{content:"\f145"}.fa-minus-square:before{content:"\f146"}.fa-minus-square-o:before{content:"\f147"}.fa-level-up:before{content:"\f148"}.fa-level-down:before{content:"\f149"}.fa-check-square:before{content:"\f14a"}.fa-pencil-square:before{content:"\f14b"}.fa-external-link-square:before{content:"\f14c"}.fa-share-square:bef
ore{content:"\f14d"}.fa-compass:before{content:"\f14e"}.fa-toggle-down:before,.fa-caret-square-o-down:before{content:"\f150"}.fa-toggle-up:before,.fa-caret-square-o-up:before{content:"\f151"}.fa-toggle-right:before,.fa-caret-square-o-right:before{content:"\f152"}.fa-euro:before,.fa-eur:before{content:"\f153"}.fa-gbp:before{content:"\f154"}.fa-dollar:before,.fa-usd:before{content:"\f155"}.fa-rupee:before,.fa-inr:before{content:"\f156"}.fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:"\f157"}.fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:"\f158"}.fa-won:before,.fa-krw:before{content:"\f159"}.fa-bitcoin:before,.fa-btc:before{content:"\f15a"}.fa-file:before{content:"\f15b"}.fa-file-text:before{content:"\f15c"}.fa-sort-alpha-asc:before{content:"\f15d"}.fa-sort-alpha-desc:before{content:"\f15e"}.fa-sort-amount-asc:before{content:"\f160"}.fa-sort-amount-desc:before{content:"\f161"}.fa-sort-numeric-asc:before{content:"\f162"}.fa-sort-numeric-desc:before{content:"\f163"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbs-down:before{content:"\f165"}.fa-youtube-square:before{content:"\f166"}.fa-youtube:before{content:"\f167"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-youtube-play:before{content:"\f16a"}.fa-dropbox:before{content:"\f16b"}.fa-stack-overflow:before{content:"\f16c"}.fa-instagram:before{content:"\f16d"}.fa-flickr:before{content:"\f16e"}.fa-adn:before{content:"\f170"}.fa-bitbucket:before{content:"\f171"}.fa-bitbucket-square:before{content:"\f172"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-long-arrow-down:before{content:"\f175"}.fa-long-arrow-up:before{content:"\f176"}.fa-long-arrow-left:before{content:"\f177"}.fa-long-arrow-right:before{content:"\f178"}.fa-apple:before{content:"\f179"}.fa-windows:before{content:"\f17a"}.fa-android:before{content:"\f17b"}.fa-linux:before{content:"\f17c"}.fa-dribbble:before{content:"\f17d"}.fa-skype:before{content:"\f17e"}.fa-foursquare:befo
re{content:"\f180"}.fa-trello:before{content:"\f181"}.fa-female:before{content:"\f182"}.fa-male:before{content:"\f183"}.fa-gittip:before,.fa-gratipay:before{content:"\f184"}.fa-sun-o:before{content:"\f185"}.fa-moon-o:before{content:"\f186"}.fa-archive:before{content:"\f187"}.fa-bug:before{content:"\f188"}.fa-vk:before{content:"\f189"}.fa-weibo:before{content:"\f18a"}.fa-renren:before{content:"\f18b"}.fa-pagelines:before{content:"\f18c"}.fa-stack-exchange:before{content:"\f18d"}.fa-arrow-circle-o-right:before{content:"\f18e"}.fa-arrow-circle-o-left:before{content:"\f190"}.fa-toggle-left:before,.fa-caret-square-o-left:before{content:"\f191"}.fa-dot-circle-o:before{content:"\f192"}.fa-wheelchair:before{content:"\f193"}.fa-vimeo-square:before{content:"\f194"}.fa-turkish-lira:before,.fa-try:before{content:"\f195"}.fa-plus-square-o:before{content:"\f196"}.fa-space-shuttle:before{content:"\f197"}.fa-slack:before{content:"\f198"}.fa-envelope-square:before{content:"\f199"}.fa-wordpress:before{content:"\f19a"}.fa-openid:before{content:"\f19b"}.fa-institution:before,.fa-bank:before,.fa-university:before{content:"\f19c"}.fa-mortar-board:before,.fa-graduation-cap:before{content:"\f19d"}.fa-yahoo:before{content:"\f19e"}.fa-google:before{content:"\f1a0"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-square:before{content:"\f1a2"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-stumbleupon:before{content:"\f1a4"}.fa-delicious:before{content:"\f1a5"}.fa-digg:before{content:"\f1a6"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-drupal:before{content:"\f1a9"}.fa-joomla:before{content:"\f1aa"}.fa-language:before{content:"\f1ab"}.fa-fax:before{content:"\f1ac"}.fa-building:before{content:"\f1ad"}.fa-child:before{content:"\f1ae"}.fa-paw:before{content:"\f1b0"}.fa-spoon:before{content:"\f1b1"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-steam:before{co
ntent:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-recycle:before{content:"\f1b8"}.fa-automobile:before,.fa-car:before{content:"\f1b9"}.fa-cab:before,.fa-taxi:before{content:"\f1ba"}.fa-tree:before{content:"\f1bb"}.fa-spotify:before{content:"\f1bc"}.fa-deviantart:before{content:"\f1bd"}.fa-soundcloud:before{content:"\f1be"}.fa-database:before{content:"\f1c0"}.fa-file-pdf-o:before{content:"\f1c1"}.fa-file-word-o:before{content:"\f1c2"}.fa-file-excel-o:before{content:"\f1c3"}.fa-file-powerpoint-o:before{content:"\f1c4"}.fa-file-photo-o:before,.fa-file-picture-o:before,.fa-file-image-o:before{content:"\f1c5"}.fa-file-zip-o:before,.fa-file-archive-o:before{content:"\f1c6"}.fa-file-sound-o:before,.fa-file-audio-o:before{content:"\f1c7"}.fa-file-movie-o:before,.fa-file-video-o:before{content:"\f1c8"}.fa-file-code-o:before{content:"\f1c9"}.fa-vine:before{content:"\f1ca"}.fa-codepen:before{content:"\f1cb"}.fa-jsfiddle:before{content:"\f1cc"}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-saver:before,.fa-support:before,.fa-life-ring:before{content:"\f1cd"}.fa-circle-o-notch:before{content:"\f1ce"}.fa-ra:before,.fa-resistance:before,.fa-rebel:before{content:"\f1d0"}.fa-ge:before,.fa-empire:before{content:"\f1d1"}.fa-git-square:before{content:"\f1d2"}.fa-git:before{content:"\f1d3"}.fa-y-combinator-square:before,.fa-yc-square:before,.fa-hacker-news:before{content:"\f1d4"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-qq:before{content:"\f1d6"}.fa-wechat:before,.fa-weixin:before{content:"\f1d7"}.fa-send:before,.fa-paper-plane:before{content:"\f1d8"}.fa-send-o:before,.fa-paper-plane-o:before{content:"\f1d9"}.fa-history:before{content:"\f1da"}.fa-circle-thin:before{content:"\f1db"}.fa-header:before{content:"\f1dc"}.fa-paragraph:before{content:"\f1dd"}.fa-sliders:before{content:"\f1de"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-bomb:before{content:"\f1e2"}.fa-soccer-ball-o:before,.fa-futbol-o:before{content:"\f1e3"}.fa-tty:before{c
ontent:"\f1e4"}.fa-binoculars:before{content:"\f1e5"}.fa-plug:before{content:"\f1e6"}.fa-slideshare:before{content:"\f1e7"}.fa-twitch:before{content:"\f1e8"}.fa-yelp:before{content:"\f1e9"}.fa-newspaper-o:before{content:"\f1ea"}.fa-wifi:before{content:"\f1eb"}.fa-calculator:before{content:"\f1ec"}.fa-paypal:before{content:"\f1ed"}.fa-google-wallet:before{content:"\f1ee"}.fa-cc-visa:before{content:"\f1f0"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-bell-slash:before{content:"\f1f6"}.fa-bell-slash-o:before{content:"\f1f7"}.fa-trash:before{content:"\f1f8"}.fa-copyright:before{content:"\f1f9"}.fa-at:before{content:"\f1fa"}.fa-eyedropper:before{content:"\f1fb"}.fa-paint-brush:before{content:"\f1fc"}.fa-birthday-cake:before{content:"\f1fd"}.fa-area-chart:before{content:"\f1fe"}.fa-pie-chart:before{content:"\f200"}.fa-line-chart:before{content:"\f201"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-bicycle:before{content:"\f206"}.fa-bus:before{content:"\f207"}.fa-ioxhost:before{content:"\f208"}.fa-angellist:before{content:"\f209"}.fa-cc:before{content:"\f20a"}.fa-shekel:before,.fa-sheqel:before,.fa-ils:before{content:"\f20b"}.fa-meanpath:before{content:"\f20c"}.fa-buysellads:before{content:"\f20d"}.fa-connectdevelop:before{content:"\f20e"}.fa-dashcube:before{content:"\f210"}.fa-forumbee:before{content:"\f211"}.fa-leanpub:before{content:"\f212"}.fa-sellsy:before{content:"\f213"}.fa-shirtsinbulk:before{content:"\f214"}.fa-simplybuilt:before{content:"\f215"}.fa-skyatlas:before{content:"\f216"}.fa-cart-plus:before{content:"\f217"}.fa-cart-arrow-down:before{content:"\f218"}.fa-diamond:before{content:"\f219"}.fa-ship:before{content:"\f21a"}.fa-user-secret:before{content:"\f21b"}.fa-motorcycle:before{content:"\f21c"}.fa-street-vi
ew:before{content:"\f21d"}.fa-heartbeat:before{content:"\f21e"}.fa-venus:before{content:"\f221"}.fa-mars:before{content:"\f222"}.fa-mercury:before{content:"\f223"}.fa-intersex:before,.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-venus-double:before{content:"\f226"}.fa-mars-double:before{content:"\f227"}.fa-venus-mars:before{content:"\f228"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-neuter:before{content:"\f22c"}.fa-genderless:before{content:"\f22d"}.fa-facebook-official:before{content:"\f230"}.fa-pinterest-p:before{content:"\f231"}.fa-whatsapp:before{content:"\f232"}.fa-server:before{content:"\f233"}.fa-user-plus:before{content:"\f234"}.fa-user-times:before{content:"\f235"}.fa-hotel:before,.fa-bed:before{content:"\f236"}.fa-viacoin:before{content:"\f237"}.fa-train:before{content:"\f238"}.fa-subway:before{content:"\f239"}.fa-medium:before{content:"\f23a"}.fa-yc:before,.fa-y-combinator:before{content:"\f23b"}.fa-optin-monster:before{content:"\f23c"}.fa-opencart:before{content:"\f23d"}.fa-expeditedssl:before{content:"\f23e"}.fa-battery-4:before,.fa-battery:before,.fa-battery-full:before{content:"\f240"}.fa-battery-3:before,.fa-battery-three-quarters:before{content:"\f241"}.fa-battery-2:before,.fa-battery-half:before{content:"\f242"}.fa-battery-1:before,.fa-battery-quarter:before{content:"\f243"}.fa-battery-0:before,.fa-battery-empty:before{content:"\f244"}.fa-mouse-pointer:before{content:"\f245"}.fa-i-cursor:before{content:"\f246"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{content:"\f248"}.fa-sticky-note:before{content:"\f249"}.fa-sticky-note-o:before{content:"\f24a"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-diners-club:before{content:"\f24c"}.fa-clone:before{content:"\f24d"}.fa-balance-scale:before{content:"\f24e"}.fa-hourglass-o:before{content:"\f250"}.fa-hourglass-1:before,.fa-hourglass-start:before{content:"\f251"}.fa-hourg
lass-2:before,.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-3:before,.fa-hourglass-end:before{content:"\f253"}.fa-hourglass:before{content:"\f254"}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:"\f255"}.fa-hand-stop-o:before,.fa-hand-paper-o:before{content:"\f256"}.fa-hand-scissors-o:before{content:"\f257"}.fa-hand-lizard-o:before{content:"\f258"}.fa-hand-spock-o:before{content:"\f259"}.fa-hand-pointer-o:before{content:"\f25a"}.fa-hand-peace-o:before{content:"\f25b"}.fa-trademark:before{content:"\f25c"}.fa-registered:before{content:"\f25d"}.fa-creative-commons:before{content:"\f25e"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-tripadvisor:before{content:"\f262"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-get-pocket:before{content:"\f265"}.fa-wikipedia-w:before{content:"\f266"}.fa-safari:before{content:"\f267"}.fa-chrome:before{content:"\f268"}.fa-firefox:before{content:"\f269"}.fa-opera:before{content:"\f26a"}.fa-internet-explorer:before{content:"\f26b"}.fa-tv:before,.fa-television:before{content:"\f26c"}.fa-contao:before{content:"\f26d"}.fa-500px:before{content:"\f26e"}.fa-amazon:before{content:"\f270"}.fa-calendar-plus-o:before{content:"\f271"}.fa-calendar-minus-o:before{content:"\f272"}.fa-calendar-times-o:before{content:"\f273"}.fa-calendar-check-o:before{content:"\f274"}.fa-industry:before{content:"\f275"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-map-o:before{content:"\f278"}.fa-map:before{content:"\f279"}.fa-commenting:before{content:"\f27a"}.fa-commenting-o:before{content:"\f27b"}.fa-houzz:before{content:"\f27c"}.fa-vimeo:before{content:"\f27d"}.fa-black-tie:before{content:"\f27e"}.fa-fonticons:before{content:"\f280"}.fa-reddit-alien:before{content:"\f281"}.fa-edge:before{content:"\f282"}.fa-credit-card-alt:before{content:"\f283"}.fa-codiepie:before{content:"\f284"}.fa-modx:before{content:"\f285"}.fa-fort-awesome:before{content:"\f286"
}.fa-usb:before{content:"\f287"}.fa-product-hunt:before{content:"\f288"}.fa-mixcloud:before{content:"\f289"}.fa-scribd:before{content:"\f28a"}.fa-pause-circle:before{content:"\f28b"}.fa-pause-circle-o:before{content:"\f28c"}.fa-stop-circle:before{content:"\f28d"}.fa-stop-circle-o:before{content:"\f28e"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-hashtag:before{content:"\f292"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-percent:before{content:"\f295"}.fa-gitlab:before{content:"\f296"}.fa-wpbeginner:before{content:"\f297"}.fa-wpforms:before{content:"\f298"}.fa-envira:before{content:"\f299"}.fa-universal-access:before{content:"\f29a"}.fa-wheelchair-alt:before{content:"\f29b"}.fa-question-circle-o:before{content:"\f29c"}.fa-blind:before{content:"\f29d"}.fa-audio-description:before{content:"\f29e"}.fa-volume-control-phone:before{content:"\f2a0"}.fa-braille:before{content:"\f2a1"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asl-interpreting:before,.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-deafness:before,.fa-hard-of-hearing:before,.fa-deaf:before{content:"\f2a4"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-signing:before,.fa-sign-language:before{content:"\f2a7"}.fa-low-vision:before{content:"\f2a8"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-pied-piper:before{content:"\f2ae"}.fa-first-order:before{content:"\f2b0"}.fa-yoast:before{content:"\f2b1"}.fa-themeisle:before{content:"\f2b2"}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:"\f2b3"}.fa-fa:before,.fa-font-awesome:before{content:"\f2b4"}.fa-handshake-o:before{content:"\f2b5"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-o:before{content:"\f2b7"}.fa-linode:before{content:"\f2b8"}.fa-address
-book:before{content:"\f2b9"}.fa-address-book-o:before{content:"\f2ba"}.fa-vcard:before,.fa-address-card:before{content:"\f2bb"}.fa-vcard-o:before,.fa-address-card-o:before{content:"\f2bc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-circle-o:before{content:"\f2be"}.fa-user-o:before{content:"\f2c0"}.fa-id-badge:before{content:"\f2c1"}.fa-drivers-license:before,.fa-id-card:before{content:"\f2c2"}.fa-drivers-license-o:before,.fa-id-card-o:before{content:"\f2c3"}.fa-quora:before{content:"\f2c4"}.fa-free-code-camp:before{content:"\f2c5"}.fa-telegram:before{content:"\f2c6"}.fa-thermometer-4:before,.fa-thermometer:before,.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-thermometer-2:before,.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:"\f2cb"}.fa-shower:before{content:"\f2cc"}.fa-bathtub:before,.fa-s15:before,.fa-bath:before{content:"\f2cd"}.fa-podcast:before{content:"\f2ce"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-times-rectangle:before,.fa-window-close:before{content:"\f2d3"}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:"\f2d4"}.fa-bandcamp:before{content:"\f2d5"}.fa-grav:before{content:"\f2d6"}.fa-etsy:before{content:"\f2d7"}.fa-imdb:before{content:"\f2d8"}.fa-ravelry:before{content:"\f2d9"}.fa-eercast:before{content:"\f2da"}.fa-microchip:before{content:"\f2db"}.fa-snowflake-o:before{content:"\f2dc"}.fa-superpowers:before{content:"\f2dd"}.fa-wpexplorer:before{content:"\f2de"}.fa-meetup:before{content:"\f2e0"}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0, 0, 0, 0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto} 5 | 
--------------------------------------------------------------------------------