├── .gitignore
├── AnalogNAS_Tutorial.ipynb
├── LICENSE
├── README.md
├── analognas
├── README.md
├── analogainas.egg-info
│ ├── PKG-INFO
│ ├── SOURCES.txt
│ ├── dependency_links.txt
│ ├── requires.txt
│ └── top_level.txt
├── analogainas
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-38.pyc
│ │ ├── __init__.cpython-39.pyc
│ │ └── utils.cpython-38.pyc
│ ├── __version__.py
│ ├── evaluators
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ │ ├── __init__.cpython-38.pyc
│ │ │ ├── __init__.cpython-39.pyc
│ │ │ ├── xgboost.cpython-38.pyc
│ │ │ └── xgboost.cpython-39.pyc
│ │ ├── base_evaluator.py
│ │ ├── mlp.py
│ │ ├── prepare_data.py
│ │ ├── weights
│ │ │ ├── mlp_model.pth
│ │ │ ├── surrogate_xgboost.json
│ │ │ ├── surrogate_xgboost_avm.json
│ │ │ ├── surrogate_xgboost_ranker.json
│ │ │ ├── surrogate_xgboost_std.json
│ │ │ ├── xgboost_avm.bst
│ │ │ ├── xgboost_avm_kws.bst
│ │ │ ├── xgboost_avm_vww.bst
│ │ │ ├── xgboost_ranker_acc.bst
│ │ │ ├── xgboost_ranker_kws.bst
│ │ │ ├── xgboost_ranker_vww.bst
│ │ │ ├── xgboost_std.bst
│ │ │ ├── xgboost_std_kws.bst
│ │ │ └── xgboost_std_vww.bst
│ │ └── xgboost.py
│ ├── search_algorithms
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ │ ├── __init__.cpython-38.pyc
│ │ │ ├── __init__.cpython-39.pyc
│ │ │ ├── ea_optimized.cpython-38.pyc
│ │ │ ├── ea_optimized.cpython-39.pyc
│ │ │ ├── worker.cpython-38.pyc
│ │ │ └── worker.cpython-39.pyc
│ │ ├── bo.py
│ │ ├── ea.py
│ │ ├── ea_optimized.py
│ │ └── worker.py
│ ├── search_spaces
│ │ ├── __pycache__
│ │ │ ├── config_space.cpython-38.pyc
│ │ │ ├── config_space.cpython-39.pyc
│ │ │ ├── resnet_macro_architecture.cpython-38.pyc
│ │ │ ├── resnet_macro_architecture.cpython-39.pyc
│ │ │ ├── sample.cpython-38.pyc
│ │ │ ├── sample.cpython-39.pyc
│ │ │ ├── train.cpython-38.pyc
│ │ │ └── train.cpython-39.pyc
│ │ ├── config_space.py
│ │ ├── dataloaders
│ │ │ ├── __pycache__
│ │ │ │ ├── cutout.cpython-38.pyc
│ │ │ │ ├── cutout.cpython-39.pyc
│ │ │ │ ├── dataloader.cpython-38.pyc
│ │ │ │ └── dataloader.cpython-39.pyc
│ │ │ ├── cutout.py
│ │ │ ├── dataloader.py
│ │ │ └── kws.py
│ │ ├── resnet_macro_architecture.py
│ │ ├── sample.py
│ │ └── train.py
│ └── utils.py
├── build
│ └── lib
│ │ └── analogainas
│ │ ├── __init__.py
│ │ ├── __version__.py
│ │ ├── evaluators
│ │ ├── __init__.py
│ │ ├── base_evaluator.py
│ │ ├── mlp.py
│ │ ├── prepare_data.py
│ │ └── xgboost.py
│ │ ├── search_algorithms
│ │ ├── __init__.py
│ │ ├── bo.py
│ │ ├── ea.py
│ │ ├── ea_optimized.py
│ │ └── worker.py
│ │ └── utils.py
├── dist
│ ├── analogainas-0.1.0-py3.8.egg
│ └── analogainas-0.1.0-py3.9.egg
├── docs
│ ├── Makefile
│ ├── _build
│ │ ├── doctrees
│ │ │ ├── environment.pickle
│ │ │ ├── getting_started.doctree
│ │ │ ├── index.doctree
│ │ │ └── install.doctree
│ │ └── html
│ │ │ ├── .buildinfo
│ │ │ ├── _sources
│ │ │ ├── getting_started.rst.txt
│ │ │ ├── index.rst.txt
│ │ │ └── install.rst.txt
│ │ │ ├── _static
│ │ │ ├── _sphinx_javascript_frameworks_compat.js
│ │ │ ├── alabaster.css
│ │ │ ├── basic.css
│ │ │ ├── classic.css
│ │ │ ├── css
│ │ │ │ ├── badge_only.css
│ │ │ │ ├── fonts
│ │ │ │ │ ├── Roboto-Slab-Bold.woff
│ │ │ │ │ ├── Roboto-Slab-Bold.woff2
│ │ │ │ │ ├── Roboto-Slab-Regular.woff
│ │ │ │ │ ├── Roboto-Slab-Regular.woff2
│ │ │ │ │ ├── fontawesome-webfont.eot
│ │ │ │ │ ├── fontawesome-webfont.svg
│ │ │ │ │ ├── fontawesome-webfont.ttf
│ │ │ │ │ ├── fontawesome-webfont.woff
│ │ │ │ │ ├── fontawesome-webfont.woff2
│ │ │ │ │ ├── lato-bold-italic.woff
│ │ │ │ │ ├── lato-bold-italic.woff2
│ │ │ │ │ ├── lato-bold.woff
│ │ │ │ │ ├── lato-bold.woff2
│ │ │ │ │ ├── lato-normal-italic.woff
│ │ │ │ │ ├── lato-normal-italic.woff2
│ │ │ │ │ ├── lato-normal.woff
│ │ │ │ │ └── lato-normal.woff2
│ │ │ │ └── theme.css
│ │ │ ├── custom.css
│ │ │ ├── doctools.js
│ │ │ ├── documentation_options.js
│ │ │ ├── file.png
│ │ │ ├── jquery.js
│ │ │ ├── js
│ │ │ │ ├── badge_only.js
│ │ │ │ ├── html5shiv-printshiv.min.js
│ │ │ │ ├── html5shiv.min.js
│ │ │ │ └── theme.js
│ │ │ ├── language_data.js
│ │ │ ├── minus.png
│ │ │ ├── plus.png
│ │ │ ├── pygments.css
│ │ │ ├── searchtools.js
│ │ │ ├── sidebar.js
│ │ │ └── sphinx_highlight.js
│ │ │ ├── genindex.html
│ │ │ ├── getting_started.html
│ │ │ ├── index.html
│ │ │ ├── install.html
│ │ │ ├── objects.inv
│ │ │ ├── search.html
│ │ │ └── searchindex.js
│ ├── conf.py
│ ├── getting_started.rst
│ ├── index.rst
│ ├── install.rst
│ └── make.bat
├── environment.yml
├── nas_search_demo.py
├── requirements.txt
├── setup.py
├── starter_notebook.ipynb
└── tests
│ ├── 01_general_search.py
│ ├── 02_bo.py
│ ├── 03_random_sample.py
│ ├── 04_generate_architecture.py
│ └── resnet.csv
└── analognasbench
├── README.md
├── analog_nasbench.egg-info
├── PKG-INFO
├── SOURCES.txt
├── dependency_links.txt
├── requires.txt
└── top_level.txt
├── analognasbench
├── __init__.py
├── __pycache__
│ ├── __init__.cpython-38.pyc
│ └── data.cpython-38.pyc
├── data.anb
└── data.py
├── build
└── lib
│ └── analognasbench
│ ├── __init__.py
│ ├── data.anb
│ └── data.py
├── examples
├── bananas.py
├── brute_force.py
├── evolutionary_algorithm.py
└── random_search.py
├── setup.py
├── test.py
├── training_script.py
└── training_script.sh
/.gitignore:
--------------------------------------------------------------------------------
1 | venv
2 | env
3 | results
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Analog-aware NAS Works
2 |
3 | ## Description
4 | Recent updates:
5 | * **AnalogNAS-Bench**: We have integrated AnalogNAS-Bench, a dedicated NAS benchmark tailored specifically for Analog In-Memory Computing (AIMC). This benchmark enables fair and systematic comparisons of NAS methodologies under realistic AIMC conditions, providing valuable insights into architectural robustness against AIMC-induced noise and drift.
6 |
7 | * **AnalogNas** is a modular and flexible framework to facilitate the implementation of Analog-aware Neural Architecture Search. It offers high-level classes to define: the search space, the accuracy evaluator, and the search strategy. It leverages [the aihwkit framework](https://github.com/IBM/aihwkit) to apply hardware-aware training with analog non-idealities and noise included. **AnalogNAS** obtained architectures are more robust during inference on Analog Hardware. We also include two evaluators trained to rank the architectures according to their analog training accuracy.
8 |
9 |
10 | ## Setup
11 | While installing the repository, creating a new conda environment is recommended.
12 |
13 | Firstly, refer to [AIHWKit installation](https://aihwkit.readthedocs.io/en/latest/install.html) to install Pytorch and the AIHWKit toolkit.
14 |
15 | Install the additional requirements, using:
16 | ```
17 | pip install -r requirements.txt
18 | ```
19 |
20 | Afterwards, install AnalogNAS by running the ```setup.py``` file:
21 | ```
22 | python setup.py install
23 | ```
24 |
25 | Alternatively, you can also download the package through pip:
26 | ```
27 | pip install analogainas
28 | ```
29 |
30 | ## Example
31 |
32 | ```python
33 | from analogainas.search_spaces.config_space import ConfigSpace
34 | from analogainas.evaluators.xgboost import XGBoostEvaluator
35 | from analogainas.search_algorithms.ea_optimized import EAOptimizer
36 | from analogainas.search_algorithms.worker import Worker
37 |
38 | CS = ConfigSpace('CIFAR-10') # define search space, by default a resnet-like search space
39 | evaluator = XGBoostEvaluator() # load the evaluators
40 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) # define the optimizer with its parameters
41 |
42 | NB_RUN = 2
43 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) # The global runner
44 |
45 | worker.search() # start search
46 |
47 | worker.result_summary() # print results
48 |
49 | ```
50 |
51 | ## Usage
52 | To get started, check out ```nas_search_demo.py``` and ```starter_notebook.ipynb``` to ensure the installation went well.
53 |
54 | ## Authors
55 | AnalogNAS has been developed by IBM Research,
56 |
57 | with Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo,
58 | Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan,
59 | Abu Sebastian, and Kaoutar El Maghraoui
60 |
61 | You can contact us by opening a new issue in the repository.
62 |
63 | ## How to cite?
64 |
65 | In case you are using the _AnalogNas_ toolkit for
66 | your research, please cite the IEEE Edge 2023 paper that describes the toolkit:
67 |
68 | > Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo,
69 | > Hsinyu Tsai, Ramachandran Muralidhar, Smail Niar, Ouarnoughi Hamza, Vijay Narayanan,
70 | > Abu Sebastian and Kaoutar El Maghraoui
71 | > "AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing" (2023 IEEE INTERNATIONAL CONFERENCE ON EDGE
72 | > COMPUTING & COMMUNICATIONS (IEEE Edge))
73 |
74 | > https://arxiv.org/abs/2305.10459
75 |
76 |
77 | ## Awards and Media Mentions
78 |
79 | * We are proud to share that the AnalogNAS open source project received the prestigious **IEEE OPEN SOURCE SCIENCE** award in 2023 at the [IEEE 2023 Services Computing Congress](https://conferences.computer.org/services/2023/awards/).
80 |
81 |
82 |
83 | * AnalogNAS paper received the **Best Paper Award** at [2023 IEEE EDGE (INTERNATIONAL CONFERENCE ON EDGE COMPUTING & COMMUNICATIONS)](https://conferences.computer.org/edge/2023/)
84 |
85 |
86 |
87 |
88 |
89 |
90 | ## References
91 | * [Hardware-aware Neural Architecture Search: Survey and Taxonomy](https://www.ijcai.org/proceedings/2021/592)
92 | * [AIHWKit](https://ieeexplore.ieee.org/abstract/document/9458494)
93 | * [AIHW Composer](https://aihw-composer.draco.res.ibm.com)
94 |
95 | ## License
96 | This project is licensed under [Apache License 2.0].
97 |
98 | [Apache License 2.0]: LICENSE.txt
99 |
--------------------------------------------------------------------------------
/analognas/README.md:
--------------------------------------------------------------------------------
1 | # AnalogNAS
2 |
3 | ## Description
4 | **AnalogNas** is a modular and flexible framework to facilitate the implementation of Analog-aware Neural Architecture Search. It offers high-level classes to define: the search space, the accuracy evaluator, and the search strategy. It leverages [the aihwkit framework](https://github.com/IBM/aihwkit) to apply hardware-aware training with analog non-idealities and noise included. **AnalogNAS** obtained architectures are more robust during inference on Analog Hardware. We also include two evaluators trained to rank the architectures according to their analog training accuracy.
5 |
6 | > :warning: This library is currently in beta and under active development.
7 | > Please be mindful of potential issues and monitor for improvements,
8 | > new features, and bug fixes in upcoming versions.
9 |
10 | [**Setup**](#setup)
11 | | [**Usage**](#usage)
12 | | [**Docs**](https://github.com/IBM/analog-nas/blob/main/starter_notebook.ipynb)
13 | | [**References**](#references)
14 |
15 | ## Features
16 | AnalogaiNAS package offers the following features:
17 |
18 | * A customizable resnet-like search space, allowing to target CIFAR-10, Visual Wake Words, and Keyword Spotting
19 | * A configuration space object allows to add any number or type of architecture and training hyperparameters to the search
20 | * An analog-specific evaluator which includes:
21 | * A 1-day accuracy ranker
22 | * A 1-month accuracy variation estimator
23 | * A 1-day standard deviation estimator
24 | * A flexible search algorithm, enabling the implementation and extension of state-of-the-art NAS methods.
25 |
26 | ## Structure
27 | In a high-level AnalogAINAS consists of 4 main building blocks which (can) interact with each other:
28 |
29 | * Configuration spaces (```search_spaces/config_space.py```): a search space of architectures targeting a specific dataset.
30 | * Evaluator (```evaluators/base_evaluator.py```): a ML predictor model to predict:
31 | * 1-day Accuracy: the evaluator models the drift effect that is encountered in Analog devices. The accuracy after 1 day of drift is then predicted and used as an objective to maximize.
32 | * The Accuracy Variation for One Month (AVM): The difference between the accuracy after 1 month and the accuracy after 1 sec.
33 | * The 1-day accuracy standard deviation: The stochasticity of the noise induces different variation of the model's accuracy depending on its architecture.
34 |
35 | The weights of these models are provided in (```evaluators/weights```).
36 | * Optimizer (```search_algorithms/```): an optimization strategy such as an evolutionary algorithm or Bayesian optimization.
37 | * Worker (```search_algorithms/worker.py```): A global object that runs the architecture search loop and the final network training pipeline
38 |
39 | ## Setup
40 | While installing the repository, creating a new conda environment is recommended.
41 |
42 | Firstly, refer to [AIHWKit installation](https://aihwkit.readthedocs.io/en/latest/install.html) to install Pytorch and the AIHWKit toolkit.
43 |
44 | Install the additional requirements, using:
45 | ```
46 | pip install -r requirements.txt
47 | ```
48 |
49 | Afterwards, install AnalogNAS by running the ```setup.py``` file:
50 | ```
51 | python setup.py install
52 | ```
53 |
54 | Alternatively, you can also download the package through pip:
55 | ```
56 | pip install analogainas
57 | ```
58 |
59 | ## Example
60 |
61 | ```python
62 | from analogainas.search_spaces.config_space import ConfigSpace
63 | from analogainas.evaluators.xgboost import XGBoostEvaluator
64 | from analogainas.search_algorithms.ea_optimized import EAOptimizer
65 | from analogainas.search_algorithms.worker import Worker
66 |
67 | CS = ConfigSpace('CIFAR-10') # define search space, by default a resnet-like search space
68 | evaluator = XGBoostEvaluator() # load the evaluators
69 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) # define the optimizer with its parameters
70 |
71 | NB_RUN = 2
72 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) # The global runner
73 |
74 | worker.search() # start search
75 |
76 | worker.result_summary() # print results
77 |
78 | ```
79 |
80 | ## Usage
81 | To get started, check out ```nas_search_demo.py``` and ```starter_notebook.ipynb``` to ensure the installation went well.
82 |
83 | ## Authors
84 | AnalogNAS has been developed by IBM Research,
85 |
86 | with Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo,
87 | Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan,
88 | Abu Sebastian, and Kaoutar El Maghraoui
89 |
90 | You can contact us by opening a new issue in the repository.
91 |
92 | ## How to cite?
93 |
94 | In case you are using the _AnalogNas_ toolkit for
95 | your research, please cite the IEEE Edge 2023 paper that describes the toolkit:
96 |
97 | > Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo,
98 | > Hsinyu Tsai, Ramachandran Muralidhar, Smail Niar, Ouarnoughi Hamza, Vijay Narayanan,
99 | > Abu Sebastian and Kaoutar El Maghraoui
100 | > "AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing" (2023 IEEE INTERNATIONAL CONFERENCE ON EDGE
101 | > COMPUTING & COMMUNICATIONS (IEEE Edge))
102 |
103 | > https://arxiv.org/abs/2305.10459
104 |
105 |
106 | ## Awards and Media Mentions
107 |
108 | * We are proud to share that the AnalogNAS open source project received the prestigious **IEEE OPEN SOURCE SCIENCE** award in 2023 at the [IEEE 2023 Services Computing Congress](https://conferences.computer.org/services/2023/awards/).
109 |
110 |
111 |
112 | * AnalogNAS paper received the **Best Paper Award** at [2023 IEEE EDGE (INTERNATIONAL CONFERENCE ON EDGE COMPUTING & COMMUNICATIONS)](https://conferences.computer.org/edge/2023/)
113 |
114 |
115 |
116 |
117 |
118 |
119 | ## References
120 | * [Hardware-aware Neural Architecture Search: Survey and Taxonomy](https://www.ijcai.org/proceedings/2021/592)
121 | * [AIHWKit](https://ieeexplore.ieee.org/abstract/document/9458494)
122 | * [AIHW Composer](https://aihw-composer.draco.res.ibm.com)
123 |
124 | ## License
125 | This project is licensed under [Apache License 2.0].
126 |
127 | [Apache License 2.0]: LICENSE.txt
128 |
--------------------------------------------------------------------------------
/analognas/analogainas.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | LICENSE
2 | README.md
3 | setup.py
4 | analogainas/__init__.py
5 | analogainas/__version__.py
6 | analogainas/utils.py
7 | analogainas.egg-info/PKG-INFO
8 | analogainas.egg-info/SOURCES.txt
9 | analogainas.egg-info/dependency_links.txt
10 | analogainas.egg-info/requires.txt
11 | analogainas.egg-info/top_level.txt
12 | analogainas/evaluators/__init__.py
13 | analogainas/evaluators/base_evaluator.py
14 | analogainas/evaluators/mlp.py
15 | analogainas/evaluators/prepare_data.py
16 | analogainas/evaluators/xgboost.py
17 | analogainas/search_algorithms/__init__.py
18 | analogainas/search_algorithms/bo.py
19 | analogainas/search_algorithms/ea.py
20 | analogainas/search_algorithms/ea_optimized.py
21 | analogainas/search_algorithms/worker.py
--------------------------------------------------------------------------------
/analognas/analogainas.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/analognas/analogainas.egg-info/requires.txt:
--------------------------------------------------------------------------------
1 | cmake==3.22.4
2 | scikit-build==0.14.1
3 | scikit-learn==1.0.2
4 | scipy
5 | xgboost
6 |
--------------------------------------------------------------------------------
/analognas/analogainas.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | analogainas
2 |
--------------------------------------------------------------------------------
/analognas/analogainas/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__init__.py
--------------------------------------------------------------------------------
/analognas/analogainas/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/__pycache__/__init__.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/__init__.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/__pycache__/utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/utils.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/__version__.py:
--------------------------------------------------------------------------------
1 | """Version information."""
2 | __version__ = "0.1.0"
3 |
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/__init__.py:
--------------------------------------------------------------------------------
1 | """Accuracy Evaluation Methods"""
2 |
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/__pycache__/__init__.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/__pycache__/__init__.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/__pycache__/xgboost.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/__pycache__/xgboost.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/__pycache__/xgboost.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/__pycache__/xgboost.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/base_evaluator.py:
--------------------------------------------------------------------------------
1 | """Abstract class for base evaluator."""
2 | from analogainas.utils import kendal_correlation
3 |
4 |
5 | """Base class for Accuracy Evaluation Methods."""
class Evaluator:
    """Abstract base class for accuracy evaluation methods.

    Concrete evaluators (e.g. the XGBoost surrogate) override the hooks
    below; the base class only stores the model type and hyperparameters.
    """

    def __init__(self, model_type=None):
        # Identifier of the underlying predictor (e.g. "XGBRanker").
        self.model_type = model_type

    def pre_process(self):
        """Hook run once at the start of the NAS algorithm, before any
        architecture has been queried."""
        pass

    def fit(self, x_train, y_train):
        """Train the evaluator.

        Args:
            x_train: list of architectures
            y_train: accuracies or ranks
        """
        pass

    def query(self, x_test):
        """Predict the accuracy/rank for each architecture in ``x_test``.

        Args:
            x_test: list of architectures

        Returns:
            Predictions for the architectures
        """
        pass

    def get_evaluator_stat(self):
        """Report whether the evaluator needs retraining.

        Returns:
            A dictionary of metrics.
        """
        return {
            "requires_retraining": False,
            "test_accuracy": None,
            "requires_hyperparameters": False,
            "hyperparams": {},
        }

    def set_hyperparams(self, hyperparams):
        """Set or replace the evaluator's hyperparameters.

        Args:
            hyperparams: dictionary of hyperparameters.
        """
        self.hyperparams = hyperparams

    def get_hyperparams(self):
        """Return the hyperparameter dictionary.

        Returns:
            The dictionary set via ``set_hyperparams``, or ``None`` if
            hyperparameters were never set.
        """
        return getattr(self, "hyperparams", None)

    def get_correlation(self, x_test, y_test):
        """Kendall correlation between true values and predictions."""
        predicted = self.query(x_test)
        return kendal_correlation(y_test, predicted)
79 |
80 |
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/prepare_data.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | from sklearn.model_selection import train_test_split
4 | from sklearn.preprocessing import StandardScaler
5 |
class AccuracyDataLoader:
    """Loads an architecture/accuracy dataset from a CSV file.

    Attributes:
        dataset_file: path to the CSV file (default "dataset_cifar10.csv").
        data: numeric ndarray parsed with numpy.genfromtxt (non-numeric
            cells become NaN).
    """
    def __init__(self, dataset_file="dataset_cifar10.csv", transforms=None):
        self.dataset_file = dataset_file
        self.data = genfromtxt(self.dataset_file, delimiter=',')

        # Applies encoding
        if transforms is not None:
            self.data = transforms(self.data)

    def get_train(self):
        # NOTE(review): these indices select *rows* 1..23 as X and row 27 as y.
        # If each CSV row is one architecture, this should probably be column
        # slicing (e.g. self.data[:, 1:24]); as written len(X) == 23 while
        # len(y) == number of columns, and train_test_split would raise on the
        # length mismatch — verify against the dataset layout.
        X = self.data[1:24]
        y = self.data[27]
        # Presumably an accuracy-drift slope between row 26 and the last row;
        # TODO confirm which rows were intended.
        slope = self.data[26] - self.data[-1]

        # Split the data into train and test sets
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

        # Scale the data using StandardScaler
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)

        return (X_train, y_train), (X_test, y_test), slope
29 |
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/mlp_model.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/mlp_model.pth
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_avm.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_avm_kws.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm_kws.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_avm_vww.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm_vww.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_ranker_acc.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_acc.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_ranker_kws.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_kws.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_ranker_vww.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_vww.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_std.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_std_kws.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std_kws.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/weights/xgboost_std_vww.bst:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std_vww.bst
--------------------------------------------------------------------------------
/analognas/analogainas/evaluators/xgboost.py:
--------------------------------------------------------------------------------
1 | """XGBoost evaluator."""
2 | from tabnanny import verbose
3 | import xgboost as xgb
4 | import numpy as np
5 | #from base_evaluator import Evaluator
6 |
7 | """
8 | XGBoost Evaluator wrapper class.
9 | """
class XGBoostEvaluator:
    """XGBoost surrogate evaluator.

    Bundles three pre-trained gradient-boosted models used to score
    candidate architectures without training them:

      * ``ranker``        -- ranks architectures by predicted 1-day accuracy.
      * ``avm_predictor`` -- predicts the 1-month accuracy variation (AVM).
      * ``std_predictor`` -- predicts the 1-day accuracy standard deviation.

    When ``load_weight`` is true, weights are loaded from the ``.bst`` files
    given by the ``*_path`` arguments.
    """

    def __init__(
        self,
        model_type="XGBRanker",
        load_weight=True,
        hpo_wrapper=False,
        hparams_from_file=False,
        avm_predictor_path="analogainas/evaluators/weights/xgboost_avm.bst",
        std_predictor_path="analogainas/evaluators/weights/xgboost_std.bst",
        ranker_path="analogainas/evaluators/weights/xgboost_ranker_acc.bst",
    ):
        self.model_type = model_type
        self.hpo_wrapper = hpo_wrapper
        # Defaults used by fit() when no hyperparameters are supplied.
        self.default_hyperparams = {
            'tree_method': 'gpu_hist',
            'booster': 'gbtree',
            'objective': 'rank:pairwise',
            'random_state': 42,
            'learning_rate': 0.1,
            'colsample_bytree': 0.9,
            'eta': 0.05,
            'max_depth': 6,
            'n_estimators': 110,
            'subsample': 0.75,
            'enable_categorical': True}
        self.hyperparams = None
        self.hparams_from_file = hparams_from_file
        self.load_weight = load_weight
        self.ranker_path = ranker_path
        self.avm_predictor_path = avm_predictor_path
        self.std_predictor_path = std_predictor_path

        self.ranker = self.get_ranker()
        self.avm_predictor = self.get_avm_predictor()
        self.std_predictor = self.get_std_predictor()

    def get_ranker(self):
        """Build the accuracy ranker, optionally loading pre-trained weights."""
        ranker = xgb.XGBRegressor()
        if self.load_weight:  # idiomatic truthiness instead of `== True`
            ranker.load_model(self.ranker_path)
        return ranker

    def get_avm_predictor(self):
        """Build the AVM predictor, optionally loading pre-trained weights."""
        avm_predictor = xgb.XGBRegressor()
        if self.load_weight:
            avm_predictor.load_model(self.avm_predictor_path)
        return avm_predictor

    def get_std_predictor(self):
        """Build the std-dev predictor, optionally loading pre-trained weights."""
        std_predictor = xgb.XGBRegressor()
        if self.load_weight:
            std_predictor.load_model(self.std_predictor_path)
        return std_predictor

    def fit(self, x_train, y_train, train_info_file="xgboost.txt", hyperparameters=None, epochs=500, verbose=True):
        """Train a ranking model on (x_train, y_train) and save it.

        NOTE(review): ``self.get_model`` is not defined anywhere in this
        class, and ``xgb.XGBRegressor`` exposes no ``.train()`` method, so
        this path appears broken/unused as written — confirm before relying
        on it.

        Returns:
            The recorded 'rank:ndcg' evaluation history.
        """
        if hyperparameters is None:  # `is None`, not `== None`
            self.evaluator = self.get_model(self.default_hyperparams)
        else:
            self.hyperparams = hyperparameters
            self.evaluator = self.get_model(self.hyperparams)

        progress = dict()
        d_train = xgb.DMatrix(x_train, y_train)
        watchlist = [(d_train, 'rank:ndcg')]
        self.evaluator = self.evaluator.train(self.hyperparams, d_train, epochs, watchlist, evals_result=progress)

        # Persist the trained model.
        self.evaluator.save_model(train_info_file)

        return progress['rank:ndcg']

    def query_pop(self, P):
        """Score a whole population.

        Args:
            P: iterable where each item's first element is an architecture
               dict (its values form the feature vector).

        Returns:
            Tuple of (ranker predictions, AVM predictions) arrays.
        """
        x_test = np.array([list(a[0].values()) for a in P])
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def query(self, P):
        """Score a single architecture (P[0] is the architecture dict)."""
        x_test = [list(P[0].values())]
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def dcg_at_k(self, r, k, method=0):
        """Discounted cumulative gain of relevance scores ``r`` at rank ``k``.

        Args:
            r: relevance scores in ranked order.
            k: number of leading positions to score.
            method: 0 -> first item undiscounted; 1 -> all items discounted
                by log2(position + 1). Any other value yields 0.
        """
        # np.asfarray was removed in NumPy 2.0; asarray(dtype=float) is the
        # documented equivalent.
        r = np.asarray(r, dtype=float)[:k]
        if r.size:
            if method == 0:
                return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
            elif method == 1:
                return np.sum(r / np.log2(np.arange(2, r.size + 2)))
        return 0.

    def ndcg_at_k(self, r, k, method=0):
        """Normalized DCG at rank ``k`` (DCG divided by the ideal DCG)."""
        dcg_max = self.dcg_at_k(sorted(r, reverse=True), k, method)
        if not dcg_max:
            return 0.
        return self.dcg_at_k(r, k, method) / dcg_max

    def ndcg_scorer(self, estimator, X, y_true):
        """NDCG scorer usable with scikit-learn style model selection."""
        y_pred = estimator.predict(X)
        # Assuming y_true contains the actual relevance scores:
        # sort the true scores based on the predictions.
        sorted_scores = [y for _, y in sorted(zip(y_pred, y_true), reverse=True)]
        return self.ndcg_at_k(sorted_scores, k=len(y_true))  # or use a specific k
117 |
118 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | """A Worker is the base NAS Search Strategy class."""
2 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/worker.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/worker.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/__pycache__/worker.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/worker.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/bo.py:
--------------------------------------------------------------------------------
1 | """Bayesian Optimizer."""
2 | import numpy as np
3 |
class BOptimizer:
    """Simple Bayesian-optimization helper built around a surrogate model.

    NOTE(review): several methods reference attributes that are never set in
    __init__ (self.X, self.y, self.evaluate_func, self.random_state,
    self.search_space, self.rs_search) — presumably attached by the caller;
    confirm against the call sites before relying on maximize()/run().
    """

    def __init__(self, max_iter):
        # Maximum number of optimization iterations.
        self.max_iter = max_iter

    def surrogate(self, model, X):
        """Surrogate (approximation) of the objective: predicted mean and std."""
        return model.predict(X, return_std=True)

    def acquisition(self, X, Xsamples, model):
        """Probability-of-improvement acquisition over candidate samples."""
        # Best surrogate score found so far.
        yhat, _ = self.surrogate(model, X)
        best = max(yhat)
        # Mean and stdev of the candidates via the surrogate.
        mu, std = self.surrogate(model, Xsamples)
        mu = mu[:, 0]
        # Normalized improvement; the epsilon avoids division by zero.
        probs = (mu - best) / (std+1E-9)
        return probs

    def maximize(self):
        """Iteratively propose and evaluate points; return the best found.

        BUG FIX: the loop previously iterated over self.n_iter, which is
        never assigned anywhere — __init__ stores the budget as
        self.max_iter.
        """
        for _ in range(self.max_iter):
            x_next = self.propose_next_point()
            y_next = self.evaluate_func(x_next)

            self.X.append(x_next)
            self.y.append(y_next)

        best_idx = np.argmax(self.y)
        best_x = self.X[best_idx]
        best_y = self.y[best_idx]
        return best_x, best_y

    def propose_next_point(self):
        """Random-search proposal: best of 100 uniform candidates."""
        x_candidates = self.random_state.uniform(
            low=self.search_space[:, 0],
            high=self.search_space[:, 1],
            size=(100, self.search_space.shape[0])
        )

        best_x = None
        best_acquisition = float('-inf')

        for x in x_candidates:
            # NOTE(review): this single-argument call does not match
            # acquisition(X, Xsamples, model) above — confirm intent.
            acquisition = self.acquisition(x)
            if acquisition > best_acquisition:
                best_x = x
                best_acquisition = acquisition

        return best_x

    def gaussian_process_regression(self):
        """Placeholder GP surrogate returning a fixed mean/std."""
        mean = 0.0
        std = 1.0
        return mean, std

    def run(self, X, y, model):
        """Optimize the acquisition function over random samples."""
        # Random search: generate random samples.
        Xsamples = self.rs_search(100)
        Xsamples = Xsamples.reshape(len(Xsamples), 1)
        # Acquisition score for each sample; return the best one.
        scores = self.acquisition(X, Xsamples, model)
        ix = np.argmax(scores)
        return Xsamples[ix, 0]
73 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/ea.py:
--------------------------------------------------------------------------------
1 | """Classical Evolutionary Algorithm."""
2 | import random
3 | from analogainas.search_spaces.sample import random_sample
4 |
class EAOptimizer:
    """Classical evolutionary search over sampled architectures.

    Args:
        max_iter: number of generations.
        population_size: individuals per generation.
        mutation_prob: probability of mutating an individual.
    """

    def __init__(self, max_iter, population_size, mutation_prob):
        self.max_iter = max_iter
        self.population_size = population_size
        self.mutation_prob = mutation_prob

    def mutate(self, architecture):
        """Return a freshly sampled architecture with prob. mutation_prob.

        BUG FIX: the comparison was inverted (`random() > prob`), which
        mutated with probability 1 - mutation_prob instead of mutation_prob.
        """
        if random.random() < self.mutation_prob:
            architecture = random_sample()
        return architecture

    def run(self):
        """Run the EA and return the best architecture/fitness found.

        NOTE(review): relies on self.cs and self.evaluation, which are not
        set in __init__ — confirm they are attached by the caller.
        """
        D = [self.cs.sample() for _ in range(self.population_size)]
        best_f = 0.0
        best_x = [None] * self.population_size

        for _ in range(self.max_iter):
            new_x = [self.mutate(x) for x in D]
            new_f = [self.evaluation(x) for x in new_x]

            # Keep the best individual seen so far across generations.
            for j in range(self.population_size):
                if new_f[j] > best_f:
                    best_f = new_f[j]
                    best_x = new_x[j]

            D = new_x

        return {'best_x': best_x, 'best_f': best_f}
33 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/ea_optimized.py:
--------------------------------------------------------------------------------
1 | """Optimized Evolutionary Algorithm - AnalogNAS."""
2 | import random
3 | from analogainas.search_spaces.sample import random_sample
4 |
class EAOptimizer:
    """
    Evolutionary Algorithm with optimized mutations and robustness constraint.

    The NAS problem is cast to:
            Max Acc(arch)
            s.t nb_param(arch) < max_nb_param
                drop(arch) < 10

    Args:
        surrogate: evaluator exposing query(arch) and query_pop(population).
        nb_iter: maximum number of iterations.
        population_size: number of architectures in the population.

        mutation_prob_width: Mutation probability of modifying the width.
            - Increase/Decrease widening factor of one block.
            - Add/Remove branches.
            - Increase/Decrease Initial output channel size.

        mutation_prob_depth: Mutation probability for modifying the depth.
            - Increase/Decrease the number of residual blocks.
            - Modify the type of convolution from BasicBlock to BottleneckBlock.

        mutation_prob_other: Mutation probability for applying various other transformations:
            - Add/Remove a residual connection.
            - Modify initial kernel size.

        max_nb_param: constraint applied to the number of parameters.
        T_AVM: constraint applied on the predicted AVM (robustness check).
    """
    def __init__(self,
                 surrogate,
                 nb_iter = 200,
                 population_size=50,
                 mutation_prob_width=0.8,
                 mutation_prob_depth=0.8,
                 mutation_prob_other=0.6,
                 max_nb_param=1,
                 T_AVM =10):

        assert population_size > 10, f"Population size needs to be at least 10, got {population_size}"

        self.surrogate = surrogate
        self.nb_iter = nb_iter
        # The effective population is a tenth of the requested size.
        self.population_size = int(population_size/10)
        self.mutation_prob_width = mutation_prob_width
        self.mutation_prob_depth = mutation_prob_depth
        self.mutation_prob_other = mutation_prob_other
        self.max_nb_param = max_nb_param
        self.T_AVM = T_AVM

    def mutate(self, cs, architecture):
        """Apply one mutation: width (40%), depth (40%), or other (20%)."""
        r = random.random()
        if r < 0.4:
            architecture = self.mutate_width(cs, architecture)
        elif r < 0.8:
            architecture = self.mutate_depth(cs, architecture)
        else:
            architecture = self.mutate_other(cs, architecture)

        return architecture

    def mutate_width(self, cs, architecture):
        """Width mutation: resample with probability mutation_prob_width."""
        if random.random() < self.mutation_prob_width:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_depth(self, cs, architecture):
        """Depth mutation: resample with probability mutation_prob_depth."""
        if random.random() < self.mutation_prob_depth:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_other(self, cs, architecture):
        """Other mutation: resample with probability mutation_prob_other."""
        if random.random() < self.mutation_prob_other:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def generate_initial_population(self, cs):
        """Sample a population whose predicted AVM slopes all satisfy T_AVM.

        BUG FIX: the population was previously built as
        [cs.sample_arch_uniformly(1)] * population_size, i.e. population_size
        references to ONE shared sample; each individual is now drawn
        independently. (A stray debug print of len(P) was also removed.)
        """
        P = [cs.sample_arch_uniformly(1) for _ in range(self.population_size)]

        while not self.satisfied_constrained(P):
            # BUG FIX: slopes are recomputed on every pass; previously the
            # stale pre-loop slope vector was reused, so replacements were
            # driven by outdated constraint violations.
            _, slope = self.surrogate.query_pop(P)
            for i, s in enumerate(slope):
                if s > self.T_AVM:
                    P[i] = cs.sample_arch_uniformly(1)
        return P

    def satisfied_constrained(self, P):
        """True iff every individual's predicted AVM slope is within T_AVM."""
        _, slope = self.surrogate.query_pop(P)
        for i, s in enumerate(slope):
            if s > self.T_AVM:
                return False
        return True

    def run(self, cs):
        """Evolve the population; returns (best architecture, best accuracy)."""
        P = self.generate_initial_population(cs)
        best_f = 0.0
        best_x = None

        for i in range(self.nb_iter):
            new_P = []
            for a in P:
                new_a = self.mutate(cs, a)
                new_P.append(new_a)
                acc, _ = self.surrogate.query(new_a)
                # BUG FIX: the best accuracy is paired with ITS architecture;
                # previously best_x was set to whatever architecture happened
                # to be mutated last in the winning iteration.
                if acc > best_f:
                    best_f = acc
                    best_x = new_a[0]

            P = new_P

            print("ITERATION {} completed: best acc {}".format(i, best_f))

        return best_x, best_f
122 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_algorithms/worker.py:
--------------------------------------------------------------------------------
1 | """Search Experiment Runner."""
2 | import os
3 | import csv
4 | import numpy as np
5 |
6 | from analogainas.search_spaces.config_space import ConfigSpace
7 | from analogainas.search_spaces.resnet_macro_architecture import Network
8 |
9 | """Wrapper class to launch NAS search."""
class Worker():
    """Launches and tracks repeated NAS search runs.

    Args:
        cs: the ConfigSpace to search.
        eval: evaluator (stored as self.evaluation).
        optimizer: search strategy exposing run(config_space).
        runs: number of independent search repetitions.
        max_budget: search budget (stored only).
        n_iter: iterations per run (stored only).
    """
    def __init__(self,
                 cs: ConfigSpace=None,
                 eval = None,
                 optimizer=None,
                 runs=5,
                 max_budget=1,
                 n_iter=100):
        self.max_budget = max_budget
        self.n_iter = n_iter
        self.config_space = cs
        self.evaluation = eval
        self.optimizer = optimizer
        self.runs = runs
        self.best_config = None  # best architecture config across runs
        self.best_acc = 0        # best accuracy across runs
        self.std_err = 0         # standard error of the per-run accuracies

    @property
    def best_arch(self):
        """Instantiate the best configuration found as a Network."""
        return Network(self.best_config)

    def search(self):
        """Run the optimizer self.runs times, persisting each run's best."""
        if os.path.exists("results"):
            print("The 'results' directory already exists.\n")
        else:
            os.mkdir("results")
            print("Result directory created.\n")

        results = []
        for i in range(self.runs):
            print("Search {} started".format(i))
            best_config, best_acc = self.optimizer.run(self.config_space)

            # Persist this run's best configuration as key,value lines.
            with open('results/best_results_{}.csv'.format(i), 'w') as f:
                for key in best_config.keys():
                    f.write("%s,%s\n"%(key,best_config[key]))

            results.append(best_acc)
            if best_acc > self.best_acc:
                self.best_config = best_config
                self.best_acc = best_acc

            print("Best Acc = {}".format(best_acc))

        # BUG FIX: the standard error used to be recomputed inside the loop on
        # every iteration (with ddof=1 over a single sample on the first run,
        # which is undefined and warns); compute it once over all runs.
        self.std_err = np.std(results, ddof=1) / np.sqrt(np.size(results))

        print("SEARCH ENDED")

    def result_summary(self):
        """Print the best accuracy, its standard error, and the best config."""
        print("Best architecture accuracy: ", self.best_acc)
        print(f"Standard deviation of accuracy over {self.runs} runs: {self.std_err}")
        print("Best architecture: ", self.best_config)
62 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/config_space.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/config_space.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/config_space.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/config_space.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/sample.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/sample.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/sample.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/sample.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/train.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/train.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/__pycache__/train.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/train.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/config_space.py:
--------------------------------------------------------------------------------
1 | """Hyperparameter Configuration Space."""
2 | import numpy as np
3 | import random
4 |
5 |
class Hyperparameter:
    """
    A single searchable hyperparameter: either an explicit set of values
    (``range``) or a bounded discrete/continuous interval.
    """
    def __init__(self, name, type, range=None, min_value=0, max_value=0, step=1):
        self.name = name
        self.min_value = min_value
        self.max_value = max_value
        self.step = step
        self.range = range
        # BUG FIX: self.type was only assigned on the interval path, so
        # range-based hyperparameters had no 'type' attribute at all.
        self.type = type  # "discrete" or "continuous"
        if self.range is not None:
            self.sampling = "range"
        else:
            self.sampling = "uniform"

    def sample_hyp(self):
        """Sample one value: choice from range, randint/uniform otherwise."""
        if self.sampling == "range":
            return random.choice(self.range)
        if self.type == "discrete":
            # NOTE: numpy randint excludes max_value (half-open interval).
            return np.random.randint(self.min_value, high=self.max_value)
        if self.type == "continuous":
            return np.random.uniform(self.min_value, high=self.max_value)

    def size(self):
        """Number of distinct values (1 for continuous parameters)."""
        if self.sampling == "range":
            return len(self.range)
        if self.type == "continuous":
            return 1
        return len(np.arange(self.min_value, self.max_value, self.step))

    def __repr__(self) -> str:
        return "Name: {}\nMin_Value:{}\nMax_value:{}\nStep:{}".format(
            str(self.name), str(self.min_value), str(self.max_value), str(self.step)
        )
41 |
42 |
43 |
class ConfigSpace:
    """
    This class defines the search space.
    """
    def __init__(self, dataset="CIFAR-10"):
        self.dataset = dataset  # VWW, KWS
        self.search_space = "resnet-like"  # for now only resnet-like
        self.hyperparameters = []  # list of Hyperparameters to search for
        self.set_hyperparameters()

    def add_hyperparameter(self, name, type, min_value, max_value, step=1):
        """Add a bounded hyperparameter; names must be unique."""
        for h in self.hyperparameters:
            if h.name == name:
                raise Exception("Name should be unique!")

        hyp = Hyperparameter(name,
                             type,
                             min_value=min_value,
                             max_value=max_value,
                             step=step)
        self.hyperparameters.append(hyp)

    def add_hyperparameter_range(self, name, type, range):
        """Add a hyperparameter taking values from an explicit list."""
        for h in self.hyperparameters:
            if h.name == name:
                raise Exception("Name should be unique!")

        hyp = Hyperparameter(name, type, range=range)
        self.hyperparameters.append(hyp)

    def sample_arch(self):
        """Sample one architecture: a dict {hyperparameter name: value}."""
        arch = {}
        for hyp in self.hyperparameters:
            arch[hyp.name] = hyp.sample_hyp()
        return arch

    def sample_arch_uniformly(self, n):
        """Sample n architectures, zeroing the blocks beyond each arch's M."""
        archs = []
        for i in range(n):
            tmp = self.sample_arch()
            # Blocks with index > M are unused; zero out their parameters.
            for j in range(5, tmp["M"], -1):
                tmp["convblock%d" % j] = 0
                tmp["widenfact%d" % j] = 0
                tmp["B%d" % j] = 0
                tmp["R%d" % j] = 0
            archs.append(tmp)

        return archs

    def set_hyperparameters(self):
        """Populate the resnet-like search-space definition."""
        if self.search_space == "resnet-like":
            self.add_hyperparameter_range("out_channel0",
                                          "discrete",
                                          range=[8, 12, 16, 32, 48, 64])
            self.add_hyperparameter("M", "discrete", min_value=1, max_value=5)
            self.add_hyperparameter("R1", "discrete", min_value=1, max_value=16)
            self.add_hyperparameter("R2", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R3", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R4", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R5", "discrete", min_value=0, max_value=16)

            for i in range(1, 6):
                self.add_hyperparameter_range("convblock%d" % i,
                                              "discrete",
                                              range=[1, 2])
                self.add_hyperparameter("widenfact%d" % i,
                                        "continuous",
                                        min_value=0.5,
                                        max_value=0.8)
                self.add_hyperparameter("B%d" % i, "discrete", min_value=1, max_value=5)

    def remove_hyperparameter(self, name):
        """Remove the hyperparameter called ``name``, if present."""
        for i, h in enumerate(self.hyperparameters):
            if h.name == name:
                self.hyperparameters.remove(h)
                break

    def compute_cs_size(self):
        """Total number of distinct configurations in the space."""
        size = 1
        for h in self.hyperparameters:
            size *= h.size()
        return size

    def get_hyperparameters(self):
        """Return the list of hyperparameter names.

        BUG FIX: this used to only print the names and implicitly return
        None, which breaks callers doing len(cs.get_hyperparameters()).
        The print is kept for backward compatibility.
        """
        l = []
        for h in self.hyperparameters:
            l.append(h.name)
        print(l)
        return l

    def __repr__(self) -> str:
        str_ = ""
        str_ += "Architecture Type: {}\n".format(self.search_space)
        str_ += "Search Space Size: {}\n".format(self.compute_cs_size())
        str_ += "------------------------------------------------\n"
        for i, h in enumerate(self.hyperparameters):
            str_ += "{})\n".format(i) + str(h) + "\n\n"
        str_ += "------------------------------------------------\n"
        return str_
142 |
def main():
    """Smoke test: sample 20 architectures from a CIFAR-10 search space."""
    config_space = ConfigSpace("Cifar-10")
    sampled = config_space.sample_arch_uniformly(20)
    print(sampled)


if __name__ == "__main__":
    main()
151 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/__pycache__/cutout.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/dataloaders/__pycache__/cutout.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/__pycache__/cutout.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/dataloaders/__pycache__/cutout.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/__pycache__/dataloader.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/dataloaders/__pycache__/dataloader.cpython-38.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/__pycache__/dataloader.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/dataloaders/__pycache__/dataloader.cpython-39.pyc
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/cutout.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 |
4 |
class Cutout(object):
    """Randomly mask out one or more square patches from an image.

    Args:
        n_holes (int): number of patches to cut out of each image.
        length (int): side length (in pixels) of each square patch.
    """
    def __init__(self, n_holes, length):
        self.n_holes = n_holes
        self.length = length

    def __call__(self, img):
        """
        Args:
            img (Tensor): tensor image of size (C, H, W).
        Returns:
            Tensor: image with n_holes patches of length x length zeroed out.
        """
        height, width = img.size(1), img.size(2)
        mask = np.ones((height, width), np.float32)
        half = self.length // 2

        for _ in range(self.n_holes):
            center_y = np.random.randint(height)
            center_x = np.random.randint(width)

            # Clip the patch to the image boundary.
            top = np.clip(center_y - half, 0, height)
            bottom = np.clip(center_y + half, 0, height)
            left = np.clip(center_x - half, 0, width)
            right = np.clip(center_x + half, 0, width)

            mask[top:bottom, left:right] = 0.

        return img * torch.from_numpy(mask).expand_as(img)
45 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/dataloaders/dataloader.py:
--------------------------------------------------------------------------------
1 | import torchvision
2 | import torchvision.transforms as transforms
3 | from torch.utils.data import DataLoader
4 | from analogainas.search_spaces.dataloaders.cutout import Cutout
5 |
6 | import importlib.util
7 | pyvww = importlib.util.find_spec("pyvww")
8 | found = pyvww is not None
9 |
def load_cifar10(batch_size):
    """Build CIFAR-10 train/test DataLoaders with standard augmentation."""
    normalize = transforms.Normalize((0.4914, 0.4822, 0.4465),
                                     (0.2023, 0.1994, 0.2010))

    # Training pipeline: crop + flip augmentation, then Cutout regularization.
    train_transform = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        normalize,
        Cutout(1, length=8)
    ])
    # Test pipeline: tensor conversion and normalization only.
    test_transform = transforms.Compose([
        transforms.ToTensor(),
        normalize,
    ])

    train_set = torchvision.datasets.CIFAR10(root='./data',
                                             train=True,
                                             download=True,
                                             transform=train_transform)
    train_loader = DataLoader(train_set, batch_size=batch_size,
                              shuffle=True, num_workers=2)

    test_set = torchvision.datasets.CIFAR10(root='./data',
                                            train=False,
                                            download=True,
                                            transform=test_transform)
    test_loader = DataLoader(test_set, batch_size=100,
                             shuffle=False, num_workers=2)

    return train_loader, test_loader
41 |
42 |
43 | classes = ('plane', 'car', 'bird', 'cat', 'deer',
44 | 'dog', 'frog', 'horse', 'ship', 'truck')
45 |
46 |
def load_vww(batch_size, path, annot_path):
    """Build Visual Wake Words train/valid DataLoaders.

    Args:
        batch_size: batch size for both loaders.
        path: dataset root directory.
        annot_path: annotation file path.

    Raises:
        ImportError: if the optional pyvww package is not installed.
    """
    # BUG FIX: the module-level `pyvww` is the result of
    # importlib.util.find_spec, i.e. a ModuleSpec (or None), NOT the module,
    # so `pyvww.pytorch...` always failed. Import the real module here.
    if not found:
        raise ImportError("pyvww is required for load_vww but is not installed")
    import pyvww  # shadows the module-level ModuleSpec with the real module

    transform = transforms.Compose([
        transforms.CenterCrop(100),
        transforms.ToTensor()
    ])

    # NOTE(review): train and valid share the same root/annFile — presumably
    # the annotation file distinguishes splits; confirm with the caller.
    train_dataset = pyvww.pytorch.VisualWakeWordsClassification(
        root=path, annFile=annot_path, transform=transform)
    valid_dataset = pyvww.pytorch.VisualWakeWordsClassification(
        root=path, annFile=annot_path, transform=transform)

    train_loader = DataLoader(train_dataset,
                              batch_size=batch_size,
                              shuffle=True,
                              num_workers=1)
    valid_loader = DataLoader(valid_dataset,
                              batch_size=batch_size,
                              shuffle=False,
                              num_workers=1)

    return train_loader, valid_loader
68 |
--------------------------------------------------------------------------------
/analognas/analogainas/search_spaces/sample.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | from analogainas.search_spaces.resnet_macro_architecture import Network
4 | from analogainas.search_spaces.config_space import ConfigSpace
5 | from analogainas.search_algorithms.worker import Worker
6 | from analogainas.search_spaces.train import train
7 | from analogainas.utils import *
8 | import csv
9 |
10 | EPOCHS = 40
11 | LEARNING_RATE = 0.05
12 |
def latin_hypercube_sample(dataset, n):
    """Latin Hypercube Sampling of n architectures from ConfigSpace.

    Each sampled configuration is written to ./configs/<name>.config; a model
    is then trained (note: the train call sits outside the loop, so only the
    last sampled model is trained — kept as-is to preserve behavior).

    NOTE(review): relies on cs.get_hyperparameters() returning Hyperparameter
    objects exposing .lower/.upper bounds — confirm against the ConfigSpace
    implementation before use.
    """
    cs = ConfigSpace(dataset)
    hyperparameters = cs.get_hyperparameters()
    num_parameters = len(hyperparameters)
    ranges = np.arange(0, 1, 1/n)

    sampled_architectures = []
    for stratum in range(n):
        config = {}
        for hyperparameter in hyperparameters:
            min_val, max_val = hyperparameter.lower, hyperparameter.upper
            val_range = max_val - min_val
            # Jitter within the stratum to avoid sampling on a fixed grid.
            offset = random.uniform(0, val_range/n)
            config[hyperparameter.name] = min_val + ranges[stratum] * val_range + offset
        sampled_architectures.append(config)

    keys = sampled_architectures[0].keys()

    for config in sampled_architectures:
        model = Network(config)
        model_name = "resnet_{}_{}".format(config["M"], get_nb_convs(config))

        with open("./configs/"+model_name+".config",
                  'w', newline='') as output_file:
            dict_writer = csv.DictWriter(output_file, keys)
            dict_writer.writeheader()
            # BUG FIX: writerows expects a list of row dicts; a single
            # config dict must be written with writerow.
            dict_writer.writerow(config)

    train(model, model_name, LEARNING_RATE, EPOCHS)
42 |
43 |
def random_sample(dataset, n):
    """Randomly samples n architectures from ConfigSpace.

    Each sampled configuration is persisted to ./configs/<name>.config; a
    model is then trained (note: the train call sits outside the loop, so
    only the last sampled model is trained — kept as-is to preserve
    behavior).
    """
    cs = ConfigSpace(dataset)
    sampled_architectures = cs.sample_arch_uniformly(n)

    keys = sampled_architectures[0].keys()

    for config in sampled_architectures:
        model = Network(config)
        model_name = "resnet_{}_{}".format(config["M"], get_nb_convs(config))

        with open("./configs/"+model_name+".config",
                  'w', newline='') as output_file:
            dict_writer = csv.DictWriter(output_file, keys)
            dict_writer.writeheader()
            # BUG FIX: writerows expects a list of row dicts; a single
            # config dict must be written with writerow.
            dict_writer.writerow(config)

    train(model, model_name, LEARNING_RATE, EPOCHS)
62 |
63 |
def ea_sample(dataset, n, n_iter):
    """Samples architectures from ConfigSpace using an evolutionary algorithm.

    BUG FIX: Worker's signature is (cs, eval, optimizer, runs, max_budget,
    n_iter); the old positional call Worker(dataset, cs, 3, n_iter) bound
    dataset->cs, cs->eval and 3->optimizer, and Worker.search() accepts no
    population_size argument.

    NOTE(review): the requested population size n is not forwarded — the
    optimizer owns population sizing; confirm the intended wiring.
    """
    cs = ConfigSpace(dataset)
    worker = Worker(cs=cs, runs=3, n_iter=n_iter)
    worker.search()
70 |
--------------------------------------------------------------------------------
/analognas/analogainas/utils.py:
--------------------------------------------------------------------------------
1 | import torch.nn.functional as F
2 | from scipy.stats import kendalltau
3 | from scipy.stats import spearmanr
4 | import math
5 |
6 | ALPHA = 0.05
7 |
def accuracy_mse(prediction, target, scale=100.0):
    """MSE between scaled (detached) predictions and scaled targets."""
    scaled_pred = prediction.detach() * scale
    scaled_target = target * scale
    return F.mse_loss(scaled_pred, scaled_target)
12 |
13 |
14 | def kendal_correlation(v1, v2):
15 | """Compute the kendal correlation between two variables v1 & v2"""
16 | coef, p = kendalltau(v1, v2)
17 |
18 | if p > ALPHA:
19 | print("Samples are uncorrelated (fail to reject H0)")
20 | return 0
21 | else:
22 | return coef
23 |
24 |
25 | def spearman_correlation(v1, v2):
26 | """Compute the spearman correlation between two variables v1 & v2"""
27 | coef, p = spearmanr(v1, v2)
28 | if p > ALPHA:
29 | print("Samples are uncorrelated (fail to reject H0)")
30 | return 0
31 | else:
32 | return coef
33 |
34 |
def check_ties(v1, v2):
    """Check if two variables contains ties.
    Contains ties --> Spearman
    No ties --> Kendal"""
    shared = set(v1) & set(v2)
    return bool(shared)
44 |
45 |
def truncate(f, n):
    """Keep only the first n decimal digits of f (flooring)."""
    factor = 10 ** n
    return math.floor(f * factor) / factor
48 |
49 |
def nb_rank_error(v1, v2):
    """Compute the pairwise ranking error."""
    # Argsort both variables, then count positions where they disagree.
    order1 = sorted(range(len(v1)), key=v1.__getitem__)
    order2 = sorted(range(len(v2)), key=v2.__getitem__)
    return sum(1 for i in range(len(v1)) if order1[i] != order2[i])
60 |
61 |
def get_nb_params(model):
    """Compute the number of parameters of model."""
    total = 0
    for param in model.parameters():
        total += param.numel()
    return total
65 |
66 |
def get_nb_convs(config):
    """Compute the depth of the model."""
    nb_blocks = config["M"]
    depth = 0
    for i in range(1, nb_blocks + 1):
        block_type = config["convblock%d" % i]
        # BasicBlock (1) has 2 convolutions, Bottleneck (2) has 3.
        if block_type == 1:
            depth += 2 * config["R%d" % i] * config["B%d" % i]
        elif block_type == 2:
            depth += 3 * config["R%d" % i] * config["B%d" % i]
    return depth
77 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/build/lib/analogainas/__init__.py
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/__version__.py:
--------------------------------------------------------------------------------
1 | """Version information."""
2 | __version__ = "0.1.0"
3 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/evaluators/__init__.py:
--------------------------------------------------------------------------------
1 | """Accuracy Evaluation Methods"""
2 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/evaluators/base_evaluator.py:
--------------------------------------------------------------------------------
1 | """Abstract class for base evaluator."""
2 | from analogainas.utils import kendal_correlation
3 |
4 |
5 | """Base class for Accuracy Evaluation Methods."""
class Evaluator:
    """Abstract interface for accuracy-evaluation methods."""

    def __init__(self, model_type=None):
        # Identifier of the underlying surrogate model (e.g. "XGBRanker").
        self.model_type = model_type

    def pre_process(self):
        """Hook called once at the start of the NAS algorithm, before any
        architecture has been queried."""

    def fit(self, x_train, y_train):
        """Train the evaluator.

        Args:
            x_train: list of architectures
            y_train: accuracies or ranks
        """

    def query(self, x_test):
        """Get the accuracy/rank prediction for x_test.

        Args:
            x_test: list of architectures

        Returns:
            Predictions for the architectures
        """

    def get_evaluator_stat(self):
        """Check whether the evaluator needs retraining.

        Returns:
            A dictionary of metrics.
        """
        return {
            "requires_retraining": False,
            "test_accuracy": None,
            "requires_hyperparameters": False,
            "hyperparams": {},
        }

    def set_hyperparams(self, hyperparams):
        """Modifies/sets hyperparameters of the evaluator.

        Args:
            hyperparams: dictionary of hyperparameters.
        """
        self.hyperparams = hyperparams

    def get_hyperparams(self):
        """Get the hyperparameters of the evaluator.

        Returns:
            The hyperparameter dictionary, or None if never set.
        """
        return getattr(self, "hyperparams", None)

    def get_correlation(self, x_test, y_test):
        """Kendall correlation between true labels and predictions on x_test."""
        predicted = self.query(x_test)
        return kendal_correlation(y_test, predicted)
79 |
80 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/evaluators/prepare_data.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | from sklearn.model_selection import train_test_split
4 | from sklearn.preprocessing import StandardScaler
5 |
class AccuracyDataLoader:
    """Load an architecture-accuracy dataset from a CSV file and produce
    scaled train/test splits for surrogate-model training."""

    def __init__(self, dataset_file="dataset_cifar10.csv", transforms=None):
        # Path of the CSV file holding architecture encodings and accuracies.
        self.dataset_file = dataset_file
        self.data = genfromtxt(self.dataset_file, delimiter=',')

        # Applies encoding
        if transforms is not None:
            self.data = transforms(self.data)

    def get_train(self):
        """Return (X_train, y_train), (X_test, y_test), slope.

        NOTE(review): the slices below index ROWS of the loaded array
        (self.data[1:24] is 23 rows, self.data[27] a single row), which looks
        inconsistent with X being architecture features and y accuracies —
        column slices (e.g. self.data[:, 1:24]) may have been intended.
        Confirm against the CSV layout.
        """
        X = self.data[1:24]
        y = self.data[27]
        # Presumably an accuracy-drop measure between two time points — TODO confirm.
        slope = self.data[26] - self.data[-1]

        # Split the data into train and test sets
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

        # Scale the data using StandardScaler
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)

        return (X_train, y_train), (X_test, y_test), slope
29 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/evaluators/xgboost.py:
--------------------------------------------------------------------------------
1 | """XGBoost evaluator."""
2 | from tabnanny import verbose
3 | import xgboost as xgb
4 | import numpy as np
5 | #from base_evaluator import Evaluator
6 |
7 | """
XGBoost Evaluator wrapper class.
9 | """
class XGBoostEvaluator():
    """XGBoost-based surrogate evaluator.

    Wraps three pre-trained XGBoost models:
      - ranker:        ranks architectures by predicted 1-day accuracy,
      - avm_predictor: predicts the Accuracy Variation over one Month (AVM),
      - std_predictor: predicts the 1-day accuracy standard deviation.

    Weights are loaded from the given .bst files when load_weight is True.
    """
    def __init__(
        self,
        model_type="XGBRanker",
        load_weight=True,
        hpo_wrapper=False,
        hparams_from_file=False,
        avm_predictor_path="analogainas/evaluators/weights/xgboost_avm.bst",
        std_predictor_path="analogainas/evaluators/weights/xgboost_std.bst",
        ranker_path="analogainas/evaluators/weights/xgboost_ranker_acc.bst"
    ):
        self.model_type = model_type
        self.hpo_wrapper = hpo_wrapper
        # Training hyperparameters used by fit() when none are supplied.
        self.default_hyperparams = {
            'tree_method': 'gpu_hist',
            'booster': 'gbtree',
            'objective': 'rank:pairwise',
            'random_state': 42,
            'learning_rate': 0.1,
            'colsample_bytree': 0.9,
            'eta': 0.05,
            'max_depth': 6,
            'n_estimators': 110,
            'subsample': 0.75,
            'enable_categorical': True}
        self.hyperparams = None
        self.hparams_from_file = hparams_from_file
        self.load_weight = load_weight
        self.ranker_path = ranker_path
        self.avm_predictor_path = avm_predictor_path
        self.std_predictor_path = std_predictor_path

        self.ranker = self.get_ranker()
        self.avm_predictor = self.get_avm_predictor()
        self.std_predictor = self.get_std_predictor()

    def get_ranker(self):
        """Build the accuracy ranker, optionally loading pre-trained weights."""
        ranker = xgb.XGBRegressor(objective='rank:pairwise')
        if self.load_weight:
            ranker.load_model(self.ranker_path)
        return ranker

    def get_avm_predictor(self):
        """Build the AVM predictor, optionally loading pre-trained weights."""
        avm_predictor = xgb.XGBRegressor()
        if self.load_weight:
            avm_predictor.load_model(self.avm_predictor_path)
        return avm_predictor

    def get_std_predictor(self):
        """Build the std predictor, optionally loading pre-trained weights."""
        std_predictor = xgb.XGBRegressor()
        if self.load_weight:
            std_predictor.load_model(self.std_predictor_path)
        return std_predictor

    def fit(self, x_train, y_train, train_info_file="xgboost.txt", hyperparameters=None, epochs=500, verbose=True):
        """Train the evaluator, save it to train_info_file, and return the
        'rank:ndcg' evaluation history."""
        # BUG FIX: compare to None with `is`, and remember the effective
        # hyperparameters — previously self.hyperparams stayed None on the
        # default path and None was then passed to train() below.
        if hyperparameters is None:
            self.hyperparams = self.default_hyperparams
        else:
            self.hyperparams = hyperparameters
        # NOTE(review): get_model is not defined on this class — confirm
        # where the untrained model is supposed to come from.
        self.evaluator = self.get_model(self.hyperparams)

        progress = dict()
        d_train = xgb.DMatrix(x_train, y_train)
        watchlist = [(d_train, 'rank:ndcg')]
        self.evaluator = self.evaluator.train(self.hyperparams, d_train, epochs, watchlist, evals_result=progress)

        # SAVE MODEL
        self.evaluator.save_model(train_info_file)

        return progress['rank:ndcg']

    def query_pop(self, P):
        """Predict (rank score, AVM) for a population of (arch_dict, ...) entries."""
        x_test = [list(a[0].values()) for a in P]
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def query(self, P):
        """Predict (rank score, AVM) for a single architecture entry P[0]."""
        x_test = [list(P[0].values())]
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def dcg_at_k(self, r, k, method=0):
        """Discounted cumulative gain of relevance scores r, cut at rank k.

        method 0 gives the first item weight 1; method 1 discounts from the
        first rank. Returns 0. for empty input or an unknown method.
        """
        # BUG FIX: np.asfarray was removed in NumPy 2.0; asarray with a float
        # dtype is the documented replacement.
        r = np.asarray(r, dtype=float)[:k]
        if r.size:
            if method == 0:
                return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
            elif method == 1:
                return np.sum(r / np.log2(np.arange(2, r.size + 2)))
        return 0.

    def ndcg_at_k(self, r, k, method=0):
        """Normalized DCG at rank k (0. when the ideal DCG is zero)."""
        dcg_max = self.dcg_at_k(sorted(r, reverse=True), k, method)
        if not dcg_max:
            return 0.
        return self.dcg_at_k(r, k, method) / dcg_max

    def ndcg_scorer(self, estimator, X, y_true):
        """Scikit-learn-style scorer: NDCG of y_true ordered by the estimator's predictions."""
        y_pred = estimator.predict(X)
        # Sort the true scores based on the predictions.
        sorted_scores = [y for _, y in sorted(zip(y_pred, y_true), reverse=True)]
        return self.ndcg_at_k(sorted_scores, k=len(y_true))
115 |
116 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/search_algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | """A Worker is the base NAS Search Strategy class."""
2 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/search_algorithms/bo.py:
--------------------------------------------------------------------------------
1 | """Bayesian Optimizer."""
2 | import numpy as np
3 |
class BOptimizer:
    """Bayesian-optimization helper built around a surrogate model.

    NOTE(review): several attributes referenced below (self.X, self.y,
    self.evaluate_func, self.random_state, self.search_space, self.rs_search)
    are never initialized in this class — confirm they are attached
    externally before the corresponding methods are called.
    """

    def __init__(self, max_iter):
        # Maximum number of optimization iterations used by maximize().
        self.max_iter = max_iter

    def surrogate(self, model, X):
        """Surrogate (approximation) of the objective: predict mean and std on X."""
        return model.predict(X, return_std=True)

    def acquisition(self, X, Xsamples, model):
        """Probability-of-improvement-style acquisition over candidate samples.

        NOTE(review): this returns a z-score (mu - best) / std; no CDF is
        applied, so it is not a true probability of improvement — confirm
        intended.
        """
        # Best surrogate score found so far.
        yhat, _ = self.surrogate(model, X)
        best = max(yhat)
        # Mean and stdev of the candidates via the surrogate.
        mu, std = self.surrogate(model, Xsamples)
        mu = mu[:, 0]
        probs = (mu - best) / (std + 1E-9)
        return probs

    def maximize(self):
        """Run the optimization loop and return the best (x, y) observed."""
        # BUG FIX: this loop previously read the undefined attribute
        # self.n_iter; the constructor stores the budget as self.max_iter.
        for _ in range(self.max_iter):
            x_next = self.propose_next_point()
            y_next = self.evaluate_func(x_next)

            self.X.append(x_next)
            self.y.append(y_next)

        best_idx = np.argmax(self.y)
        best_x = self.X[best_idx]
        best_y = self.y[best_idx]
        return best_x, best_y

    def propose_next_point(self):
        """Pick, among random candidates, the one maximizing the acquisition.

        NOTE(review): self.acquisition is called below with a single argument
        but is defined with three — confirm the intended call signature.
        """
        x_candidates = self.random_state.uniform(
            low=self.search_space[:, 0],
            high=self.search_space[:, 1],
            size=(100, self.search_space.shape[0])
        )

        best_x = None
        best_acquisition = float('-inf')

        for x in x_candidates:
            acquisition = self.acquisition(x)
            if acquisition > best_acquisition:
                best_x = x
                best_acquisition = acquisition

        return best_x

    def gaussian_process_regression(self):
        """Placeholder for a Gaussian-process surrogate fit: returns (mean, std)."""
        mean = 0.0
        std = 1.0
        return mean, std

    def run(self, X, y, model):
        """Optimize the acquisition over random samples; return the best sample."""
        # Random search: generate random candidate samples.
        Xsamples = self.rs_search(100)
        Xsamples = Xsamples.reshape(len(Xsamples), 1)
        # Score every candidate and return the one with the largest score.
        scores = self.acquisition(X, Xsamples, model)
        ix = np.argmax(scores)
        return Xsamples[ix, 0]
73 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/search_algorithms/ea.py:
--------------------------------------------------------------------------------
1 | """Classical Evolutionary Algorithm."""
2 | import random
3 | from analogainas.search_spaces.sample import random_sample
4 |
class EAOptimizer:
    """Classical evolutionary algorithm over the architecture search space."""

    def __init__(self, max_iter, population_size, mutation_prob):
        self.max_iter = max_iter            # number of generations
        self.population_size = population_size
        self.mutation_prob = mutation_prob  # probability of mutating an individual

    def mutate(self, architecture):
        """Return a resampled architecture with probability mutation_prob,
        otherwise the architecture unchanged.

        BUG FIX: the comparison was inverted (`>`), so individuals were
        mutated with probability 1 - mutation_prob.
        """
        if random.random() < self.mutation_prob:
            architecture = random_sample()
        return architecture

    def run(self):
        """Evolve the population and return the best architecture found.

        NOTE(review): self.cs and self.evaluation are never set in this
        class — confirm they are injected before run() is called.
        """
        population = [self.cs.sample() for _ in range(self.population_size)]
        best_f = 0.0
        best_x = [None] * self.population_size

        for _ in range(self.max_iter):
            new_x = [self.mutate(x) for x in population]
            new_f = [self.evaluation(x) for x in new_x]

            for j in range(self.population_size):
                if new_f[j] > best_f:
                    best_f = new_f[j]
                    best_x = new_x[j]

            population = new_x

        return {'best_x': best_x, 'best_f': best_f}
33 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/search_algorithms/ea_optimized.py:
--------------------------------------------------------------------------------
1 | """Optimized Evolutionary Algorithm - AnalogNAS."""
2 | import random
3 | from analogainas.search_spaces.sample import random_sample
4 |
class EAOptimizer:
    """
    Evolutionary Algorithm with optimized mutations and robustness constraint.

    The NAS problem is cast to:
            Max Acc(arch)
            s.t nb_param(arch) < max_nb_param
                drop(arch) < 10

    Args:
        nb_iter: maximum number of iterations.
        population_size: number of architectures in the population.

        mutation_prob_width: Mutation probability of modifying the width.
            - Increase/Decrease widening factor of one block.
            - Add/Remove branches.
            - Increase/Decrease Initial output channel size.

        mutation_prob_depth: Mutation probability for modifying the depth.
            - Increase/Decrease the number of residual blocks.
            - Modify the type of convolution from BasicBlock to BottleneckBlock.

        mutation_prob_other: Mutation probability for applying various other transformations:
            - Add/Remove a residual connection.
            - Modify initial kernel size.

        max_nb_param: constraint applied to the number of parameters.
        T_AVM: constraint applied on the predicted AVM (robustness check).
    """
    def __init__(self,
                 surrogate,
                 nb_iter=200,
                 population_size=50,
                 mutation_prob_width=0.8,
                 mutation_prob_depth=0.8,
                 mutation_prob_other=0.6,
                 max_nb_param=1,
                 T_AVM=10):

        assert population_size > 10, f"Population size needs to be at least 10, got {population_size}"

        self.surrogate = surrogate
        self.nb_iter = nb_iter
        # Only a tenth of the nominal population is actually evolved.
        self.population_size = int(population_size / 10)
        self.mutation_prob_width = mutation_prob_width
        self.mutation_prob_depth = mutation_prob_depth
        self.mutation_prob_other = mutation_prob_other
        self.max_nb_param = max_nb_param
        self.T_AVM = T_AVM

    def mutate(self, cs, architecture):
        """Apply one mutation type at random: 40% width, 40% depth, 20% other."""
        r = random.random()
        if r < 0.4:
            architecture = self.mutate_width(cs, architecture)
        elif r < 0.8:
            architecture = self.mutate_depth(cs, architecture)
        else:
            architecture = self.mutate_other(cs, architecture)

        return architecture

    def mutate_width(self, cs, architecture):
        """Resample the architecture with probability mutation_prob_width."""
        if random.random() < self.mutation_prob_width:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_depth(self, cs, architecture):
        """Resample the architecture with probability mutation_prob_depth."""
        if random.random() < self.mutation_prob_depth:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_other(self, cs, architecture):
        """Resample the architecture with probability mutation_prob_other."""
        if random.random() < self.mutation_prob_other:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def generate_initial_population(self, cs):
        """Sample an initial population that satisfies the AVM constraint.

        BUG FIX: list multiplication previously replicated the *same*
        sampled architecture population_size times; each individual is now
        sampled independently. The surrogate is also re-queried on every
        resampling round instead of reusing the initial (stale) slopes.
        """
        P = [cs.sample_arch_uniformly(1) for _ in range(self.population_size)]

        while not self.satisfied_constrained(P):
            _, slope = self.surrogate.query_pop(P)
            for i, s in enumerate(slope):
                if s > self.T_AVM:
                    P[i] = cs.sample_arch_uniformly(1)
        return P

    def satisfied_constrained(self, P):
        """Return True when every predicted AVM slope is within T_AVM."""
        _, slope = self.surrogate.query_pop(P)
        for s in slope:
            if s > self.T_AVM:
                return False
        return True

    def run(self, cs):
        """Evolve the population for nb_iter generations; return (best arch, best acc)."""
        P = self.generate_initial_population(cs)
        best_f = 0.0
        best_x = None

        for i in range(self.nb_iter):
            new_P = []
            for a in P:
                new_a = self.mutate(cs, a)
                new_P.append(new_a)
                acc, _ = self.surrogate.query(new_a)
                # BUG FIX: pair each accuracy with its own architecture;
                # previously the generation's best score was always
                # attributed to the *last* mutated architecture.
                if acc > best_f:
                    best_f = acc
                    best_x = new_a[0]

            P = new_P

            print("ITERATION {} completed: best acc {}".format(i, best_f))

        return best_x, best_f
121 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/search_algorithms/worker.py:
--------------------------------------------------------------------------------
1 | """Search Experiment Runner."""
2 | import os
3 | import csv
4 | import numpy as np
5 |
6 | from analogainas.search_spaces.config_space import ConfigSpace
7 | from analogainas.search_spaces.resnet_macro_architecture import Network
8 |
9 | """Wrapper class to launch NAS search."""
class Worker():
    """Launches the NAS search: runs the optimizer several times, persists
    each run's best configuration, and tracks the overall best result.

    Args:
        cs: configuration (search) space to explore.
        eval: evaluator handed to the worker (name kept for API compatibility,
            although it shadows the builtin).
        optimizer: search strategy exposing run(config_space) -> (config, acc).
        runs: number of independent search repetitions.
        max_budget: search budget (stored; not used directly here).
        n_iter: number of iterations (stored; not used directly here).
    """
    def __init__(self,
                 cs: ConfigSpace=None,
                 eval = None,
                 optimizer=None,
                 runs=5,
                 max_budget=1,
                 n_iter=100):
        self.max_budget = max_budget
        self.n_iter = n_iter
        self.config_space = cs
        self.evaluation = eval
        self.optimizer = optimizer
        self.runs = runs
        self.best_config = None
        self.best_acc = 0
        self.std_err = 0

    @property
    def best_arch(self):
        """Instantiate the network for the best configuration found so far."""
        return Network(self.best_config)

    def search(self):
        """Run the optimizer self.runs times; save each run's best config to CSV."""
        # BUG FIX: os.mkdir raised FileExistsError on any second invocation.
        os.makedirs("results", exist_ok=True)
        print("Result directory created.\n")

        results = []
        for i in range(self.runs):
            print("Search {} started".format(i))
            best_config, best_acc = self.optimizer.run(self.config_space)

            with open('results/best_results_{}.csv'.format(i), 'w') as f:
                for key in best_config.keys():
                    f.write("%s,%s\n"%(key,best_config[key]))

            results.append(best_acc)
            if best_acc > self.best_acc:
                self.best_config = best_config
                self.best_acc = best_acc

            print("Best Acc = {}".format(best_acc))
        # Standard error of the mean accuracy across runs.
        # NOTE(review): with runs == 1, ddof=1 yields NaN — confirm acceptable.
        self.std_err = np.std(results, ddof=1) / np.sqrt(np.size(results))

        print("SEARCH ENDED")

    def result_summary(self):
        """Print the best accuracy and the standard error across runs."""
        print("Best architecture accuracy: ", self.best_acc)
        # BUG FIX: previously printed best_acc again instead of std_err.
        print(f"Standard deviation of accuracy over {self.runs} runs: {self.std_err}")
58 |
--------------------------------------------------------------------------------
/analognas/build/lib/analogainas/utils.py:
--------------------------------------------------------------------------------
1 | import torch.nn.functional as F
2 | from scipy.stats import kendalltau
3 | from scipy.stats import spearmanr
4 | import math
5 |
6 | ALPHA = 0.05
7 |
def accuracy_mse(prediction, target, scale=100.0):
    """Mean-squared error between prediction and target, both rescaled by scale.

    The prediction is detached from the autograd graph before scaling.
    """
    scaled_prediction = prediction.detach() * scale
    scaled_target = target * scale
    return F.mse_loss(scaled_prediction, scaled_target)
12 |
13 |
def kendal_correlation(v1, v2):
    """Kendall rank correlation between two variables v1 & v2.

    Returns 0 when the null hypothesis of no correlation cannot be
    rejected at significance level ALPHA, otherwise the coefficient.
    """
    coef, p_value = kendalltau(v1, v2)

    if p_value > ALPHA:
        print("Samples are uncorrelated (fail to reject H0)")
        return 0
    return coef
23 |
24 |
def spearman_correlation(v1, v2):
    """Spearman rank correlation between two variables v1 & v2.

    Returns 0 when the null hypothesis of no correlation cannot be
    rejected at significance level ALPHA, otherwise the coefficient.
    """
    coef, p_value = spearmanr(v1, v2)
    if p_value > ALPHA:
        print("Samples are uncorrelated (fail to reject H0)")
        return 0
    return coef
33 |
34 |
def check_ties(v1, v2):
    """Check whether either variable contains ties (repeated values).

    Contains ties --> Spearman
    No ties --> Kendal

    BUG FIX: the previous implementation tested the intersection of v1 and
    v2, which detects values shared *across* the two variables, not ties
    *within* either one. A tie is a duplicated value inside one variable.
    """
    return len(set(v1)) < len(v1) or len(set(v2)) < len(v2)
44 |
45 |
def truncate(f, n):
    """Truncate the float f to n decimal places (flooring, no rounding)."""
    factor = 10 ** n
    return math.floor(f * factor) / factor
48 |
49 |
def nb_rank_error(v1, v2):
    """Compute the pairwise ranking error.

    Counts the positions at which the argsort orderings of v1 and v2
    disagree.
    """
    order_v1 = sorted(range(len(v1)), key=v1.__getitem__)
    order_v2 = sorted(range(len(v2)), key=v2.__getitem__)
    return sum(1 for i in range(len(v1)) if order_v1[i] != order_v2[i])
60 |
61 |
def get_nb_params(model):
    """Compute the number of parameters of model."""
    total = 0
    for param in model.parameters():
        total += param.numel()
    return total
65 |
66 |
def get_nb_convs(config):
    """Compute the depth (number of convolutions) encoded by config.

    For each of the M stages, a type-1 conv block contributes 2 convs and a
    type-2 block 3 convs, multiplied by the stage's R and B factors.
    """
    nb_conv = 0
    for stage in range(1, config["M"] + 1):
        block_type = config["convblock%d" % stage]
        units = config["R%d" % stage] * config["B%d" % stage]
        if block_type == 1:
            nb_conv += 2 * units
        if block_type == 2:
            nb_conv += 3 * units
    return nb_conv
77 |
--------------------------------------------------------------------------------
/analognas/dist/analogainas-0.1.0-py3.8.egg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/dist/analogainas-0.1.0-py3.8.egg
--------------------------------------------------------------------------------
/analognas/dist/analogainas-0.1.0-py3.9.egg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/dist/analogainas-0.1.0-py3.9.egg
--------------------------------------------------------------------------------
/analognas/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/analognas/docs/_build/doctrees/environment.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/environment.pickle
--------------------------------------------------------------------------------
/analognas/docs/_build/doctrees/getting_started.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/getting_started.doctree
--------------------------------------------------------------------------------
/analognas/docs/_build/doctrees/index.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/index.doctree
--------------------------------------------------------------------------------
/analognas/docs/_build/doctrees/install.doctree:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/install.doctree
--------------------------------------------------------------------------------
/analognas/docs/_build/html/.buildinfo:
--------------------------------------------------------------------------------
1 | # Sphinx build info version 1
2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
3 | config: 62f30f4bb49045b5aa46b9c411b8b5be
4 | tags: 645f666f9bcd5a90fca523b33c5a78b7
5 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_sources/getting_started.rst.txt:
--------------------------------------------------------------------------------
1 | Tutorial
2 | ========
3 |
*AnalogAINAS* is a framework that aims at building analog-aware efficient deep learning models. AnalogNAS is built on top of the `AIHWKIT <https://github.com/IBM/aihwkit>`_. IBM Analog Hardware Acceleration Kit (AIHWKIT) is an open source Python toolkit for exploring and using the capabilities of in-memory computing devices in the context of artificial intelligence.
5 |
6 | In a high-level AnalogAINAS consists of 4 main building blocks which (can) interact with each other:
7 |
8 | * Configuration spaces: a search space of architectures targeting a specific dataset.
9 | * Evaluator: a ML predictor model to predict:
10 | * 1-day Accuracy: the evaluator models the drift effect that is encountered in Analog devices. The accuracy after 1 day of drift is then predicted and used as an objective to maximize.
11 | * The Accuracy Variation for One Month (AVM): The difference between the accuracy after 1 month and the accuracy after 1 sec.
12 | * The 1-day accuracy standard deviation: The stochasticity of the noise induces different variation of the model's accuracy depending on its architecture.
* Optimizer: an optimization strategy such as an evolutionary algorithm or Bayesian optimization.
14 | * Worker: A global object that runs the architecture search loop and the final network training pipeline
15 |
16 | Create a Configuration Space
17 | ----------------------------
18 |
19 | AnalogNAS presents a general search space composed of ResNet-like architectures.
20 |
The macro-architecture defined in the file ``search_spaces/resnet_macro_architecture.py`` is customizable to any image classification dataset, given an input shape and output classes.
22 |
23 | .. warning::
24 | The hyperparameters in the configuration space should have a unique name ID each.
25 |
26 | Evaluator
27 | ---------
28 |
29 | To speed up the search, we built a machine learning predictor to evaluate the accuracy and robustness of any given architecture from the configuration space.
30 |
31 | Search Optimizer and Worker
32 | ---------------------------
33 |
34 | In this example, we will use evolutionary search to look for the best architecture in CS using our evaluator.
35 |
36 | ::
37 |
38 | from analogainas.search_algorithms.ea_optimized import EAOptimizer
39 | from analogainas.search_algorithms.worker import Worker
40 |
41 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10)
42 |
43 | NB_RUN = 2
44 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN)
45 |
46 | worker.search()
47 |
48 | worker.result_summary()
49 |
50 |
51 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_sources/index.rst.txt:
--------------------------------------------------------------------------------
1 | AnalogNAS
2 | =========
3 |
4 | .. toctree::
5 | :maxdepth: 3
6 | :caption: Get started
7 | :hidden:
8 |
9 | install
10 | getting_started
11 |
12 |
13 | .. toctree::
14 | :maxdepth: 3
15 | :caption: References
16 | :hidden:
17 |
18 | api_references
19 | references
20 |
21 | AnalogNAS is a Python library designed to empower researchers and practitioners in efficiently exploring and optimizing neural network architectures specifically for in-memory computing scenarios. AnalogNAS is built on top of the IBM Analog Hardware Acceleration Kit that enables efficient hardware-aware training with simulated noise injection on multiple IMC devices. By capitalizing on the advantages of in-memory computing, AnalogNAS opens new avenues for discovering architectures that can fully exploit the capabilities of this emerging computing paradigm.
22 |
23 | AnalogNAS offers a comprehensive set of features and functionalities that facilitate the neural architecture search process. From seamlessly exploring a vast space of architectural configurations to fine-tuning models for optimal performance, AnalogNAS provides a versatile framework that accelerates the discovery of efficient and effective neural network architectures for in-memory computing.
24 |
25 | .. warning::
26 | This library is currently in beta and under active development.
27 | Please be mindful of potential issues and keep an eye for improvements,
28 | new features and bug fixes in upcoming versions.
29 |
30 | Features
31 | --------
32 |
33 |
34 | - A customizable resnet-like search space, allowing to target CIFAR-10, Visual Wake Words, and Keyword Spotting
- A configuration space object allows adding any number or type of architecture and training hyperparameters to the search
36 | - An analog-specific evaluator which includes:
37 |
- A 1-day accuracy ranker
- A 1-month accuracy variation estimator
40 | - A 1-day standard deviation estimator
41 |
42 | - A flexible search algorithm, enabling the implementation and extension of state-of-the-art NAS methods.
43 |
44 | Installation
45 | ------------
46 |
47 | Install analogNAS by running:
48 |
49 | pip install analogainas
50 |
51 |
52 | How to cite
53 | -----------
54 |
55 | In case you are using the *AnalogNAS* for
56 | your research, please cite:
57 |
58 | .. note::
59 |
60 | Benmeziane, H., Lammie, C., Boybat, I., Rasch, M., Gallo, M. L., Tsai, H., ... & Maghraoui, K. E. (2023). AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing. IEEE Edge 2023.
61 |
62 | https://arxiv.org/abs/2305.10459
63 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_sources/install.rst.txt:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | The preferred way to install this package is by using the `Python package index`_::
5 |
6 | pip install analogainas
7 |
8 | For ease of installation, install aihwkit library separately, refer to `AIHWKit installation`_:
9 |
The package requires the following runtime libraries to be installed in your
11 | system:
12 |
13 | * `OpenBLAS`_: 0.3.3+
14 | * `CUDA Toolkit`_: 9.0+ (only required for the GPU-enabled simulator [#f1]_)
15 |
16 | .. [#f1] Note that GPU support is not available in OSX, as it depends on a
17 | platform that has official CUDA support.
18 |
19 | .. _AIHWKit installation: https://aihwkit.readthedocs.io/en/latest/install.html
20 | .. _Python package index: https://pypi.org/project/analogainas/
21 | .. _OpenBLAS: https://www.openblas.net
22 | .. _CUDA Toolkit: https://developer.nvidia.com/accelerated-computing-toolkit
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/_sphinx_javascript_frameworks_compat.js:
--------------------------------------------------------------------------------
1 | /* Compatability shim for jQuery and underscores.js.
2 | *
3 | * Copyright Sphinx contributors
4 | * Released under the two clause BSD licence
5 | */
6 |
7 | /**
8 | * small helper function to urldecode strings
9 | *
10 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL
11 | */
jQuery.urldecode = function(x) {
  if (!x) {
    return x  // null/undefined/empty: nothing to decode, pass through
  }
  // '+' encodes a space in query strings; decodeURIComponent does not handle it
  return decodeURIComponent(x.replace(/\+/g, ' '));
};
18 |
19 | /**
20 | * small helper function to urlencode strings
21 | */
22 | jQuery.urlencode = encodeURIComponent;
23 |
24 | /**
25 | * This function returns the parsed url parameters of the
26 | * current request. Multiple values per key are supported,
27 | * it will always return arrays of strings for the value parts.
28 | */
jQuery.getQueryParameters = function(s) {
  // Default to the current page's query string.
  if (typeof s === 'undefined')
    s = document.location.search;
  // Drop everything up to and including '?', then split into key=value pairs.
  var parts = s.substr(s.indexOf('?') + 1).split('&');
  var result = {};
  for (var i = 0; i < parts.length; i++) {
    var tmp = parts[i].split('=', 2);
    var key = jQuery.urldecode(tmp[0]);
    var value = jQuery.urldecode(tmp[1]);
    // Collect repeated keys into an array of values.
    if (key in result)
      result[key].push(value);
    else
      result[key] = [value];
  }
  return result;
};
45 |
46 | /**
47 | * highlight a given string on a jquery object by wrapping it in
48 | * span elements with the given class name.
49 | */
jQuery.fn.highlightText = function(text, className) {
  // Recursively walk the DOM below each matched element and wrap the first
  // occurrence of `text` in every text node with a highlight span.
  function highlight(node, addItems) {
    if (node.nodeType === 3) {  // text node
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      if (pos >= 0 &&
          !jQuery(node.parentNode).hasClass(className) &&
          !jQuery(node.parentNode).hasClass("nohighlight")) {
        var span;
        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
        if (isInSVG) {
          // SVG text cannot contain an HTML <span>; use a namespaced <tspan>.
          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
        } else {
          span = document.createElement("span");
          span.className = className;
        }
        // Split the text node into three parts: before / highlighted / after.
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        node.nodeValue = val.substr(0, pos);
        if (isInSVG) {
          // tspan has no background, so draw a rect sized to the parent's bbox.
          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
          var bbox = node.parentElement.getBBox();
          rect.x.baseVal.value = bbox.x;
          rect.y.baseVal.value = bbox.y;
          rect.width.baseVal.value = bbox.width;
          rect.height.baseVal.value = bbox.height;
          rect.setAttribute('class', className);
          // Defer insertion until after the walk to avoid mutating mid-traversal.
          addItems.push({
            "parent": node.parentNode,
            "target": rect});
        }
      }
    }
    else if (!jQuery(node).is("button, select, textarea")) {
      // Element node: recurse into children, skipping form controls.
      jQuery.each(node.childNodes, function() {
        highlight(this, addItems);
      });
    }
  }
  var addItems = [];
  var result = this.each(function() {
    highlight(this, addItems);
  });
  // Insert the collected SVG highlight rects now that traversal is done.
  for (var i = 0; i < addItems.length; ++i) {
    jQuery(addItems[i].parent).before(addItems[i].target);
  }
  return result;
};
100 |
101 | /*
102 | * backward compatibility for jQuery.browser
103 | * This will be supported until firefox bug is fixed.
104 | */
105 | if (!jQuery.browser) {
106 | jQuery.uaMatch = function(ua) {
107 | ua = ua.toLowerCase();
108 |
109 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
110 | /(webkit)[ \/]([\w.]+)/.exec(ua) ||
111 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
112 | /(msie) ([\w.]+)/.exec(ua) ||
113 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
114 | [];
115 |
116 | return {
117 | browser: match[ 1 ] || "",
118 | version: match[ 2 ] || "0"
119 | };
120 | };
121 | jQuery.browser = {};
122 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
123 | }
124 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/classic.css:
--------------------------------------------------------------------------------
1 | /*
2 | * classic.css_t
3 | * ~~~~~~~~~~~~~
4 | *
5 | * Sphinx stylesheet -- classic theme.
6 | *
7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 |
12 | @import url("basic.css");
13 |
14 | /* -- page layout ----------------------------------------------------------- */
15 |
16 | html {
17 | /* CSS hack for macOS's scrollbar (see #1125) */
18 | background-color: #FFFFFF;
19 | }
20 |
21 | body {
22 | font-family: sans-serif;
23 | font-size: 100%;
24 | background-color: #11303d;
25 | color: #000;
26 | margin: 0;
27 | padding: 0;
28 | }
29 |
30 | div.document {
31 | display: flex;
32 | background-color: #1c4e63;
33 | }
34 |
35 | div.documentwrapper {
36 | float: left;
37 | width: 100%;
38 | }
39 |
40 | div.bodywrapper {
41 | margin: 0 0 0 230px;
42 | }
43 |
44 | div.body {
45 | background-color: #ffffff;
46 | color: #000000;
47 | padding: 0 20px 30px 20px;
48 | }
49 |
50 | div.footer {
51 | color: #ffffff;
52 | width: 100%;
53 | padding: 9px 0 9px 0;
54 | text-align: center;
55 | font-size: 75%;
56 | }
57 |
58 | div.footer a {
59 | color: #ffffff;
60 | text-decoration: underline;
61 | }
62 |
63 | div.related {
64 | background-color: #133f52;
65 | line-height: 30px;
66 | color: #ffffff;
67 | }
68 |
69 | div.related a {
70 | color: #ffffff;
71 | }
72 |
73 | div.sphinxsidebar {
74 | }
75 |
76 | div.sphinxsidebar h3 {
77 | font-family: 'Trebuchet MS', sans-serif;
78 | color: #ffffff;
79 | font-size: 1.4em;
80 | font-weight: normal;
81 | margin: 0;
82 | padding: 0;
83 | }
84 |
85 | div.sphinxsidebar h3 a {
86 | color: #ffffff;
87 | }
88 |
89 | div.sphinxsidebar h4 {
90 | font-family: 'Trebuchet MS', sans-serif;
91 | color: #ffffff;
92 | font-size: 1.3em;
93 | font-weight: normal;
94 | margin: 5px 0 0 0;
95 | padding: 0;
96 | }
97 |
98 | div.sphinxsidebar p {
99 | color: #ffffff;
100 | }
101 |
102 | div.sphinxsidebar p.topless {
103 | margin: 5px 10px 10px 10px;
104 | }
105 |
106 | div.sphinxsidebar ul {
107 | margin: 10px;
108 | padding: 0;
109 | color: #ffffff;
110 | }
111 |
112 | div.sphinxsidebar a {
113 | color: #98dbcc;
114 | }
115 |
116 | div.sphinxsidebar input {
117 | border: 1px solid #98dbcc;
118 | font-family: sans-serif;
119 | font-size: 1em;
120 | }
121 |
122 |
123 |
124 | /* -- hyperlink styles ------------------------------------------------------ */
125 |
126 | a {
127 | color: #355f7c;
128 | text-decoration: none;
129 | }
130 |
131 | a:visited {
132 | color: #355f7c;
133 | text-decoration: none;
134 | }
135 |
136 | a:hover {
137 | text-decoration: underline;
138 | }
139 |
140 |
141 |
142 | /* -- body styles ----------------------------------------------------------- */
143 |
144 | div.body h1,
145 | div.body h2,
146 | div.body h3,
147 | div.body h4,
148 | div.body h5,
149 | div.body h6 {
150 | font-family: 'Trebuchet MS', sans-serif;
151 | background-color: #f2f2f2;
152 | font-weight: normal;
153 | color: #20435c;
154 | border-bottom: 1px solid #ccc;
155 | margin: 20px -20px 10px -20px;
156 | padding: 3px 0 3px 10px;
157 | }
158 |
159 | div.body h1 { margin-top: 0; font-size: 200%; }
160 | div.body h2 { font-size: 160%; }
161 | div.body h3 { font-size: 140%; }
162 | div.body h4 { font-size: 120%; }
163 | div.body h5 { font-size: 110%; }
164 | div.body h6 { font-size: 100%; }
165 |
166 | a.headerlink {
167 | color: #c60f0f;
168 | font-size: 0.8em;
169 | padding: 0 4px 0 4px;
170 | text-decoration: none;
171 | }
172 |
173 | a.headerlink:hover {
174 | background-color: #c60f0f;
175 | color: white;
176 | }
177 |
178 | div.body p, div.body dd, div.body li, div.body blockquote {
179 | text-align: justify;
180 | line-height: 130%;
181 | }
182 |
183 | div.admonition p.admonition-title + p {
184 | display: inline;
185 | }
186 |
187 | div.admonition p {
188 | margin-bottom: 5px;
189 | }
190 |
191 | div.admonition pre {
192 | margin-bottom: 5px;
193 | }
194 |
195 | div.admonition ul, div.admonition ol {
196 | margin-bottom: 5px;
197 | }
198 |
199 | div.note {
200 | background-color: #eee;
201 | border: 1px solid #ccc;
202 | }
203 |
204 | div.seealso {
205 | background-color: #ffc;
206 | border: 1px solid #ff6;
207 | }
208 |
209 | nav.contents,
210 | aside.topic,
211 | div.topic {
212 | background-color: #eee;
213 | }
214 |
215 | div.warning {
216 | background-color: #ffe4e4;
217 | border: 1px solid #f66;
218 | }
219 |
220 | p.admonition-title {
221 | display: inline;
222 | }
223 |
224 | p.admonition-title:after {
225 | content: ":";
226 | }
227 |
228 | pre {
229 | padding: 5px;
230 | background-color: unset;
231 | color: unset;
232 | line-height: 120%;
233 | border: 1px solid #ac9;
234 | border-left: none;
235 | border-right: none;
236 | }
237 |
238 | code {
239 | background-color: #ecf0f3;
240 | padding: 0 1px 0 1px;
241 | font-size: 0.95em;
242 | }
243 |
244 | th, dl.field-list > dt {
245 | background-color: #ede;
246 | }
247 |
248 | .warning code {
249 | background: #efc2c2;
250 | }
251 |
252 | .note code {
253 | background: #d6d6d6;
254 | }
255 |
256 | .viewcode-back {
257 | font-family: sans-serif;
258 | }
259 |
260 | div.viewcode-block:target {
261 | background-color: #f4debf;
262 | border-top: 1px solid #ac9;
263 | border-bottom: 1px solid #ac9;
264 | }
265 |
266 | div.code-block-caption {
267 | color: #efefef;
268 | background-color: #1c4e63;
269 | }
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/badge_only.css:
--------------------------------------------------------------------------------
1 | .clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.eot
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff2
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/custom.css:
--------------------------------------------------------------------------------
1 | /* This file intentionally left blank. */
2 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/doctools.js:
--------------------------------------------------------------------------------
1 | /*
2 | * doctools.js
3 | * ~~~~~~~~~~~
4 | *
5 | * Base JavaScript utilities for all Sphinx HTML documentation.
6 | *
7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 | "use strict";
12 |
13 | const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
14 | "TEXTAREA",
15 | "INPUT",
16 | "SELECT",
17 | "BUTTON",
18 | ]);
19 |
20 | const _ready = (callback) => {
21 | if (document.readyState !== "loading") {
22 | callback();
23 | } else {
24 | document.addEventListener("DOMContentLoaded", callback);
25 | }
26 | };
27 |
28 | /**
29 | * Small JavaScript module for the documentation.
30 | */
31 | const Documentation = {
32 | init: () => {
33 | Documentation.initDomainIndexTable();
34 | Documentation.initOnKeyListeners();
35 | },
36 |
37 | /**
38 | * i18n support
39 | */
40 | TRANSLATIONS: {},
41 | PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
42 | LOCALE: "unknown",
43 |
44 | // gettext and ngettext don't access this so that the functions
45 | // can safely bound to a different name (_ = Documentation.gettext)
46 | gettext: (string) => {
47 | const translated = Documentation.TRANSLATIONS[string];
48 | switch (typeof translated) {
49 | case "undefined":
50 | return string; // no translation
51 | case "string":
52 | return translated; // translation exists
53 | default:
54 | return translated[0]; // (singular, plural) translation tuple exists
55 | }
56 | },
57 |
58 | ngettext: (singular, plural, n) => {
59 | const translated = Documentation.TRANSLATIONS[singular];
60 | if (typeof translated !== "undefined")
61 | return translated[Documentation.PLURAL_EXPR(n)];
62 | return n === 1 ? singular : plural;
63 | },
64 |
65 | addTranslations: (catalog) => {
66 | Object.assign(Documentation.TRANSLATIONS, catalog.messages);
67 | Documentation.PLURAL_EXPR = new Function(
68 | "n",
69 | `return (${catalog.plural_expr})`
70 | );
71 | Documentation.LOCALE = catalog.locale;
72 | },
73 |
74 | /**
75 | * helper function to focus on search bar
76 | */
77 | focusSearchBar: () => {
78 | document.querySelectorAll("input[name=q]")[0]?.focus();
79 | },
80 |
81 | /**
82 | * Initialise the domain index toggle buttons
83 | */
84 | initDomainIndexTable: () => {
85 | const toggler = (el) => {
86 | const idNumber = el.id.substr(7);
87 | const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
88 | if (el.src.substr(-9) === "minus.png") {
89 | el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
90 | toggledRows.forEach((el) => (el.style.display = "none"));
91 | } else {
92 | el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
93 | toggledRows.forEach((el) => (el.style.display = ""));
94 | }
95 | };
96 |
97 | const togglerElements = document.querySelectorAll("img.toggler");
98 | togglerElements.forEach((el) =>
99 | el.addEventListener("click", (event) => toggler(event.currentTarget))
100 | );
101 | togglerElements.forEach((el) => (el.style.display = ""));
102 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
103 | },
104 |
105 | initOnKeyListeners: () => {
106 | // only install a listener if it is really needed
107 | if (
108 | !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
109 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
110 | )
111 | return;
112 |
113 | document.addEventListener("keydown", (event) => {
114 | // bail for input elements
115 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
116 | // bail with special keys
117 | if (event.altKey || event.ctrlKey || event.metaKey) return;
118 |
119 | if (!event.shiftKey) {
120 | switch (event.key) {
121 | case "ArrowLeft":
122 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
123 |
124 | const prevLink = document.querySelector('link[rel="prev"]');
125 | if (prevLink && prevLink.href) {
126 | window.location.href = prevLink.href;
127 | event.preventDefault();
128 | }
129 | break;
130 | case "ArrowRight":
131 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
132 |
133 | const nextLink = document.querySelector('link[rel="next"]');
134 | if (nextLink && nextLink.href) {
135 | window.location.href = nextLink.href;
136 | event.preventDefault();
137 | }
138 | break;
139 | }
140 | }
141 |
142 | // some keyboard layouts may need Shift to get /
143 | switch (event.key) {
144 | case "/":
145 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
146 | Documentation.focusSearchBar();
147 | event.preventDefault();
148 | }
149 | });
150 | },
151 | };
152 |
153 | // quick alias for translations
154 | const _ = Documentation.gettext;
155 |
156 | _ready(Documentation.init);
157 |
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/documentation_options.js:
--------------------------------------------------------------------------------
1 | var DOCUMENTATION_OPTIONS = {
2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
3 | VERSION: 'v0.1.0',
4 | LANGUAGE: 'en',
5 | COLLAPSE_INDEX: false,
6 | BUILDER: 'html',
7 | FILE_SUFFIX: '.html',
8 | LINK_SUFFIX: '.html',
9 | HAS_SOURCE: true,
10 | SOURCELINK_SUFFIX: '.txt',
11 | NAVIGATION_WITH_KEYS: false,
12 | SHOW_SEARCH_SUMMARY: true,
13 | ENABLE_SEARCH_SHORTCUTS: true,
14 | };
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/file.png
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/js/badge_only.js:
--------------------------------------------------------------------------------
1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}});
--------------------------------------------------------------------------------
/analognas/docs/_build/html/_static/js/html5shiv-printshiv.min.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed
3 | */
4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var 
b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="
' + 106 | '' + 107 | _("Hide Search Matches") + 108 | "
" 109 | ) 110 | ); 111 | }, 112 | 113 | /** 114 | * helper function to hide the search marks again 115 | */ 116 | hideSearchWords: () => { 117 | document 118 | .querySelectorAll("#searchbox .highlight-link") 119 | .forEach((el) => el.remove()); 120 | document 121 | .querySelectorAll("span.highlighted") 122 | .forEach((el) => el.classList.remove("highlighted")); 123 | localStorage.removeItem("sphinx_highlight_terms") 124 | }, 125 | 126 | initEscapeListener: () => { 127 | // only install a listener if it is really needed 128 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 129 | 130 | document.addEventListener("keydown", (event) => { 131 | // bail for input elements 132 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 133 | // bail with special keys 134 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 135 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 136 | SphinxHighlight.hideSearchWords(); 137 | event.preventDefault(); 138 | } 139 | }); 140 | }, 141 | }; 142 | 143 | _ready(SphinxHighlight.highlightSearchWords); 144 | _ready(SphinxHighlight.initEscapeListener); 145 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 |The preferred way to install this package is by using the Python package index:
78 |pip install analogainas
79 |
For ease of installation, install aihwkit library separately, refer to AIHWKit installation:
82 |The package require the following runtime libraries to be installed in your 83 | system:
84 |OpenBLAS: 0.3.3+
CUDA Toolkit: 9.0+ (only required for the GPU-enabled simulator [1])