├── .gitignore ├── AnalogNAS_Tutorial.ipynb ├── LICENSE ├── README.md ├── analognas ├── README.md ├── analogainas.egg-info │ ├── PKG-INFO │ ├── SOURCES.txt │ ├── dependency_links.txt │ ├── requires.txt │ └── top_level.txt ├── analogainas │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-38.pyc │ │ ├── __init__.cpython-39.pyc │ │ └── utils.cpython-38.pyc │ ├── __version__.py │ ├── evaluators │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-38.pyc │ │ │ ├── __init__.cpython-39.pyc │ │ │ ├── xgboost.cpython-38.pyc │ │ │ └── xgboost.cpython-39.pyc │ │ ├── base_evaluator.py │ │ ├── mlp.py │ │ ├── prepare_data.py │ │ ├── weights │ │ │ ├── mlp_model.pth │ │ │ ├── surrogate_xgboost.json │ │ │ ├── surrogate_xgboost_avm.json │ │ │ ├── surrogate_xgboost_ranker.json │ │ │ ├── surrogate_xgboost_std.json │ │ │ ├── xgboost_avm.bst │ │ │ ├── xgboost_avm_kws.bst │ │ │ ├── xgboost_avm_vww.bst │ │ │ ├── xgboost_ranker_acc.bst │ │ │ ├── xgboost_ranker_kws.bst │ │ │ ├── xgboost_ranker_vww.bst │ │ │ ├── xgboost_std.bst │ │ │ ├── xgboost_std_kws.bst │ │ │ └── xgboost_std_vww.bst │ │ └── xgboost.py │ ├── search_algorithms │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-38.pyc │ │ │ ├── __init__.cpython-39.pyc │ │ │ ├── ea_optimized.cpython-38.pyc │ │ │ ├── ea_optimized.cpython-39.pyc │ │ │ ├── worker.cpython-38.pyc │ │ │ └── worker.cpython-39.pyc │ │ ├── bo.py │ │ ├── ea.py │ │ ├── ea_optimized.py │ │ └── worker.py │ ├── search_spaces │ │ ├── __pycache__ │ │ │ ├── config_space.cpython-38.pyc │ │ │ ├── config_space.cpython-39.pyc │ │ │ ├── resnet_macro_architecture.cpython-38.pyc │ │ │ ├── resnet_macro_architecture.cpython-39.pyc │ │ │ ├── sample.cpython-38.pyc │ │ │ ├── sample.cpython-39.pyc │ │ │ ├── train.cpython-38.pyc │ │ │ └── train.cpython-39.pyc │ │ ├── config_space.py │ │ ├── dataloaders │ │ │ ├── __pycache__ │ │ │ │ ├── cutout.cpython-38.pyc │ │ │ │ ├── cutout.cpython-39.pyc │ │ │ │ ├── dataloader.cpython-38.pyc │ │ │ │ └── 
dataloader.cpython-39.pyc │ │ │ ├── cutout.py │ │ │ ├── dataloader.py │ │ │ └── kws.py │ │ ├── resnet_macro_architecture.py │ │ ├── sample.py │ │ └── train.py │ └── utils.py ├── build │ └── lib │ │ └── analogainas │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── evaluators │ │ ├── __init__.py │ │ ├── base_evaluator.py │ │ ├── mlp.py │ │ ├── prepare_data.py │ │ └── xgboost.py │ │ ├── search_algorithms │ │ ├── __init__.py │ │ ├── bo.py │ │ ├── ea.py │ │ ├── ea_optimized.py │ │ └── worker.py │ │ └── utils.py ├── dist │ ├── analogainas-0.1.0-py3.8.egg │ └── analogainas-0.1.0-py3.9.egg ├── docs │ ├── Makefile │ ├── _build │ │ ├── doctrees │ │ │ ├── environment.pickle │ │ │ ├── getting_started.doctree │ │ │ ├── index.doctree │ │ │ └── install.doctree │ │ └── html │ │ │ ├── .buildinfo │ │ │ ├── _sources │ │ │ ├── getting_started.rst.txt │ │ │ ├── index.rst.txt │ │ │ └── install.rst.txt │ │ │ ├── _static │ │ │ ├── _sphinx_javascript_frameworks_compat.js │ │ │ ├── alabaster.css │ │ │ ├── basic.css │ │ │ ├── classic.css │ │ │ ├── css │ │ │ │ ├── badge_only.css │ │ │ │ ├── fonts │ │ │ │ │ ├── Roboto-Slab-Bold.woff │ │ │ │ │ ├── Roboto-Slab-Bold.woff2 │ │ │ │ │ ├── Roboto-Slab-Regular.woff │ │ │ │ │ ├── Roboto-Slab-Regular.woff2 │ │ │ │ │ ├── fontawesome-webfont.eot │ │ │ │ │ ├── fontawesome-webfont.svg │ │ │ │ │ ├── fontawesome-webfont.ttf │ │ │ │ │ ├── fontawesome-webfont.woff │ │ │ │ │ ├── fontawesome-webfont.woff2 │ │ │ │ │ ├── lato-bold-italic.woff │ │ │ │ │ ├── lato-bold-italic.woff2 │ │ │ │ │ ├── lato-bold.woff │ │ │ │ │ ├── lato-bold.woff2 │ │ │ │ │ ├── lato-normal-italic.woff │ │ │ │ │ ├── lato-normal-italic.woff2 │ │ │ │ │ ├── lato-normal.woff │ │ │ │ │ └── lato-normal.woff2 │ │ │ │ └── theme.css │ │ │ ├── custom.css │ │ │ ├── doctools.js │ │ │ ├── documentation_options.js │ │ │ ├── file.png │ │ │ ├── jquery.js │ │ │ ├── js │ │ │ │ ├── badge_only.js │ │ │ │ ├── html5shiv-printshiv.min.js │ │ │ │ ├── html5shiv.min.js │ │ │ │ └── theme.js │ │ │ ├── language_data.js │ 
│ │ ├── minus.png │ │ │ ├── plus.png │ │ │ ├── pygments.css │ │ │ ├── searchtools.js │ │ │ ├── sidebar.js │ │ │ └── sphinx_highlight.js │ │ │ ├── genindex.html │ │ │ ├── getting_started.html │ │ │ ├── index.html │ │ │ ├── install.html │ │ │ ├── objects.inv │ │ │ ├── search.html │ │ │ └── searchindex.js │ ├── conf.py │ ├── getting_started.rst │ ├── index.rst │ ├── install.rst │ └── make.bat ├── environment.yml ├── nas_search_demo.py ├── requirements.txt ├── setup.py ├── starter_notebook.ipynb └── tests │ ├── 01_general_search.py │ ├── 02_bo.py │ ├── 03_random_sample.py │ ├── 04_generate_architecture.py │ └── resnet.csv └── analognasbench ├── README.md ├── analog_nasbench.egg-info ├── PKG-INFO ├── SOURCES.txt ├── dependency_links.txt ├── requires.txt └── top_level.txt ├── analognasbench ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-38.pyc │ └── data.cpython-38.pyc ├── data.anb └── data.py ├── build └── lib │ └── analognasbench │ ├── __init__.py │ ├── data.anb │ └── data.py ├── examples ├── bananas.py ├── brute_force.py ├── evolutionary_algorithm.py └── random_search.py ├── setup.py ├── test.py ├── training_script.py └── training_script.sh /.gitignore: -------------------------------------------------------------------------------- 1 | venv 2 | env 3 | results 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Analog-aware NAS Works 2 | 3 | ## Description 4 | Recent updates: 5 | * **AnalogNAS-Bench**:** We have integrated AnalogNAS-Bench, a dedicated NAS benchmark tailored specifically for Analog In-Memory Computing (AIMC). This benchmark enables fair and systematic comparisons of NAS methodologies under realistic AIMC conditions, providing valuable insights into architectural robustness against AIMC-induced noise and drift. 
6 | 7 | * **AnalogNas** is a modular and flexible framework to facilitate the implementation of Analog-aware Neural Architecture Search. It offers high-level classes to define: the search space, the accuracy evaluator, and the search strategy. It leverages [the aihwkit framework](https://github.com/IBM/aihwkit) to apply hardware-aware training with analog non-idealities and noise included. **AnalogNAS** obtained architectures are more robust during inference on Analog Hardware. We also include two evaluators trained to rank the architectures according to their analog training accuracy. 8 | 9 | 10 | ## Setup 11 | While installing the repository, creating a new conda environment is recomended. 12 | 13 | Firstly, refer to [AIHWKit installation](https://aihwkit.readthedocs.io/en/latest/install.html) to install Pytorch and the AIHWKit toolkit. 14 | 15 | Install the additional requirements, using: 16 | ``` 17 | pip install -r requirements.txt 18 | ``` 19 | 20 | Afterwards, install AnalogNAS by running the ```setup.py``` file: 21 | ``` 22 | python setup.py install 23 | ``` 24 | 25 | Alternatively, you can also download the package through pip: 26 | ``` 27 | pip install analogainas 28 | ``` 29 | 30 | ## Example 31 | 32 | ```python 33 | from analogainas.search_spaces.config_space import ConfigSpace 34 | from analogainas.evaluators.xgboost import XGBoostEvaluator 35 | from analogainas.search_algorithms.ea_optimized import EAOptimizer 36 | from analogainas.search_algorithms.worker import Worker 37 | 38 | CS = ConfigSpace('CIFAR-10') # define search space, by default a resnet-like search space 39 | evaluator = XGBoostEvaluator() # load the evaluators 40 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) # define the optimizer with its parameters 41 | 42 | NB_RUN = 2 43 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) # The global runner 44 | 45 | worker.search() # start search 46 | 47 | worker.result_summary() # print results 48 | 49 | ``` 50 | 51 | ## 
Usage 52 | To get started, check out ```nas_search_demo.py``` and ```starter_notebook.ipynb``` to ensure the installation went well. 53 | 54 | ## Authors 55 | AnalogNAS has been developed by IBM Research, 56 | 57 | with Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, 58 | Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, 59 | Abu Sebastian, and Kaoutar El Maghraoui 60 | 61 | You can contact us by opening a new issue in the repository. 62 | 63 | ## How to cite? 64 | 65 | In case you are using the _AnalogNas_ toolkit for 66 | your research, please cite the IEEE Edge 2023 paper that describes the toolkit: 67 | 68 | > Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, 69 | > Hsinyu Tsai, Ramachandran Muralidhar, Smail Niar, Ouarnoughi Hamza, Vijay Narayanan, 70 | > Abu Sebastian and Kaoutar El Maghraoui 71 | > "AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing" (2023 IEEE INTERNATIONAL CONFERENCE ON EDGE 72 | > COMPUTING & COMMUNICATIONS (IEEE Edge)) 73 | 74 | > https://arxiv.org/abs/2305.10459 75 | 76 | 77 | ## Awards and Media Mentions 78 | 79 | * We are proud to share that AnalogNAS open source project the prestigious **IEEE OPEN SOURCE SCIENCE** in 2023 at the [IEEE 2023 Services Computing Congress](https://conferences.computer.org/services/2023/awards/). 
80 | 81 | image 82 | 83 | * AnalogNAS paper received the **Best Paper Award** at [2023 IEEE EDGE (INTERNATIONAL CONFERENCE ON EDGE COMPUTING & COMMUNICATIONS)](https://conferences.computer.org/edge/2023/) 84 | 85 | image 86 | 87 | 88 | 89 | 90 | ## References 91 | * [Hardware-aware Neural Architecture Search: Survey and Taxonomy](https://www.ijcai.org/proceedings/2021/592) 92 | * [AIHWKit](https://ieeexplore.ieee.org/abstract/document/9458494) 93 | * [AIHW Composer](https://aihw-composer.draco.res.ibm.com) 94 | 95 | ## License 96 | This project is licensed under [Apache License 2.0]. 97 | 98 | [Apache License 2.0]: LICENSE.txt 99 | -------------------------------------------------------------------------------- /analognas/README.md: -------------------------------------------------------------------------------- 1 | # AnalogNAS 2 | 3 | ## Description 4 | **AnalogNas** is a modular and flexible framework to facilitate the implementation of Analog-aware Neural Architecture Search. It offers high-level classes to define: the search space, the accuracy evaluator, and the search strategy. It leverages [the aihwkit framework](https://github.com/IBM/aihwkit) to apply hardware-aware training with analog non-idealities and noise included. **AnalogNAS** obtained architectures are more robust during inference on Analog Hardware. We also include two evaluators trained to rank the architectures according to their analog training accuracy. 5 | 6 | > :warning: This library is currently in beta and under active development. 7 | > Please be mindful of potential issues and monitor for improvements, 8 | > new features, and bug fixes in upcoming versions. 
9 | 10 | [**Setup**](#setup) 11 | | [**Usage**](#usage) 12 | | [**Docs**](https://github.com/IBM/analog-nas/blob/main/starter_notebook.ipynb) 13 | | [**References**](#references) 14 | 15 | ## Features 16 | AnalogaiNAS package offers the following features: 17 | 18 | * A customizable resnet-like search space, allowing to target CIFAR-10, Visual Wake Words, and Keyword Spotting 19 | * A configuration space object allows to add any number or type of architecture and training hyperparameters to the search 20 | * An analog-specific evaluator which includes: 21 | * An 1-day accuracy ranker 22 | * An 1 month accuracy variation estimator 23 | * A 1-day standard deviation estimator 24 | * A flexible search algorithm, enabling the implementation and extension of state-of-the-art NAS methods. 25 | 26 | ## Structure 27 | In a high-level AnalogAINAS consists of 4 main building blocks which (can) interact with each other: 28 | 29 | * Configuration spaces (```search_spaces/config_space.py```): a search space of architectures targeting a specific dataset. 30 | * Evaluator (```evaluators/base_evaluator.py```): a ML predictor model to predict: 31 | * 1-day Accuracy: the evaluator models the drift effect that is encountered in Analog devices. The accuracy after 1 day of drift is then predicted and used as an objective to maximize. 32 | * The Accuracy Variation for One Month (AVM): The difference between the accuracy after 1 month and the accuracy after 1 sec. 33 | * The 1-day accuracy standard deviation: The stochasticity of the noise induces different variation of the model's accuracy depending on its architecture. 34 | 35 | The weights of these models are provided in (```evaluators/weights```). 36 | * Optimizer (```search_algorithms/```): a optimization strategy such as evolutionary algorithm or bayesian optimization. 
37 | * Worker (```search_algorithms/worker.py```): A global object that runs the architecture search loop and the final network training pipeline 38 | 39 | ## Setup 40 | While installing the repository, creating a new conda environment is recomended. 41 | 42 | Firstly, refer to [AIHWKit installation](https://aihwkit.readthedocs.io/en/latest/install.html) to install Pytorch and the AIHWKit toolkit. 43 | 44 | Install the additional requirements, using: 45 | ``` 46 | pip install -r requirements.txt 47 | ``` 48 | 49 | Afterwards, install AnalogNAS by running the ```setup.py``` file: 50 | ``` 51 | python setup.py install 52 | ``` 53 | 54 | Alternatively, you can also download the package through pip: 55 | ``` 56 | pip install analogainas 57 | ``` 58 | 59 | ## Example 60 | 61 | ```python 62 | from analogainas.search_spaces.config_space import ConfigSpace 63 | from analogainas.evaluators.xgboost import XGBoostEvaluator 64 | from analogainas.search_algorithms.ea_optimized import EAOptimizer 65 | from analogainas.search_algorithms.worker import Worker 66 | 67 | CS = ConfigSpace('CIFAR-10') # define search space, by default a resnet-like search space 68 | evaluator = XGBoostEvaluator() # load the evaluators 69 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) # define the optimizer with its parameters 70 | 71 | NB_RUN = 2 72 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) # The global runner 73 | 74 | worker.search() # start search 75 | 76 | worker.result_summary() # print results 77 | 78 | ``` 79 | 80 | ## Usage 81 | To get started, check out ```nas_search_demo.py``` and ```starter_notebook.ipynb``` to ensure the installation went well. 
82 | 83 | ## Authors 84 | AnalogNAS has been developed by IBM Research, 85 | 86 | with Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, 87 | Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, 88 | Abu Sebastian, and Kaoutar El Maghraoui 89 | 90 | You can contact us by opening a new issue in the repository. 91 | 92 | ## How to cite? 93 | 94 | In case you are using the _AnalogNas_ toolkit for 95 | your research, please cite the IEEE Edge 2023 paper that describes the toolkit: 96 | 97 | > Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, 98 | > Hsinyu Tsai, Ramachandran Muralidhar, Smail Niar, Ouarnoughi Hamza, Vijay Narayanan, 99 | > Abu Sebastian and Kaoutar El Maghraoui 100 | > "AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing" (2023 IEEE INTERNATIONAL CONFERENCE ON EDGE 101 | > COMPUTING & COMMUNICATIONS (IEEE Edge)) 102 | 103 | > https://arxiv.org/abs/2305.10459 104 | 105 | 106 | ## Awards and Media Mentions 107 | 108 | * We are proud to share that AnalogNAS open source project the prestigious **IEEE OPEN SOURCE SCIENCE** in 2023 at the [IEEE 2023 Services Computing Congress](https://conferences.computer.org/services/2023/awards/). 109 | 110 | image 111 | 112 | * AnalogNAS paper received the **Best Paper Award** at [2023 IEEE EDGE (INTERNATIONAL CONFERENCE ON EDGE COMPUTING & COMMUNICATIONS)](https://conferences.computer.org/edge/2023/) 113 | 114 | image 115 | 116 | 117 | 118 | 119 | ## References 120 | * [Hardware-aware Neural Architecture Search: Survey and Taxonomy](https://www.ijcai.org/proceedings/2021/592) 121 | * [AIHWKit](https://ieeexplore.ieee.org/abstract/document/9458494) 122 | * [AIHW Composer](https://aihw-composer.draco.res.ibm.com) 123 | 124 | ## License 125 | This project is licensed under [Apache License 2.0]. 
126 | 127 | [Apache License 2.0]: LICENSE.txt 128 | -------------------------------------------------------------------------------- /analognas/analogainas.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | LICENSE 2 | README.md 3 | setup.py 4 | analogainas/__init__.py 5 | analogainas/__version__.py 6 | analogainas/utils.py 7 | analogainas.egg-info/PKG-INFO 8 | analogainas.egg-info/SOURCES.txt 9 | analogainas.egg-info/dependency_links.txt 10 | analogainas.egg-info/requires.txt 11 | analogainas.egg-info/top_level.txt 12 | analogainas/evaluators/__init__.py 13 | analogainas/evaluators/base_evaluator.py 14 | analogainas/evaluators/mlp.py 15 | analogainas/evaluators/prepare_data.py 16 | analogainas/evaluators/xgboost.py 17 | analogainas/search_algorithms/__init__.py 18 | analogainas/search_algorithms/bo.py 19 | analogainas/search_algorithms/ea.py 20 | analogainas/search_algorithms/ea_optimized.py 21 | analogainas/search_algorithms/worker.py -------------------------------------------------------------------------------- /analognas/analogainas.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /analognas/analogainas.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | cmake==3.22.4 2 | scikit-build==0.14.1 3 | scikit-learn==1.0.2 4 | scipy 5 | xgboost 6 | -------------------------------------------------------------------------------- /analognas/analogainas.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | analogainas 2 | -------------------------------------------------------------------------------- /analognas/analogainas/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__init__.py -------------------------------------------------------------------------------- /analognas/analogainas/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /analognas/analogainas/__pycache__/__init__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/__init__.cpython-39.pyc -------------------------------------------------------------------------------- /analognas/analogainas/__pycache__/utils.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/__pycache__/utils.cpython-38.pyc -------------------------------------------------------------------------------- /analognas/analogainas/__version__.py: -------------------------------------------------------------------------------- 1 | """Version information.""" 2 | __version__ = "0.1.0" 3 | -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/__init__.py: -------------------------------------------------------------------------------- 1 | """Accuracy Evaluation Methods""" 2 | -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- 
# --- analognas/analogainas/evaluators/base_evaluator.py ---
"""Abstract class for base evaluator."""


class Evaluator:
    """Base class for accuracy-evaluation methods (surrogate models)."""

    def __init__(self, model_type=None):
        # Identifier of the underlying surrogate (e.g. "XGBRanker");
        # stored verbatim, subclasses decide how to interpret it.
        self.model_type = model_type

    def pre_process(self):
        """Hook called once at the start of the NAS algorithm, before any
        architectures have been queried."""
        pass

    def fit(self, x_train, y_train):
        """Train the evaluator.

        Args:
            x_train: list of architectures.
            y_train: accuracies or ranks.
        """
        pass

    def query(self, x_test):
        """Get the accuracy/rank prediction for each entry of ``x_test``.

        Args:
            x_test: list of architectures.

        Returns:
            Predictions for the architectures.
        """
        pass

    def get_evaluator_stat(self):
        """Check whether the evaluator needs retraining.

        Returns:
            A dictionary of metrics.
        """
        return {
            "requires_retraining": False,
            "test_accuracy": None,
            "requires_hyperparameters": False,
            "hyperparams": {},
        }

    def set_hyperparams(self, hyperparams):
        """Modify/set the hyperparameters of the evaluator.

        Args:
            hyperparams: dictionary of hyperparameters.
        """
        self.hyperparams = hyperparams

    def get_hyperparams(self):
        """Return the hyperparameter dictionary, or ``None`` if it was
        never set via :meth:`set_hyperparams`."""
        return getattr(self, "hyperparams", None)

    def get_correlation(self, x_test, y_test):
        """Kendall correlation between predictions on ``x_test`` and
        the ground truth ``y_test``."""
        # Imported lazily so this module can be loaded without the full
        # analogainas package being importable.
        from analogainas.utils import kendal_correlation

        y_pred = self.query(x_test)
        return kendal_correlation(y_test, y_pred)


# --- analognas/analogainas/evaluators/prepare_data.py ---
import numpy as np
from numpy import genfromtxt


class AccuracyDataLoader:
    """Load the architecture/accuracy CSV dataset used to train evaluators."""

    def __init__(self, dataset_file="dataset_cifar10.csv", transforms=None):
        self.dataset_file = dataset_file
        self.data = genfromtxt(self.dataset_file, delimiter=',')

        # Optional encoding applied to the raw array.
        if transforms is not None:
            self.data = transforms(self.data)

    def get_train(self):
        """Split the dataset into scaled train/test sets.

        Returns:
            ((X_train, y_train), (X_test, y_test), slope)
        """
        # NOTE(review): these row-wise slices look suspicious -- X takes
        # rows 1:24 while y is the single row 27, so train_test_split will
        # see mismatched lengths unless the CSV has a very specific layout.
        # Behavior preserved as-is; confirm against the dataset schema.
        X = self.data[1:24]
        y = self.data[27]
        slope = self.data[26] - self.data[-1]

        # Imported lazily so importing this module does not require sklearn.
        from sklearn.model_selection import train_test_split
        from sklearn.preprocessing import StandardScaler

        # Hold out 20% for testing, with a fixed seed for reproducibility.
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=0.2, random_state=0)

        # Standardize features using statistics of the training split only.
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)

        return (X_train, y_train), (X_test, y_test), slope
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_avm_kws.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm_kws.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_avm_vww.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_avm_vww.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_ranker_acc.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_acc.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_ranker_kws.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_kws.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_ranker_vww.bst: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_ranker_vww.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_std.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_std_kws.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std_kws.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/weights/xgboost_std_vww.bst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/evaluators/weights/xgboost_std_vww.bst -------------------------------------------------------------------------------- /analognas/analogainas/evaluators/xgboost.py: -------------------------------------------------------------------------------- 1 | """XGBoost evaluator.""" 2 | from tabnanny import verbose 3 | import xgboost as xgb 4 | import numpy as np 5 | #from base_evaluator import Evaluator 6 | 7 | """ 8 | XGboost Evalutor Wrapper class. 
9 | """ 10 | class XGBoostEvaluator(): 11 | def __init__( 12 | self, 13 | model_type = "XGBRanker", 14 | load_weight = True, 15 | hpo_wrapper=False, 16 | hparams_from_file=False, 17 | avm_predictor_path = "analogainas/evaluators/weights/xgboost_avm.bst", 18 | std_predictor_path = "analogainas/evaluators/weights/xgboost_std.bst", 19 | ranker_path = "analogainas/evaluators/weights/xgboost_ranker_acc.bst" 20 | ): 21 | self.model_type = model_type 22 | self.hpo_wrapper = hpo_wrapper 23 | self.default_hyperparams = { 24 | 'tree_method':'gpu_hist', 25 | 'booster':'gbtree', 26 | 'objective':'rank:pairwise', 27 | 'random_state':42, 28 | 'learning_rate':0.1, 29 | 'colsample_bytree':0.9, 30 | 'eta':0.05, 31 | 'max_depth':6, 32 | 'n_estimators':110, 33 | 'subsample':0.75, 34 | 'enable_categorical':True} 35 | self.hyperparams = None 36 | self.hparams_from_file = hparams_from_file 37 | self.load_weight = load_weight 38 | self.ranker_path = ranker_path 39 | self.avm_predictor_path = avm_predictor_path 40 | self.std_predictor_path = std_predictor_path 41 | 42 | self.ranker = self.get_ranker() 43 | self.avm_predictor = self.get_avm_predictor() 44 | self.std_predictor = self.get_std_predictor() 45 | 46 | def get_ranker(self): 47 | ranker = xgb.XGBRegressor() 48 | if self.load_weight == True: 49 | ranker.load_model(self.ranker_path) 50 | 51 | return ranker 52 | 53 | def get_avm_predictor(self): 54 | avm_predictor = xgb.XGBRegressor() 55 | if self.load_weight == True: 56 | avm_predictor.load_model(self.avm_predictor_path) 57 | return avm_predictor 58 | 59 | def get_std_predictor(self): 60 | std_predictor = xgb.XGBRegressor() 61 | if self.load_weight == True: 62 | std_predictor.load_model(self.std_predictor_path) 63 | return std_predictor 64 | 65 | def fit(self, x_train, y_train, train_info_file="xgboost.txt", hyperparameters=None, epochs=500, verbose=True): 66 | if hyperparameters == None: 67 | self.evaluator = self.get_model(self.default_hyperparams) 68 | else: 69 | self.hyperparams 
= hyperparameters 70 | self.evaluator = self.get_model(self.hyperparams) 71 | 72 | progress = dict() 73 | d_train = xgb.DMatrix(x_train, y_train) 74 | watchlist = [(d_train,'rank:ndcg')] 75 | self.evaluator = self.evaluator.train(self.hyperparams, d_train, epochs, watchlist, evals_result=progress) 76 | 77 | #SAVE MODEL 78 | self.evaluator.save_model(train_info_file) 79 | 80 | return progress['rank:ndcg'] 81 | 82 | def query_pop(self, P): 83 | x_test = [] 84 | for a in P: 85 | arch = list(a[0].values()) 86 | x_test.append(arch) 87 | x_test = np.array(x_test) 88 | return self.ranker.predict(x_test), self.avm_predictor.predict(x_test) 89 | 90 | def query(self, P): 91 | x_test = [] 92 | arch = list(P[0].values()) 93 | x_test.append(arch) 94 | return self.ranker.predict(x_test), self.avm_predictor.predict(x_test) 95 | 96 | def dcg_at_k(self, r, k, method=0): 97 | r = np.asfarray(r)[:k] 98 | if r.size: 99 | if method == 0: 100 | return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1))) 101 | elif method == 1: 102 | return np.sum(r / np.log2(np.arange(2, r.size + 2))) 103 | return 0. 104 | 105 | def ndcg_at_k(self, r, k, method=0): 106 | dcg_max = self.dcg_at_k(sorted(r, reverse=True), k, method) 107 | if not dcg_max: 108 | return 0. 
109 | return self.dcg_at_k(r, k, method) / dcg_max 110 | 111 | def ndcg_scorer(self, estimator, X, y_true): 112 | y_pred = estimator.predict(X) 113 | # Assuming y_true contains the actual relevance scores 114 | # Sort the true scores based on the predictions 115 | sorted_scores = [y for _, y in sorted(zip(y_pred, y_true), reverse=True)] 116 | return self.ndcg_at_k(sorted_scores, k=len(y_true)) # or use a specific k 117 | 118 | -------------------------------------------------------------------------------- /analognas/analogainas/search_algorithms/__init__.py: -------------------------------------------------------------------------------- 1 | """A Worker is the base NAS Search Strategy class.""" 2 | -------------------------------------------------------------------------------- /analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/__init__.cpython-39.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_algorithms/__pycache__/ea_optimized.cpython-38.pyc 
class BOptimizer:
    """Bayesian optimization skeleton (probability-of-improvement acquisition).

    NOTE(review): several methods reference attributes that are never
    initialized here (``search_space``, ``random_state``, ``evaluate_func``,
    ``X``, ``y``, ``rs_search``) -- presumably supplied by a subclass or the
    caller; confirm before use.
    """

    def __init__(self, max_iter):
        # Maximum number of optimization iterations.
        self.max_iter = max_iter

    # surrogate or approximation for the objective function
    def surrogate(self, model, X):
        """Return the surrogate model's (mean, std) prediction for X."""
        # catch any warning generated when making a prediction
        return model.predict(X, return_std=True)

    # probability of improvement acquisition function
    def acquisition(self, X, Xsamples, model):
        """Score Xsamples by improvement over the best surrogate value on X."""
        # calculate the best surrogate score found so far
        yhat, _ = self.surrogate(model, X)
        best = max(yhat)
        # calculate mean and stdev via surrogate function
        mu, std = self.surrogate(model, Xsamples)
        mu = mu[:, 0]
        # 1E-9 guards against division by zero when the surrogate is certain
        probs = (mu - best) / (std + 1E-9)
        return probs

    def maximize(self):
        """Run the optimization loop; return the best (x, y) pair found."""
        # BUG FIX: the original iterated over self.n_iter, which is never
        # set anywhere -- __init__ stores the budget as self.max_iter.
        for _ in range(self.max_iter):
            x_next = self.propose_next_point()
            y_next = self.evaluate_func(x_next)

            self.X.append(x_next)
            self.y.append(y_next)

        best_idx = np.argmax(self.y)
        best_x = self.X[best_idx]
        best_y = self.y[best_idx]
        return best_x, best_y

    def propose_next_point(self):
        """Propose the candidate with the highest acquisition score.

        NOTE(review): calls self.acquisition(x) with one argument, but
        acquisition takes (X, Xsamples, model) -- this path looks unfinished;
        confirm intended signature.
        """
        x_candidates = self.random_state.uniform(
            low=self.search_space[:, 0],
            high=self.search_space[:, 1],
            size=(100, self.search_space.shape[0])
        )

        best_x = None
        best_acquisition = float('-inf')

        for x in x_candidates:
            acquisition = self.acquisition(x)
            if acquisition > best_acquisition:
                best_x = x
                best_acquisition = acquisition

        return best_x

    def gaussian_process_regression(self):
        """Placeholder surrogate fit; returns a fixed (mean, std)."""
        # Define your surrogate model (Gaussian Process) and fit it to the data
        # Example: Mean of 0, Standard Deviation of 1
        mean = 0.0
        std = 1.0
        return mean, std

    # optimize the acquisition function
    def run(self, X, y, model):
        """One acquisition-optimization step over random samples."""
        # random search, generate random samples
        Xsamples = self.rs_search(100)
        Xsamples = Xsamples.reshape(len(Xsamples), 1)
        # calculate the acquisition function for each sample
        scores = self.acquisition(X, Xsamples, model)
        # locate the index of the largest scores
        ix = np.argmax(scores)
        return Xsamples[ix, 0]
class EAOptimizer:
    """Classical evolutionary algorithm: mutate a population each generation
    and keep the best individual seen so far."""

    def __init__(self, max_iter, population_size, mutation_prob):
        self.max_iter = max_iter          # number of generations
        self.population_size = population_size
        self.mutation_prob = mutation_prob  # probability of mutating an individual

    def mutate(self, architecture):
        """Replace the architecture with a fresh random sample with
        probability ``mutation_prob``; otherwise return it unchanged.

        BUG FIX: the original mutated when random.random() > mutation_prob,
        inverting the parameter's meaning (a higher mutation_prob produced
        *fewer* mutations, and mutation_prob=0 mutated almost always).
        """
        if random.random() < self.mutation_prob:
            # NOTE(review): random_sample is imported from
            # analogainas.search_spaces.sample, where it is defined with
            # (dataset, n) parameters -- this zero-arg call looks stale; verify.
            architecture = random_sample()
        return architecture

    def run(self):
        """Evolve the population and return the best architecture found.

        NOTE(review): ``self.cs`` and ``self.evaluation`` are never assigned
        in this class -- presumably injected by the caller; confirm.
        """
        D = [self.cs.sample() for _ in range(self.population_size)]
        best_f = 0.0
        best_x = [None] * self.population_size

        for _ in range(self.max_iter):
            new_x = [self.mutate(x) for x in D]
            new_f = [self.evaluation(x) for x in new_x]

            for j in range(self.population_size):
                if new_f[j] > best_f:
                    best_f = new_f[j]
                    best_x = new_x[j]

            D = new_x

        return {'best_x': best_x, 'best_f': best_f}
class EAOptimizer:
    """
    Evolutionary Algorithm with optimized mutations and robustness constraint.

    The NAS problem is cast to:
        Max Acc(arch)
        s.t nb_param(arch) < max_nb_param
            drop(arch) < 10

    Args:
        surrogate: evaluator exposing query(arch) and query_pop(population).
        nb_iter: maximum number of iterations.
        population_size: requested size; the working population is 1/10 of it.
        mutation_prob_width: probability of a width mutation.
        mutation_prob_depth: probability of a depth mutation.
        mutation_prob_other: probability of other structural mutations.
        max_nb_param: constraint applied to the number of parameters.
        T_AVM: constraint applied on the predicted AVM (robustness check).
    """
    def __init__(self,
                 surrogate,
                 nb_iter=200,
                 population_size=50,
                 mutation_prob_width=0.8,
                 mutation_prob_depth=0.8,
                 mutation_prob_other=0.6,
                 max_nb_param=1,
                 T_AVM=10):

        assert population_size > 10, f"Population size needs to be at least 10, got {population_size}"

        self.surrogate = surrogate
        self.nb_iter = nb_iter
        # NOTE(review): working population is deliberately 1/10 of the
        # requested size (original behavior) -- confirm this is intended.
        self.population_size = int(population_size/10)
        self.mutation_prob_width = mutation_prob_width
        self.mutation_prob_depth = mutation_prob_depth
        self.mutation_prob_other = mutation_prob_other
        self.max_nb_param = max_nb_param
        self.T_AVM = T_AVM

    def mutate(self, cs, architecture):
        """Apply one randomly chosen mutation family (width/depth/other)."""
        r = random.random()
        if r < 0.4:
            architecture = self.mutate_width(cs, architecture)
        elif r < 0.8:
            architecture = self.mutate_depth(cs, architecture)
        else:
            architecture = self.mutate_other(cs, architecture)

        return architecture

    def mutate_width(self, cs, architecture):
        """Width mutation: currently resamples a whole architecture."""
        if random.random() < self.mutation_prob_width:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_depth(self, cs, architecture):
        """Depth mutation: currently resamples a whole architecture."""
        if random.random() < self.mutation_prob_depth:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def mutate_other(self, cs, architecture):
        """Other structural mutation: currently resamples a whole architecture."""
        if random.random() < self.mutation_prob_other:
            architecture = cs.sample_arch_uniformly(1)
        return architecture

    def generate_initial_population(self, cs):
        """Sample a population satisfying the AVM robustness constraint.

        BUG FIX (aliasing): the original built the population as
        [cs.sample_arch_uniformly(1)] * population_size, so every slot
        referenced the SAME architecture object; each slot is now sampled
        independently.
        """
        P = [cs.sample_arch_uniformly(1) for _ in range(self.population_size)]
        print(len(P))
        _, slope = self.surrogate.query_pop(P)

        while (not self.satisfied_constrained(P)):
            for i, s in enumerate(slope):
                if s > self.T_AVM:
                    P[i] = cs.sample_arch_uniformly(1)
            # BUG FIX: refresh the slope for the repaired population;
            # the original kept using the stale initial slope.
            _, slope = self.surrogate.query_pop(P)
        return P

    def satisfied_constrained(self, P):
        """True iff every member's predicted AVM is within T_AVM."""
        _, slope = self.surrogate.query_pop(P)
        for i, s in enumerate(slope):
            if s > self.T_AVM:
                return False
        return True

    def run(self, cs):
        """Evolve the population for nb_iter generations; return the best
        (architecture, accuracy) pair found."""
        P = self.generate_initial_population(cs)
        best_f = 0.0
        best_x = [None]*self.population_size

        for i in range(self.nb_iter):
            best_accs = []
            new_P = []
            for a in P:
                new_a = self.mutate(cs, a)
                new_P.append(new_a)
                acc, _ = self.surrogate.query(new_a)
                best_accs.append(acc)
            new_f = max(best_accs)
            if new_f > best_f:
                best_f = new_f
                # BUG FIX: the original stored the arch from the LAST loop
                # iteration; store the one that achieved the max accuracy.
                best_x = new_P[best_accs.index(new_f)][0]

            P = new_P

            print("ITERATION {} completed: best acc {}".format(i, best_f))

        return best_x, best_f
    def result_summary(self):
        """Print a short report of the completed search: the best accuracy,
        its standard error across runs, and the best configuration found.

        Reads self.best_acc, self.runs, self.std_err and self.best_config,
        all populated by search().
        """
        print("Best architecture accuracy: ", self.best_acc)
        print(f"Standard deviation of accuracy over {self.runs} runs: {self.std_err}")
        print("Best architecture: ", self.best_config)
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-38.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/resnet_macro_architecture.cpython-39.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_spaces/__pycache__/sample.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/sample.cpython-38.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_spaces/__pycache__/sample.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/sample.cpython-39.pyc -------------------------------------------------------------------------------- /analognas/analogainas/search_spaces/__pycache__/train.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/analogainas/search_spaces/__pycache__/train.cpython-38.pyc -------------------------------------------------------------------------------- 
"""Hyperparameter Configuration Space."""
import numpy as np
import random


class Hyperparameter:
    """
    A single hyperparameter and its sampling domain: either an explicit
    ``range`` of allowed values, or a [min_value, max_value) interval
    sampled uniformly (discrete or continuous).
    """
    def __init__(self, name, type, range=None, min_value=0, max_value=0, step=1):
        self.name = name
        self.min_value = min_value
        self.max_value = max_value
        self.step = step
        self.range = range
        if self.range is not None:
            self.sampling = "range"
        else:
            self.type = type  # Discrete, continuous
            self.sampling = "uniform"

    def sample_hyp(self):
        """Draw one value from this hyperparameter's domain.

        NOTE(review): np.random.randint's high bound is exclusive, so a
        discrete hyperparameter never takes its max_value -- confirm intended.
        """
        if self.sampling == "range":
            return random.choice(self.range)
        if self.type == "discrete":
            return np.random.randint(self.min_value, high=self.max_value)
        if self.type == "continuous":
            return np.random.uniform(self.min_value, high=self.max_value)

    def size(self):
        """Number of distinct values (1 for continuous parameters)."""
        if self.sampling == "range":
            return len(self.range)
        if self.type == "continuous":
            return 1
        return len(np.arange(self.min_value, self.max_value, self.step))

    def __repr__(self) -> str:
        return "Name: {}\nMin_Value:{}\nMax_value:{}\nStep:{}".format(
            str(self.name), str(self.min_value), str(self.max_value), str(self.step)
        )


class ConfigSpace:
    """
    The resnet-like architecture search space: a collection of named
    Hyperparameters plus helpers to sample whole architectures from it.
    """
    def __init__(self, dataset="CIFAR-10"):
        self.dataset = dataset  # VWW, KWS
        self.search_space = "resnet-like"  # for now only resnet-like
        self.hyperparameters = []  # list of Hyperparameters to search for
        self.set_hyperparameters()

    def add_hyperparameter(self, name, type, min_value, max_value, step=1):
        """Register a uniform hyperparameter; names must be unique."""
        for h in self.hyperparameters:
            if h.name == name:
                raise Exception("Name should be unique!")

        hyp = Hyperparameter(name,
                             type,
                             min_value=min_value,
                             max_value=max_value,
                             step=step)
        self.hyperparameters.append(hyp)

    def add_hyperparameter_range(self, name, type, range):
        """Register a hyperparameter over an explicit value range."""
        for h in self.hyperparameters:
            if h.name == name:
                raise Exception("Name should be unique!")

        hyp = Hyperparameter(name, type, range=range)
        self.hyperparameters.append(hyp)

    def sample_arch(self):
        """Sample one architecture as a {name: value} dict."""
        arch = {}
        for hyp in self.hyperparameters:
            arch[hyp.name] = hyp.sample_hyp()
        return arch

    def sample_arch_uniformly(self, n):
        """Sample n architectures; blocks beyond the sampled depth M are
        zeroed out so unused stages carry no parameters."""
        archs = []
        for i in range(n):
            tmp = self.sample_arch()
            for j in range(5, tmp["M"], -1):
                tmp["convblock%d" % j] = 0
                tmp["widenfact%d" % j] = 0
                tmp["B%d" % j] = 0
                tmp["R%d" % j] = 0
            archs.append(tmp)

        return archs

    def set_hyperparameters(self):
        """Populate the resnet-like search space definition."""
        if self.search_space == "resnet-like":
            self.add_hyperparameter_range("out_channel0",
                                          "discrete",
                                          range=[8, 12, 16, 32, 48, 64])
            self.add_hyperparameter("M", "discrete", min_value=1, max_value=5)
            self.add_hyperparameter("R1", "discrete", min_value=1, max_value=16)
            self.add_hyperparameter("R2", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R3", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R4", "discrete", min_value=0, max_value=16)
            self.add_hyperparameter("R5", "discrete", min_value=0, max_value=16)

            for i in range(1, 6):
                self.add_hyperparameter_range("convblock%d" % i,
                                              "discrete",
                                              range=[1, 2])
                self.add_hyperparameter("widenfact%d" % i,
                                        "continuous",
                                        min_value=0.5,
                                        max_value=0.8)
                self.add_hyperparameter("B%d" % i, "discrete", min_value=1, max_value=5)

    def remove_hyperparameter(self, name):
        """Remove the hyperparameter with the given name, if present."""
        for i, h in enumerate(self.hyperparameters):
            if h.name == name:
                self.hyperparameters.remove(h)
                break

    def compute_cs_size(self):
        """Cardinality of the search space (product of domain sizes)."""
        size = 1
        for h in self.hyperparameters:
            size *= h.size()
        return size

    def get_hyperparameters(self):
        """Print and return the list of hyperparameter names.

        BUG FIX: the original only printed the names and implicitly returned
        None, while callers (e.g. sample.py) take len() of the result and
        iterate it; the list is now returned.
        """
        names = []
        for h in self.hyperparameters:
            names.append(h.name)
        print(names)
        return names

    def __repr__(self) -> str:
        str_ = ""
        str_ += "Architecture Type: {}\n".format(self.search_space)
        str_ += "Search Space Size: {}\n".format(self.compute_cs_size())
        str_ += "------------------------------------------------\n"
        for i, h in enumerate(self.hyperparameters):
            str_ += "{})\n".format(i) + str(h) + "\n\n"
        str_ += "------------------------------------------------\n"
        return str_


def main():
    """Smoke test: sample 20 architectures and print them."""
    CS = ConfigSpace("Cifar-10")
    configs = CS.sample_arch_uniformly(20)
    print(configs)


if __name__ == "__main__":
    main()
class Cutout(object):
    """Randomly mask out one or more square patches from an image.

    Args:
        n_holes (int): Number of patches to cut out of each image.
        length (int): The length (in pixels) of each square patch.
    """
    def __init__(self, n_holes, length):
        self.n_holes = n_holes
        self.length = length

    def __call__(self, img):
        """
        Args:
            img (Tensor): Tensor image of size (C, H, W).
        Returns:
            Tensor: Image with n_holes of dimension
            length x length cut out of it.
        """
        height, width = img.size(1), img.size(2)
        mask = np.ones((height, width), np.float32)
        half = self.length // 2

        for _ in range(self.n_holes):
            # Pick a random patch center; patches are clipped at the borders.
            center_y = np.random.randint(height)
            center_x = np.random.randint(width)

            top = np.clip(center_y - half, 0, height)
            bottom = np.clip(center_y + half, 0, height)
            left = np.clip(center_x - half, 0, width)
            right = np.clip(center_x + half, 0, width)

            mask[top:bottom, left:right] = 0.

        # Broadcast the (H, W) mask over all channels and zero the patches.
        return img * torch.from_numpy(mask).expand_as(img)
import random
import numpy as np
from analogainas.search_spaces.resnet_macro_architecture import Network
from analogainas.search_spaces.config_space import ConfigSpace
from analogainas.search_algorithms.worker import Worker
from analogainas.search_spaces.train import train
from analogainas.utils import *
import csv

EPOCHS = 40
LEARNING_RATE = 0.05

def latin_hypercube_sample(dataset, n):
    """Latin Hypercube Sampling of n architectures from ConfigSpace,
    writing each config to ./configs/ and training it.

    NOTE(review): this expects cs.get_hyperparameters() to yield objects
    exposing .lower/.upper/.name, but ConfigSpace's Hyperparameter exposes
    min_value/max_value -- confirm which API is intended before relying on
    this function.
    """
    cs = ConfigSpace(dataset)
    num_parameters = len(cs.get_hyperparameters())
    ranges = np.arange(0, 1, 1/n)

    sampled_architectures = []
    for sample_idx in range(n):
        config = {}
        for i, hyperparameter in enumerate(cs.get_hyperparameters()):
            min_val, max_val = hyperparameter.lower, hyperparameter.upper
            val_range = max_val - min_val
            # random offset keeps samples jittered inside each stratum
            offset = random.uniform(0, val_range/n)
            config[hyperparameter.name] = min_val + ranges[sample_idx] * val_range + offset
        sampled_architectures.append(config)

    keys = sampled_architectures[0].keys()

    for config in sampled_architectures:
        model = Network(config)
        model_name = "resnet_{}_{}".format(config["M"], get_nb_convs(config))

        with open("./configs/"+model_name+".config",
                  'w', newline='') as output_file:
            dict_writer = csv.DictWriter(output_file, keys)
            dict_writer.writeheader()
            # BUG FIX: writerows expects an iterable of dicts; passing a
            # single dict iterated over its keys. writerow writes one config.
            dict_writer.writerow(config)

        train(model, model_name, LEARNING_RATE, EPOCHS)


def random_sample(dataset, n):
    """Randomly sample n architectures from ConfigSpace, writing each
    config to ./configs/ and training it."""
    cs = ConfigSpace(dataset)
    sampled_architectures = cs.sample_arch_uniformly(n)

    keys = sampled_architectures[0].keys()

    for config in sampled_architectures:
        model = Network(config)
        model_name = "resnet_{}_{}".format(config["M"], get_nb_convs(config))

        with open("./configs/"+model_name+".config",
                  'w', newline='') as output_file:
            dict_writer = csv.DictWriter(output_file, keys)
            dict_writer.writeheader()
            # BUG FIX: writerow, not writerows (config is a single dict).
            dict_writer.writerow(config)

        train(model, model_name, LEARNING_RATE, EPOCHS)


def ea_sample(dataset, n, n_iter):
    """Sample n architectures from ConfigSpace using an evolutionary
    algorithm.

    NOTE(review): Worker's __init__ signature is (cs, eval, optimizer,
    runs, max_budget, n_iter) and Worker.search() takes no arguments --
    this positional call looks stale; verify against worker.py.
    """
    cs = ConfigSpace(dataset)
    worker = Worker(dataset, cs, 3, n_iter)
    worker.search(population_size=n)
def kendal_correlation(v1, v2):
    """Compute the kendal correlation between two variables v1 & v2.

    Returns 0 when the correlation is not significant at level ALPHA.
    """
    coef, p = kendalltau(v1, v2)

    if p > ALPHA:
        print("Samples are uncorrelated (fail to reject H0)")
        return 0
    else:
        return coef


def spearman_correlation(v1, v2):
    """Compute the spearman correlation between two variables v1 & v2.

    Returns 0 when the correlation is not significant at level ALPHA.
    """
    coef, p = spearmanr(v1, v2)
    if p > ALPHA:
        print("Samples are uncorrelated (fail to reject H0)")
        return 0
    else:
        return coef


def check_ties(v1, v2):
    """Check if either variable contains ties (repeated values).

    Contains ties --> Spearman
    No ties --> Kendal

    BUG FIX: the original tested for values common to BOTH v1 and v2
    (set intersection), but a "tie" in rank statistics is a duplicated
    value WITHIN a single sample.
    """
    return len(set(v1)) < len(v1) or len(set(v2)) < len(v2)


def truncate(f, n):
    """Truncate float f to n decimal places (toward negative infinity)."""
    return math.floor(f * 10 ** n) / 10 ** n


def nb_rank_error(v1, v2):
    """Compute the pairwise ranking error: the number of positions where
    the rank orderings of v1 and v2 disagree."""
    v1_sorted = sorted(range(len(v1)), key=lambda k: v1[k])
    v2_sorted = sorted(range(len(v2)), key=lambda k: v2[k])

    rank_errors = 0
    for i in range(len(v1)):
        if v1_sorted[i] != v2_sorted[i]:
            rank_errors += 1
    return rank_errors


def get_nb_params(model):
    """Compute the number of parameters of model."""
    return sum(p.numel() for p in model.parameters())


def get_nb_convs(config):
    """Compute the depth (total convolution count) of the model described
    by an architecture config dict: BasicBlocks (convblock==1) contribute
    2 convs per block, Bottlenecks (convblock==2) contribute 3."""
    m = config["M"]
    nb_conv = 0
    for i in range(1, m+1):
        if config["convblock%d" % i] == 1:
            nb_conv += config["R%d" % i]*2*config["B%d" % i]
        if config["convblock%d" % i] == 2:
            nb_conv += config["R%d" % i]*3*config["B%d" % i]
    return nb_conv
class Evaluator:
    """Abstract base class for accuracy-evaluation methods.

    Concrete evaluators implement the actual fitting / querying logic;
    this base class only stores the model type and, optionally, a
    hyperparameter dictionary.
    """

    def __init__(self, model_type=None):
        # Identifier of the underlying surrogate model (e.g. "XGBRanker").
        self.model_type = model_type

    def pre_process(self):
        """Hook called once at the start of the NAS algorithm, before any
        architectures have been queried."""

    def fit(self, x_train, y_train):
        """Train the evaluator.

        Args:
            x_train: list of architectures
            y_train: accuracies or ranks
        """

    def query(self, x_test):
        """Get the accuracy/rank prediction for x_test.

        Args:
            x_test: list of architectures

        Returns:
            Predictions for the architectures
        """

    def get_evaluator_stat(self):
        """Check whether the evaluator needs retraining.

        Returns:
            A dictionary of metrics.
        """
        return {
            "requires_retraining": False,
            "test_accuracy": None,
            "requires_hyperparameters": False,
            "hyperparams": {},
        }

    def set_hyperparams(self, hyperparams):
        """Modify/set the hyperparameters of the evaluator.

        Args:
            hyperparams: dictionary of hyperparameters.
        """
        self.hyperparams = hyperparams

    def get_hyperparams(self):
        """Get the hyperparameters of the evaluator.

        Returns:
            The hyperparameter dictionary, or None if never set.
        """
        return getattr(self, "hyperparams", None)

    def get_correlation(self, x_test, y_test):
        """Kendall correlation between query(x_test) and the truth y_test."""
        predicted = self.query(x_test)
        return kendal_correlation(y_test, predicted)
"""XGBoost evaluator."""
from tabnanny import verbose  # NOTE(review): spurious import -- `verbose` is never used and tabnanny is unrelated.
import xgboost as xgb
import numpy as np
#from base_evaluator import Evaluator

"""
XGBoost Evaluator Wrapper class.

Wraps three pre-trained XGBoost models loaded from disk:
  - ranker: ranks architectures by predicted accuracy,
  - avm_predictor: predicts the accuracy variation over one month (AVM),
  - std_predictor: predicts the accuracy standard deviation.
"""
class XGBoostEvaluator():
    def __init__(
        self,
        model_type = "XGBRanker",
        load_weight = True,
        hpo_wrapper=False,
        hparams_from_file=False,
        avm_predictor_path = "analogainas/evaluators/weights/xgboost_avm.bst",
        std_predictor_path = "analogainas/evaluators/weights/xgboost_std.bst",
        ranker_path = "analogainas/evaluators/weights/xgboost_ranker_acc.bst"
    ):
        # model_type: label only; the actual estimators below are XGBRegressors.
        # load_weight: when True, the three model files are loaded eagerly here.
        self.model_type = model_type
        self.hpo_wrapper = hpo_wrapper
        # Default training hyperparameters used by fit() when none are given.
        # NOTE(review): 'tree_method':'gpu_hist' requires a GPU build of
        # xgboost -- confirm for CPU-only deployments.
        self.default_hyperparams = {
            'tree_method':'gpu_hist',
            'booster':'gbtree',
            'objective':'rank:pairwise',
            'random_state':42,
            'learning_rate':0.1,
            'colsample_bytree':0.9,
            'eta':0.05,
            'max_depth':6,
            'n_estimators':110,
            'subsample':0.75,
            'enable_categorical':True}
        self.hyperparams = None
        self.hparams_from_file = hparams_from_file
        self.load_weight = load_weight
        self.ranker_path = ranker_path
        self.avm_predictor_path = avm_predictor_path
        self.std_predictor_path = std_predictor_path

        # Eagerly construct (and optionally load) the three surrogate models.
        self.ranker = self.get_ranker()
        self.avm_predictor = self.get_avm_predictor()
        self.std_predictor = self.get_std_predictor()

    def get_ranker(self):
        """Build the accuracy ranker, loading saved weights if requested."""
        ranker = xgb.XGBRegressor(objective='rank:pairwise')
        if self.load_weight == True:
            ranker.load_model(self.ranker_path)
        return ranker

    def get_avm_predictor(self):
        """Build the AVM predictor, loading saved weights if requested."""
        avm_predictor = xgb.XGBRegressor()
        if self.load_weight == True:
            avm_predictor.load_model(self.avm_predictor_path)
        return avm_predictor

    def get_std_predictor(self):
        """Build the std-deviation predictor, loading saved weights if requested."""
        std_predictor = xgb.XGBRegressor()
        if self.load_weight == True:
            std_predictor.load_model(self.std_predictor_path)
        return std_predictor

    def fit(self, x_train, y_train, train_info_file="xgboost.txt", hyperparameters=None, epochs=500, verbose=True):
        """Train a fresh ranking model and save it to train_info_file.

        NOTE(review): `verbose` is accepted but never used.
        """
        if hyperparameters == None:
            # NOTE(review): `self.get_model` is not defined anywhere on this
            # class -- this call raises AttributeError.  It presumably should
            # build an xgb booster from the hyperparameter dict; confirm.
            self.evaluator = self.get_model(self.default_hyperparams)
        else:
            self.hyperparams = hyperparameters
            self.evaluator = self.get_model(self.hyperparams)

        progress = dict()
        d_train = xgb.DMatrix(x_train, y_train)
        watchlist = [(d_train,'rank:ndcg')]
        # NOTE(review): when hyperparameters is None, self.hyperparams is still
        # None here; also sklearn-API estimators have no .train() method
        # (xgb.train is a module-level function) -- verify this code path.
        self.evaluator = self.evaluator.train(self.hyperparams, d_train, epochs, watchlist, evals_result=progress)

        #SAVE MODEL
        self.evaluator.save_model(train_info_file)

        return progress['rank:ndcg']

    def query_pop(self, P):
        """Predict (rank score, AVM) for a population.

        # assumes each element of P is a pair whose first item is an
        # architecture dict -- confirm against the EA optimizer's layout.
        """
        x_test = []
        for a in P:
            arch = list(a[0].values())
            x_test.append(arch)
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def query(self, P):
        """Predict (rank score, AVM) for a single architecture.

        # assumes P is a one-element list containing an architecture dict.
        """
        x_test = []
        arch = list(P[0].values())
        x_test.append(arch)
        return self.ranker.predict(x_test), self.avm_predictor.predict(x_test)

    def dcg_at_k(self, r, k, method=0):
        """Discounted cumulative gain of relevance scores r at cutoff k.

        method=0: first result undiscounted; method=1: standard log2 discount.
        """
        r = np.asfarray(r)[:k]
        if r.size:
            if method == 0:
                return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
            elif method == 1:
                return np.sum(r / np.log2(np.arange(2, r.size + 2)))
        return 0.

    def ndcg_at_k(self, r, k, method=0):
        """Normalized DCG at k: dcg(r) divided by the ideal (sorted) dcg."""
        dcg_max = self.dcg_at_k(sorted(r, reverse=True), k, method)
        if not dcg_max:
            return 0.
        return self.dcg_at_k(r, k, method) / dcg_max

    def ndcg_scorer(self, estimator, X, y_true):
        """sklearn-style scorer: NDCG of y_true ordered by estimator's predictions."""
        y_pred = estimator.predict(X)
        # Assuming y_true contains the actual relevance scores
        # Sort the true scores based on the predictions
        sorted_scores = [y for _, y in sorted(zip(y_pred, y_true), reverse=True)]
        return self.ndcg_at_k(sorted_scores, k=len(y_true))  # or use a specific k
"""Classical Evolutionary Algorithm."""
import random
from analogainas.search_spaces.sample import random_sample

class EAOptimizer:
    """Plain evolutionary optimizer: mutation-only, no crossover/selection.

    NOTE(review): run() reads `self.cs` (config space) and
    `self.evaluation` (fitness function), but neither is assigned in
    __init__ -- callers must attach both before calling run().
    """
    def __init__(self, max_iter, population_size, mutation_prob):
        self.max_iter = max_iter              # number of generations
        self.population_size = population_size
        self.mutation_prob = mutation_prob    # intended mutation probability

    def mutate(self, architecture):
        """Possibly replace `architecture` with a fresh uniform random sample.

        NOTE(review): mutation fires when random() > mutation_prob, i.e.
        with probability 1 - mutation_prob -- this looks inverted; confirm
        the intended semantics before tuning `mutation_prob`.
        """
        if random.random() > self.mutation_prob:
            architecture = random_sample()
        return architecture

    def run(self):
        """Run the EA and return {'best_x': ..., 'best_f': ...}."""
        # Initial population drawn from the attached config space.
        D = [self.cs.sample() for _ in range(self.population_size)]
        best_f = 0.0
        # Placeholder until some fitness beats 0.0; replaced by a single
        # architecture once found.
        best_x = [None] * self.population_size

        for _ in range(self.max_iter):
            new_x = [self.mutate(x) for x in D]
            new_f = [self.evaluation(x) for x in new_x]

            # Track the best individual seen across all generations.
            for j in range(self.population_size):
                if new_f[j] > best_f:
                    best_f = new_f[j]
                    best_x = new_x[j]

            D = new_x

        return {'best_x': best_x, 'best_f': best_f}
33 | """ 34 | def __init__(self, 35 | surrogate, 36 | nb_iter = 200, 37 | population_size=50, 38 | mutation_prob_width=0.8, 39 | mutation_prob_depth=0.8, 40 | mutation_prob_other=0.6, 41 | max_nb_param=1, 42 | T_AVM =10): 43 | 44 | assert population_size > 10, f"Population size needs to be at least 10, got {population_size}" 45 | 46 | self.surrogate = surrogate 47 | self.nb_iter = nb_iter 48 | self.population_size = int(population_size/10) 49 | self.mutation_prob_width = mutation_prob_width 50 | self.mutation_prob_depth = mutation_prob_depth 51 | self.mutation_prob_other = mutation_prob_other 52 | self.max_nb_param = max_nb_param 53 | self.T_AVM = T_AVM 54 | 55 | def mutate(self, cs, architecture): 56 | r = random.random() 57 | if r < 0.4: 58 | architecture= self.mutate_width(cs,architecture) 59 | elif r < 0.8: 60 | architecture= self.mutate_depth(cs,architecture) 61 | else: 62 | architecture= self.mutate_other(cs,architecture) 63 | 64 | return architecture 65 | 66 | def mutate_width(self, cs, architecture): 67 | if random.random() < self.mutation_prob_width: 68 | architecture = cs.sample_arch_uniformly(1) 69 | return architecture 70 | 71 | def mutate_depth(self, cs, architecture): 72 | if random.random() < self.mutation_prob_depth: 73 | architecture = cs.sample_arch_uniformly(1) 74 | return architecture 75 | 76 | def mutate_other(self, cs, architecture): 77 | if random.random() < self.mutation_prob_other: 78 | architecture = cs.sample_arch_uniformly(1) 79 | return architecture 80 | 81 | def generate_initial_population(self, cs): 82 | P = [cs.sample_arch_uniformly(1)] * self.population_size 83 | _, slope = self.surrogate.query_pop(P) 84 | 85 | while (not self.satisfied_constrained(P)): 86 | for i, s in enumerate(slope): 87 | if s > self.T_AVM: 88 | P[i] = cs.sample_arch_uniformly(1) 89 | return P 90 | 91 | def satisfied_constrained(self, P): 92 | _, slope = self.surrogate.query_pop(P) 93 | for i, s in enumerate(slope): 94 | if s > self.T_AVM: 95 | return False 96 | 
return True 97 | 98 | def run(self, cs): 99 | P = self.generate_initial_population(cs) 100 | best_f = 0.0 101 | best_x = [None]*self.population_size 102 | 103 | for i in range(self.nb_iter): 104 | best_accs =[] 105 | new_P = [] 106 | for a in P: 107 | new_a = self.mutate(cs, a) 108 | new_P.append(new_a) 109 | acc, _ = self.surrogate.query(new_a) 110 | best_accs.append(acc) 111 | new_f = max(best_accs) 112 | if new_f > best_f: 113 | best_f = new_f 114 | best_x = new_a[0] 115 | 116 | P = new_P 117 | 118 | print("ITERATION {} completed: best acc {}".format(i, best_f)) 119 | 120 | return best_x, best_f 121 | -------------------------------------------------------------------------------- /analognas/build/lib/analogainas/search_algorithms/worker.py: -------------------------------------------------------------------------------- 1 | """Search Experiment Runner.""" 2 | import os 3 | import csv 4 | import numpy as np 5 | 6 | from analogainas.search_spaces.config_space import ConfigSpace 7 | from analogainas.search_spaces.resnet_macro_architecture import Network 8 | 9 | """Wrapper class to launch NAS search.""" 10 | class Worker(): 11 | def __init__(self, 12 | cs: ConfigSpace=None, 13 | eval = None, 14 | optimizer=None, 15 | runs=5, 16 | max_budget=1, 17 | n_iter=100): 18 | self.max_budget = max_budget 19 | self.n_iter = n_iter 20 | self.config_space = cs 21 | self.evaluation = eval 22 | self.optimizer=optimizer 23 | self.runs = runs 24 | self.best_config = None 25 | self.best_acc = 0 26 | self.std_err = 0 27 | 28 | @property 29 | def best_arch(self): 30 | return Network(self.best_config) 31 | 32 | def search(self): 33 | os.mkdir("results") 34 | print("Result directory created.\n") 35 | 36 | results = [] 37 | for i in range(self.runs): 38 | print("Search {} started".format(i)) 39 | best_config, best_acc = self.optimizer.run(self.config_space) 40 | 41 | with open('results/best_results_{}.csv'.format(i), 'w') as f: 42 | for key in best_config.keys(): 43 | 
f.write("%s,%s\n"%(key,best_config[key])) 44 | 45 | results.append(best_acc) 46 | if best_acc > self.best_acc: 47 | self.best_config = best_config 48 | self.best_acc = best_acc 49 | 50 | print("Best Acc = {}".format(best_acc)) 51 | self.std_err = np.std(results, ddof=1) / np.sqrt(np.size(results)) 52 | 53 | print("SEARCH ENDED") 54 | 55 | def result_summary(self): 56 | print("Best architecture accuracy: ", self.best_acc) 57 | print(f"Standard deviation of accuracy over {self.runs} runs: {self.best_acc}") 58 | -------------------------------------------------------------------------------- /analognas/build/lib/analogainas/utils.py: -------------------------------------------------------------------------------- 1 | import torch.nn.functional as F 2 | from scipy.stats import kendalltau 3 | from scipy.stats import spearmanr 4 | import math 5 | 6 | ALPHA = 0.05 7 | 8 | def accuracy_mse(prediction, target, scale=100.0): 9 | prediction = prediction.detach() * scale 10 | target = (target) * scale 11 | return F.mse_loss(prediction, target) 12 | 13 | 14 | def kendal_correlation(v1, v2): 15 | """Compute the kendal correlation between two variables v1 & v2""" 16 | coef, p = kendalltau(v1, v2) 17 | 18 | if p > ALPHA: 19 | print("Samples are uncorrelated (fail to reject H0)") 20 | return 0 21 | else: 22 | return coef 23 | 24 | 25 | def spearman_correlation(v1, v2): 26 | """Compute the spearman correlation between two variables v1 & v2""" 27 | coef, p = spearmanr(v1, v2) 28 | if p > ALPHA: 29 | print("Samples are uncorrelated (fail to reject H0)") 30 | return 0 31 | else: 32 | return coef 33 | 34 | 35 | def check_ties(v1, v2): 36 | """Check if two variables contains ties. 
def truncate(f, n):
    """Truncate f to n decimal places (floors, i.e. rounds toward -inf)."""
    factor = 10 ** n
    return math.floor(f * factor) / factor


def nb_rank_error(v1, v2):
    """Compute the pairwise ranking error.

    Counts positions where the argsort of v1 disagrees with the argsort
    of v2; assumes both sequences have the same length.
    """
    order_a = sorted(range(len(v1)), key=lambda idx: v1[idx])
    order_b = sorted(range(len(v2)), key=lambda idx: v2[idx])
    return sum(1 for a, b in zip(order_a, order_b) if a != b)


def get_nb_params(model):
    """Compute the number of parameters of model."""
    total = 0
    for p in model.parameters():
        total += p.numel()
    return total


def get_nb_convs(config):
    """Compute the depth (total number of convolutions) of the model.

    # assumes convblock == 1 means 2 convs per residual unit and
    # convblock == 2 means 3 -- confirm against the config space.
    """
    depth = 0
    for i in range(1, config["M"] + 1):
        block_type = config["convblock%d" % i]
        if block_type == 1:
            depth += config["R%d" % i] * 2 * config["B%d" % i]
        if block_type == 2:
            depth += config["R%d" % i] * 3 * config["B%d" % i]
    return depth
1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /analognas/docs/_build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/environment.pickle -------------------------------------------------------------------------------- /analognas/docs/_build/doctrees/getting_started.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/getting_started.doctree -------------------------------------------------------------------------------- /analognas/docs/_build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/index.doctree -------------------------------------------------------------------------------- /analognas/docs/_build/doctrees/install.doctree: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/doctrees/install.doctree -------------------------------------------------------------------------------- /analognas/docs/_build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: 62f30f4bb49045b5aa46b9c411b8b5be 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_sources/getting_started.rst.txt: -------------------------------------------------------------------------------- 1 | Tutorial 2 | ======== 3 | 4 | *AnalogAINAS* is a framework that aims at building analog-aware efficient deep learning models. AnalogNAS is built on top of the [AIHWKIT](https://github.com/IBM/aihwkit). IBM Analog Hardware Acceleration Kit (AIHWKIT) is an open source Python toolkit for exploring and using the capabilities of in-memory computing devices in the context of artificial intelligence. 5 | 6 | In a high-level AnalogAINAS consists of 4 main building blocks which (can) interact with each other: 7 | 8 | * Configuration spaces: a search space of architectures targeting a specific dataset. 9 | * Evaluator: a ML predictor model to predict: 10 | * 1-day Accuracy: the evaluator models the drift effect that is encountered in Analog devices. The accuracy after 1 day of drift is then predicted and used as an objective to maximize. 11 | * The Accuracy Variation for One Month (AVM): The difference between the accuracy after 1 month and the accuracy after 1 sec. 12 | * The 1-day accuracy standard deviation: The stochasticity of the noise induces different variation of the model's accuracy depending on its architecture. 
13 | * Optimizer: a optimization strategy such as evolutionary algorithm or bayesian optimization. 14 | * Worker: A global object that runs the architecture search loop and the final network training pipeline 15 | 16 | Create a Configuration Space 17 | ---------------------------- 18 | 19 | AnalogNAS presents a general search space composed of ResNet-like architectures. 20 | 21 | The macro-architecture defined in the file ```search_spaces/resnet_macro_architecture.py``` is customizable to any image classification dataset, given an input shape and output classes. 22 | 23 | .. warning:: 24 | The hyperparameters in the configuration space should have a unique name ID each. 25 | 26 | Evaluator 27 | --------- 28 | 29 | To speed up the search, we built a machine learning predictor to evaluate the accuracy and robustness of any given architecture from the configuration space. 30 | 31 | Search Optimizer and Worker 32 | --------------------------- 33 | 34 | In this example, we will use evolutionary search to look for the best architecture in CS using our evaluator. 35 | 36 | :: 37 | 38 | from analogainas.search_algorithms.ea_optimized import EAOptimizer 39 | from analogainas.search_algorithms.worker import Worker 40 | 41 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) 42 | 43 | NB_RUN = 2 44 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) 45 | 46 | worker.search() 47 | 48 | worker.result_summary() 49 | 50 | 51 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | AnalogNAS 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 3 6 | :caption: Get started 7 | :hidden: 8 | 9 | install 10 | getting_started 11 | 12 | 13 | .. 
toctree:: 14 | :maxdepth: 3 15 | :caption: References 16 | :hidden: 17 | 18 | api_references 19 | references 20 | 21 | AnalogNAS is a Python library designed to empower researchers and practitioners in efficiently exploring and optimizing neural network architectures specifically for in-memory computing scenarios. AnalogNAS is built on top of the IBM Analog Hardware Acceleration Kit that enables efficient hardware-aware training with simulated noise injection on multiple IMC devices. By capitalizing on the advantages of in-memory computing, AnalogNAS opens new avenues for discovering architectures that can fully exploit the capabilities of this emerging computing paradigm. 22 | 23 | AnalogNAS offers a comprehensive set of features and functionalities that facilitate the neural architecture search process. From seamlessly exploring a vast space of architectural configurations to fine-tuning models for optimal performance, AnalogNAS provides a versatile framework that accelerates the discovery of efficient and effective neural network architectures for in-memory computing. 24 | 25 | .. warning:: 26 | This library is currently in beta and under active development. 27 | Please be mindful of potential issues and keep an eye for improvements, 28 | new features and bug fixes in upcoming versions. 29 | 30 | Features 31 | -------- 32 | 33 | 34 | - A customizable resnet-like search space, allowing to target CIFAR-10, Visual Wake Words, and Keyword Spotting 35 | - A configuration space object allows to add any number or type of architecture and training hyperparameters to the search 36 | - An analog-specific evaluator which includes: 37 | 38 | - An 1-day accuracy ranker 39 | - An 1 month accuracy variation estimator 40 | - A 1-day standard deviation estimator 41 | 42 | - A flexible search algorithm, enabling the implementation and extension of state-of-the-art NAS methods. 
43 | 44 | Installation 45 | ------------ 46 | 47 | Install analogNAS by running: 48 | 49 | pip install analogainas 50 | 51 | 52 | How to cite 53 | ----------- 54 | 55 | In case you are using the *AnalogNAS* for 56 | your research, please cite: 57 | 58 | .. note:: 59 | 60 | Benmeziane, H., Lammie, C., Boybat, I., Rasch, M., Gallo, M. L., Tsai, H., ... & Maghraoui, K. E. (2023). AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing. IEEE Edge 2023. 61 | 62 | https://arxiv.org/abs/2305.10459 63 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_sources/install.rst.txt: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | The preferred way to install this package is by using the `Python package index`_:: 5 | 6 | pip install analogainas 7 | 8 | For ease of installation, install aihwkit library separately, refer to `AIHWKit installation`_: 9 | 10 | The package require the following runtime libraries to be installed in your 11 | system: 12 | 13 | * `OpenBLAS`_: 0.3.3+ 14 | * `CUDA Toolkit`_: 9.0+ (only required for the GPU-enabled simulator [#f1]_) 15 | 16 | .. [#f1] Note that GPU support is not available in OSX, as it depends on a 17 | platform that has official CUDA support. 18 | 19 | .. _AIHWKit installation: https://aihwkit.readthedocs.io/en/latest/install.html 20 | .. _Python package index: https://pypi.org/project/analogainas/ 21 | .. _OpenBLAS: https://www.openblas.net 22 | .. _CUDA Toolkit: https://developer.nvidia.com/accelerated-computing-toolkit -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/_sphinx_javascript_frameworks_compat.js: -------------------------------------------------------------------------------- 1 | /* Compatability shim for jQuery and underscores.js. 
2 | * 3 | * Copyright Sphinx contributors 4 | * Released under the two clause BSD licence 5 | */ 6 | 7 | /** 8 | * small helper function to urldecode strings 9 | * 10 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 11 | */ 12 | jQuery.urldecode = function(x) { 13 | if (!x) { 14 | return x 15 | } 16 | return decodeURIComponent(x.replace(/\+/g, ' ')); 17 | }; 18 | 19 | /** 20 | * small helper function to urlencode strings 21 | */ 22 | jQuery.urlencode = encodeURIComponent; 23 | 24 | /** 25 | * This function returns the parsed url parameters of the 26 | * current request. Multiple values per key are supported, 27 | * it will always return arrays of strings for the value parts. 28 | */ 29 | jQuery.getQueryParameters = function(s) { 30 | if (typeof s === 'undefined') 31 | s = document.location.search; 32 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 33 | var result = {}; 34 | for (var i = 0; i < parts.length; i++) { 35 | var tmp = parts[i].split('=', 2); 36 | var key = jQuery.urldecode(tmp[0]); 37 | var value = jQuery.urldecode(tmp[1]); 38 | if (key in result) 39 | result[key].push(value); 40 | else 41 | result[key] = [value]; 42 | } 43 | return result; 44 | }; 45 | 46 | /** 47 | * highlight a given string on a jquery object by wrapping it in 48 | * span elements with the given class name. 
49 | */ 50 | jQuery.fn.highlightText = function(text, className) { 51 | function highlight(node, addItems) { 52 | if (node.nodeType === 3) { 53 | var val = node.nodeValue; 54 | var pos = val.toLowerCase().indexOf(text); 55 | if (pos >= 0 && 56 | !jQuery(node.parentNode).hasClass(className) && 57 | !jQuery(node.parentNode).hasClass("nohighlight")) { 58 | var span; 59 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 60 | if (isInSVG) { 61 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 62 | } else { 63 | span = document.createElement("span"); 64 | span.className = className; 65 | } 66 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 67 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 68 | document.createTextNode(val.substr(pos + text.length)), 69 | node.nextSibling)); 70 | node.nodeValue = val.substr(0, pos); 71 | if (isInSVG) { 72 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 73 | var bbox = node.parentElement.getBBox(); 74 | rect.x.baseVal.value = bbox.x; 75 | rect.y.baseVal.value = bbox.y; 76 | rect.width.baseVal.value = bbox.width; 77 | rect.height.baseVal.value = bbox.height; 78 | rect.setAttribute('class', className); 79 | addItems.push({ 80 | "parent": node.parentNode, 81 | "target": rect}); 82 | } 83 | } 84 | } 85 | else if (!jQuery(node).is("button, select, textarea")) { 86 | jQuery.each(node.childNodes, function() { 87 | highlight(this, addItems); 88 | }); 89 | } 90 | } 91 | var addItems = []; 92 | var result = this.each(function() { 93 | highlight(this, addItems); 94 | }); 95 | for (var i = 0; i < addItems.length; ++i) { 96 | jQuery(addItems[i].parent).before(addItems[i].target); 97 | } 98 | return result; 99 | }; 100 | 101 | /* 102 | * backward compatibility for jQuery.browser 103 | * This will be supported until firefox bug is fixed. 
104 | */ 105 | if (!jQuery.browser) { 106 | jQuery.uaMatch = function(ua) { 107 | ua = ua.toLowerCase(); 108 | 109 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 110 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 111 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 112 | /(msie) ([\w.]+)/.exec(ua) || 113 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 114 | []; 115 | 116 | return { 117 | browser: match[ 1 ] || "", 118 | version: match[ 2 ] || "0" 119 | }; 120 | }; 121 | jQuery.browser = {}; 122 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 123 | } 124 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/classic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * classic.css_t 3 | * ~~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- classic theme. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | html { 17 | /* CSS hack for macOS's scrollbar (see #1125) */ 18 | background-color: #FFFFFF; 19 | } 20 | 21 | body { 22 | font-family: sans-serif; 23 | font-size: 100%; 24 | background-color: #11303d; 25 | color: #000; 26 | margin: 0; 27 | padding: 0; 28 | } 29 | 30 | div.document { 31 | display: flex; 32 | background-color: #1c4e63; 33 | } 34 | 35 | div.documentwrapper { 36 | float: left; 37 | width: 100%; 38 | } 39 | 40 | div.bodywrapper { 41 | margin: 0 0 0 230px; 42 | } 43 | 44 | div.body { 45 | background-color: #ffffff; 46 | color: #000000; 47 | padding: 0 20px 30px 20px; 48 | } 49 | 50 | div.footer { 51 | color: #ffffff; 52 | width: 100%; 53 | padding: 9px 0 9px 0; 54 | text-align: center; 55 | font-size: 75%; 56 | } 57 | 58 | div.footer a { 59 | color: #ffffff; 60 | text-decoration: underline; 61 | } 62 | 63 | div.related { 64 | background-color: #133f52; 65 | line-height: 30px; 66 | color: #ffffff; 67 | } 68 | 69 | div.related a { 70 | color: #ffffff; 71 | } 72 | 73 | div.sphinxsidebar { 74 | } 75 | 76 | div.sphinxsidebar h3 { 77 | font-family: 'Trebuchet MS', sans-serif; 78 | color: #ffffff; 79 | font-size: 1.4em; 80 | font-weight: normal; 81 | margin: 0; 82 | padding: 0; 83 | } 84 | 85 | div.sphinxsidebar h3 a { 86 | color: #ffffff; 87 | } 88 | 89 | div.sphinxsidebar h4 { 90 | font-family: 'Trebuchet MS', sans-serif; 91 | color: #ffffff; 92 | font-size: 1.3em; 93 | font-weight: normal; 94 | margin: 5px 0 0 0; 95 | padding: 0; 96 | } 97 | 98 | div.sphinxsidebar p { 99 | color: #ffffff; 100 | } 101 | 102 | div.sphinxsidebar p.topless { 103 | margin: 5px 10px 10px 10px; 104 | } 105 | 106 | div.sphinxsidebar ul { 107 | margin: 10px; 108 | padding: 0; 109 | color: #ffffff; 110 | } 111 | 112 | div.sphinxsidebar a { 113 | color: #98dbcc; 114 | } 115 | 116 | div.sphinxsidebar input { 117 | border: 1px solid 
#98dbcc; 118 | font-family: sans-serif; 119 | font-size: 1em; 120 | } 121 | 122 | 123 | 124 | /* -- hyperlink styles ------------------------------------------------------ */ 125 | 126 | a { 127 | color: #355f7c; 128 | text-decoration: none; 129 | } 130 | 131 | a:visited { 132 | color: #355f7c; 133 | text-decoration: none; 134 | } 135 | 136 | a:hover { 137 | text-decoration: underline; 138 | } 139 | 140 | 141 | 142 | /* -- body styles ----------------------------------------------------------- */ 143 | 144 | div.body h1, 145 | div.body h2, 146 | div.body h3, 147 | div.body h4, 148 | div.body h5, 149 | div.body h6 { 150 | font-family: 'Trebuchet MS', sans-serif; 151 | background-color: #f2f2f2; 152 | font-weight: normal; 153 | color: #20435c; 154 | border-bottom: 1px solid #ccc; 155 | margin: 20px -20px 10px -20px; 156 | padding: 3px 0 3px 10px; 157 | } 158 | 159 | div.body h1 { margin-top: 0; font-size: 200%; } 160 | div.body h2 { font-size: 160%; } 161 | div.body h3 { font-size: 140%; } 162 | div.body h4 { font-size: 120%; } 163 | div.body h5 { font-size: 110%; } 164 | div.body h6 { font-size: 100%; } 165 | 166 | a.headerlink { 167 | color: #c60f0f; 168 | font-size: 0.8em; 169 | padding: 0 4px 0 4px; 170 | text-decoration: none; 171 | } 172 | 173 | a.headerlink:hover { 174 | background-color: #c60f0f; 175 | color: white; 176 | } 177 | 178 | div.body p, div.body dd, div.body li, div.body blockquote { 179 | text-align: justify; 180 | line-height: 130%; 181 | } 182 | 183 | div.admonition p.admonition-title + p { 184 | display: inline; 185 | } 186 | 187 | div.admonition p { 188 | margin-bottom: 5px; 189 | } 190 | 191 | div.admonition pre { 192 | margin-bottom: 5px; 193 | } 194 | 195 | div.admonition ul, div.admonition ol { 196 | margin-bottom: 5px; 197 | } 198 | 199 | div.note { 200 | background-color: #eee; 201 | border: 1px solid #ccc; 202 | } 203 | 204 | div.seealso { 205 | background-color: #ffc; 206 | border: 1px solid #ff6; 207 | } 208 | 209 | nav.contents, 210 
| aside.topic, 211 | div.topic { 212 | background-color: #eee; 213 | } 214 | 215 | div.warning { 216 | background-color: #ffe4e4; 217 | border: 1px solid #f66; 218 | } 219 | 220 | p.admonition-title { 221 | display: inline; 222 | } 223 | 224 | p.admonition-title:after { 225 | content: ":"; 226 | } 227 | 228 | pre { 229 | padding: 5px; 230 | background-color: unset; 231 | color: unset; 232 | line-height: 120%; 233 | border: 1px solid #ac9; 234 | border-left: none; 235 | border-right: none; 236 | } 237 | 238 | code { 239 | background-color: #ecf0f3; 240 | padding: 0 1px 0 1px; 241 | font-size: 0.95em; 242 | } 243 | 244 | th, dl.field-list > dt { 245 | background-color: #ede; 246 | } 247 | 248 | .warning code { 249 | background: #efc2c2; 250 | } 251 | 252 | .note code { 253 | background: #d6d6d6; 254 | } 255 | 256 | .viewcode-back { 257 | font-family: sans-serif; 258 | } 259 | 260 | div.viewcode-block:target { 261 | background-color: #f4debf; 262 | border-top: 1px solid #ac9; 263 | border-bottom: 1px solid #ac9; 264 | } 265 | 266 | div.code-block-caption { 267 | color: #efefef; 268 | background-color: #1c4e63; 269 | } -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) 
format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd 
a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- 
/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- 
/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/css/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/css/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/custom.css: -------------------------------------------------------------------------------- 1 | /* This file intentionally left blank. */ 2 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Base JavaScript utilities for all Sphinx HTML documentation. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | "use strict"; 12 | 13 | const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ 14 | "TEXTAREA", 15 | "INPUT", 16 | "SELECT", 17 | "BUTTON", 18 | ]); 19 | 20 | const _ready = (callback) => { 21 | if (document.readyState !== "loading") { 22 | callback(); 23 | } else { 24 | document.addEventListener("DOMContentLoaded", callback); 25 | } 26 | }; 27 | 28 | /** 29 | * Small JavaScript module for the documentation. 30 | */ 31 | const Documentation = { 32 | init: () => { 33 | Documentation.initDomainIndexTable(); 34 | Documentation.initOnKeyListeners(); 35 | }, 36 | 37 | /** 38 | * i18n support 39 | */ 40 | TRANSLATIONS: {}, 41 | PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), 42 | LOCALE: "unknown", 43 | 44 | // gettext and ngettext don't access this so that the functions 45 | // can safely bound to a different name (_ = Documentation.gettext) 46 | gettext: (string) => { 47 | const translated = Documentation.TRANSLATIONS[string]; 48 | switch (typeof translated) { 49 | case "undefined": 50 | return string; // no translation 51 | case "string": 52 | return translated; // translation exists 53 | default: 54 | return translated[0]; // (singular, plural) translation tuple exists 55 | } 56 | }, 57 | 58 | ngettext: (singular, plural, n) => { 59 | const translated = Documentation.TRANSLATIONS[singular]; 60 | if (typeof translated !== "undefined") 61 | return translated[Documentation.PLURAL_EXPR(n)]; 62 | return n === 1 ? 
singular : plural; 63 | }, 64 | 65 | addTranslations: (catalog) => { 66 | Object.assign(Documentation.TRANSLATIONS, catalog.messages); 67 | Documentation.PLURAL_EXPR = new Function( 68 | "n", 69 | `return (${catalog.plural_expr})` 70 | ); 71 | Documentation.LOCALE = catalog.locale; 72 | }, 73 | 74 | /** 75 | * helper function to focus on search bar 76 | */ 77 | focusSearchBar: () => { 78 | document.querySelectorAll("input[name=q]")[0]?.focus(); 79 | }, 80 | 81 | /** 82 | * Initialise the domain index toggle buttons 83 | */ 84 | initDomainIndexTable: () => { 85 | const toggler = (el) => { 86 | const idNumber = el.id.substr(7); 87 | const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); 88 | if (el.src.substr(-9) === "minus.png") { 89 | el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; 90 | toggledRows.forEach((el) => (el.style.display = "none")); 91 | } else { 92 | el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; 93 | toggledRows.forEach((el) => (el.style.display = "")); 94 | } 95 | }; 96 | 97 | const togglerElements = document.querySelectorAll("img.toggler"); 98 | togglerElements.forEach((el) => 99 | el.addEventListener("click", (event) => toggler(event.currentTarget)) 100 | ); 101 | togglerElements.forEach((el) => (el.style.display = "")); 102 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); 103 | }, 104 | 105 | initOnKeyListeners: () => { 106 | // only install a listener if it is really needed 107 | if ( 108 | !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && 109 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS 110 | ) 111 | return; 112 | 113 | document.addEventListener("keydown", (event) => { 114 | // bail for input elements 115 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 116 | // bail with special keys 117 | if (event.altKey || event.ctrlKey || event.metaKey) return; 118 | 119 | if (!event.shiftKey) { 120 | switch (event.key) { 121 | case "ArrowLeft": 122 | if 
(!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 123 | 124 | const prevLink = document.querySelector('link[rel="prev"]'); 125 | if (prevLink && prevLink.href) { 126 | window.location.href = prevLink.href; 127 | event.preventDefault(); 128 | } 129 | break; 130 | case "ArrowRight": 131 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 132 | 133 | const nextLink = document.querySelector('link[rel="next"]'); 134 | if (nextLink && nextLink.href) { 135 | window.location.href = nextLink.href; 136 | event.preventDefault(); 137 | } 138 | break; 139 | } 140 | } 141 | 142 | // some keyboard layouts may need Shift to get / 143 | switch (event.key) { 144 | case "/": 145 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; 146 | Documentation.focusSearchBar(); 147 | event.preventDefault(); 148 | } 149 | }); 150 | }, 151 | }; 152 | 153 | // quick alias for translations 154 | const _ = Documentation.gettext; 155 | 156 | _ready(Documentation.init); 157 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: 'v0.1.0', 4 | LANGUAGE: 'en', 5 | COLLAPSE_INDEX: false, 6 | BUILDER: 'html', 7 | FILE_SUFFIX: '.html', 8 | LINK_SUFFIX: '.html', 9 | HAS_SOURCE: true, 10 | SOURCELINK_SUFFIX: '.txt', 11 | NAVIGATION_WITH_KEYS: false, 12 | SHOW_SEARCH_SUMMARY: true, 13 | ENABLE_SEARCH_SHORTCUTS: true, 14 | }; -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/file.png 
-------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/js/badge_only.js: -------------------------------------------------------------------------------- 1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/js/html5shiv-printshiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return 
c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var 
f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/js/html5shiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | 
* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in 
a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/js/theme.js: -------------------------------------------------------------------------------- 1 | !function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof 
window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 64 | var mgr1 = "^(" + C + ")?" 
+ V + C + V + C; // [C]VCVC... is m>1 65 | var s_v = "^(" + C + ")?" + v; // vowel in stem 66 | 67 | this.stemWord = function (w) { 68 | var stem; 69 | var suffix; 70 | var firstch; 71 | var origword = w; 72 | 73 | if (w.length < 3) 74 | return w; 75 | 76 | var re; 77 | var re2; 78 | var re3; 79 | var re4; 80 | 81 | firstch = w.substr(0,1); 82 | if (firstch == "y") 83 | w = firstch.toUpperCase() + w.substr(1); 84 | 85 | // Step 1a 86 | re = /^(.+?)(ss|i)es$/; 87 | re2 = /^(.+?)([^s])s$/; 88 | 89 | if (re.test(w)) 90 | w = w.replace(re,"$1$2"); 91 | else if (re2.test(w)) 92 | w = w.replace(re2,"$1$2"); 93 | 94 | // Step 1b 95 | re = /^(.+?)eed$/; 96 | re2 = /^(.+?)(ed|ing)$/; 97 | if (re.test(w)) { 98 | var fp = re.exec(w); 99 | re = new RegExp(mgr0); 100 | if (re.test(fp[1])) { 101 | re = /.$/; 102 | w = w.replace(re,""); 103 | } 104 | } 105 | else if (re2.test(w)) { 106 | var fp = re2.exec(w); 107 | stem = fp[1]; 108 | re2 = new RegExp(s_v); 109 | if (re2.test(stem)) { 110 | w = stem; 111 | re2 = /(at|bl|iz)$/; 112 | re3 = new RegExp("([^aeiouylsz])\\1$"); 113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 114 | if (re2.test(w)) 115 | w = w + "e"; 116 | else if (re3.test(w)) { 117 | re = /.$/; 118 | w = w.replace(re,""); 119 | } 120 | else if (re4.test(w)) 121 | w = w + "e"; 122 | } 123 | } 124 | 125 | // Step 1c 126 | re = /^(.+?)y$/; 127 | if (re.test(w)) { 128 | var fp = re.exec(w); 129 | stem = fp[1]; 130 | re = new RegExp(s_v); 131 | if (re.test(stem)) 132 | w = stem + "i"; 133 | } 134 | 135 | // Step 2 136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 137 | if (re.test(w)) { 138 | var fp = re.exec(w); 139 | stem = fp[1]; 140 | suffix = fp[2]; 141 | re = new RegExp(mgr0); 142 | if (re.test(stem)) 143 | w = stem + step2list[suffix]; 144 | } 145 | 146 | // Step 3 147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 148 | if (re.test(w)) { 149 | var fp 
= re.exec(w); 150 | stem = fp[1]; 151 | suffix = fp[2]; 152 | re = new RegExp(mgr0); 153 | if (re.test(stem)) 154 | w = stem + step3list[suffix]; 155 | } 156 | 157 | // Step 4 158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 159 | re2 = /^(.+?)(s|t)(ion)$/; 160 | if (re.test(w)) { 161 | var fp = re.exec(w); 162 | stem = fp[1]; 163 | re = new RegExp(mgr1); 164 | if (re.test(stem)) 165 | w = stem; 166 | } 167 | else if (re2.test(w)) { 168 | var fp = re2.exec(w); 169 | stem = fp[1] + fp[2]; 170 | re2 = new RegExp(mgr1); 171 | if (re2.test(stem)) 172 | w = stem; 173 | } 174 | 175 | // Step 5 176 | re = /^(.+?)e$/; 177 | if (re.test(w)) { 178 | var fp = re.exec(w); 179 | stem = fp[1]; 180 | re = new RegExp(mgr1); 181 | re2 = new RegExp(meq1); 182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 184 | w = stem; 185 | } 186 | re = /ll$/; 187 | re2 = new RegExp(mgr1); 188 | if (re.test(w) && re2.test(w)) { 189 | re = /.$/; 190 | w = w.replace(re,""); 191 | } 192 | 193 | // and turn initial Y back to y 194 | if (firstch == "y") 195 | w = firstch.toLowerCase() + w.substr(1); 196 | return w; 197 | } 198 | } 199 | 200 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/minus.png -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/_static/plus.png 
-------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | pre { line-height: 125%; } 2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 6 | .highlight .hll { background-color: #ffffcc } 7 | .highlight { background: #f8f8f8; } 8 | .highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ 9 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 10 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */ 11 | .highlight .o { color: #666666 } /* Operator */ 12 | .highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ 13 | .highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ 14 | .highlight .cp { color: #9C6500 } /* Comment.Preproc */ 15 | .highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ 16 | .highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ 17 | .highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ 18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 19 | .highlight .ge { font-style: italic } /* Generic.Emph */ 20 | .highlight .gr { color: #E40000 } /* Generic.Error */ 21 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 22 | .highlight .gi { color: #008400 } /* Generic.Inserted */ 23 | .highlight .go { color: #717171 } /* Generic.Output */ 24 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 25 | .highlight .gs { 
font-weight: bold } /* Generic.Strong */ 26 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 27 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 28 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ 29 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ 30 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ 31 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */ 32 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ 33 | .highlight .kt { color: #B00040 } /* Keyword.Type */ 34 | .highlight .m { color: #666666 } /* Literal.Number */ 35 | .highlight .s { color: #BA2121 } /* Literal.String */ 36 | .highlight .na { color: #687822 } /* Name.Attribute */ 37 | .highlight .nb { color: #008000 } /* Name.Builtin */ 38 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 39 | .highlight .no { color: #880000 } /* Name.Constant */ 40 | .highlight .nd { color: #AA22FF } /* Name.Decorator */ 41 | .highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ 42 | .highlight .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ 43 | .highlight .nf { color: #0000FF } /* Name.Function */ 44 | .highlight .nl { color: #767600 } /* Name.Label */ 45 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 46 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ 47 | .highlight .nv { color: #19177C } /* Name.Variable */ 48 | .highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 49 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 50 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */ 51 | .highlight .mf { color: #666666 } /* Literal.Number.Float */ 52 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */ 53 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */ 54 | .highlight .mo { color: #666666 } /* 
Literal.Number.Oct */ 55 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */ 56 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ 57 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */ 58 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ 59 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ 60 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */ 61 | .highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ 62 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ 63 | .highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ 64 | .highlight .sx { color: #008000 } /* Literal.String.Other */ 65 | .highlight .sr { color: #A45A77 } /* Literal.String.Regex */ 66 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */ 67 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */ 68 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ 69 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */ 70 | .highlight .vc { color: #19177C } /* Name.Variable.Class */ 71 | .highlight .vg { color: #19177C } /* Name.Variable.Global */ 72 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */ 73 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */ 74 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/sidebar.js: -------------------------------------------------------------------------------- 1 | /* 2 | * sidebar.js 3 | * ~~~~~~~~~~ 4 | * 5 | * This script makes the Sphinx sidebar collapsible. 6 | * 7 | * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds 8 | * in .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton 9 | * used to collapse and expand the sidebar. 
10 | * 11 | * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden 12 | * and the width of the sidebar and the margin-left of the document 13 | * are decreased. When the sidebar is expanded the opposite happens. 14 | * This script saves a per-browser/per-session cookie used to 15 | * remember the position of the sidebar among the pages. 16 | * Once the browser is closed the cookie is deleted and the position 17 | * reset to the default (expanded). 18 | * 19 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 20 | * :license: BSD, see LICENSE for details. 21 | * 22 | */ 23 | 24 | const initialiseSidebar = () => { 25 | 26 | 27 | 28 | 29 | // global elements used by the functions. 30 | const bodyWrapper = document.getElementsByClassName("bodywrapper")[0] 31 | const sidebar = document.getElementsByClassName("sphinxsidebar")[0] 32 | const sidebarWrapper = document.getElementsByClassName('sphinxsidebarwrapper')[0] 33 | const sidebarButton = document.getElementById("sidebarbutton") 34 | const sidebarArrow = sidebarButton.querySelector('span') 35 | 36 | // for some reason, the document has no sidebar; do not run into errors 37 | if (typeof sidebar === "undefined") return; 38 | 39 | const flipArrow = element => element.innerText = (element.innerText === "»") ? 
"«" : "»" 40 | 41 | const collapse_sidebar = () => { 42 | bodyWrapper.style.marginLeft = ".8em"; 43 | sidebar.style.width = ".8em" 44 | sidebarWrapper.style.display = "none" 45 | flipArrow(sidebarArrow) 46 | sidebarButton.title = _('Expand sidebar') 47 | window.localStorage.setItem("sidebar", "collapsed") 48 | } 49 | 50 | const expand_sidebar = () => { 51 | bodyWrapper.style.marginLeft = "" 52 | sidebar.style.removeProperty("width") 53 | sidebarWrapper.style.display = "" 54 | flipArrow(sidebarArrow) 55 | sidebarButton.title = _('Collapse sidebar') 56 | window.localStorage.setItem("sidebar", "expanded") 57 | } 58 | 59 | sidebarButton.addEventListener("click", () => { 60 | (sidebarWrapper.style.display === "none") ? expand_sidebar() : collapse_sidebar() 61 | }) 62 | 63 | if (!window.localStorage.getItem("sidebar")) return 64 | const value = window.localStorage.getItem("sidebar") 65 | if (value === "collapsed") collapse_sidebar(); 66 | else if (value === "expanded") expand_sidebar(); 67 | } 68 | 69 | if (document.readyState !== "loading") initialiseSidebar() 70 | else document.addEventListener("DOMContentLoaded", initialiseSidebar) -------------------------------------------------------------------------------- /analognas/docs/_build/html/_static/sphinx_highlight.js: -------------------------------------------------------------------------------- 1 | /* Highlighting utilities for Sphinx HTML documentation. */ 2 | "use strict"; 3 | 4 | const SPHINX_HIGHLIGHT_ENABLED = true 5 | 6 | /** 7 | * highlight a given string on a node by wrapping it in 8 | * span elements with the given class name. 
9 | */ 10 | const _highlight = (node, addItems, text, className) => { 11 | if (node.nodeType === Node.TEXT_NODE) { 12 | const val = node.nodeValue; 13 | const parent = node.parentNode; 14 | const pos = val.toLowerCase().indexOf(text); 15 | if ( 16 | pos >= 0 && 17 | !parent.classList.contains(className) && 18 | !parent.classList.contains("nohighlight") 19 | ) { 20 | let span; 21 | 22 | const closestNode = parent.closest("body, svg, foreignObject"); 23 | const isInSVG = closestNode && closestNode.matches("svg"); 24 | if (isInSVG) { 25 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 26 | } else { 27 | span = document.createElement("span"); 28 | span.classList.add(className); 29 | } 30 | 31 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 32 | parent.insertBefore( 33 | span, 34 | parent.insertBefore( 35 | document.createTextNode(val.substr(pos + text.length)), 36 | node.nextSibling 37 | ) 38 | ); 39 | node.nodeValue = val.substr(0, pos); 40 | 41 | if (isInSVG) { 42 | const rect = document.createElementNS( 43 | "http://www.w3.org/2000/svg", 44 | "rect" 45 | ); 46 | const bbox = parent.getBBox(); 47 | rect.x.baseVal.value = bbox.x; 48 | rect.y.baseVal.value = bbox.y; 49 | rect.width.baseVal.value = bbox.width; 50 | rect.height.baseVal.value = bbox.height; 51 | rect.setAttribute("class", className); 52 | addItems.push({ parent: parent, target: rect }); 53 | } 54 | } 55 | } else if (node.matches && !node.matches("button, select, textarea")) { 56 | node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); 57 | } 58 | }; 59 | const _highlightText = (thisNode, text, className) => { 60 | let addItems = []; 61 | _highlight(thisNode, addItems, text, className); 62 | addItems.forEach((obj) => 63 | obj.parent.insertAdjacentElement("beforebegin", obj.target) 64 | ); 65 | }; 66 | 67 | /** 68 | * Small JavaScript module for the documentation. 
69 | */ 70 | const SphinxHighlight = { 71 | 72 | /** 73 | * highlight the search words provided in localstorage in the text 74 | */ 75 | highlightSearchWords: () => { 76 | if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight 77 | 78 | // get and clear terms from localstorage 79 | const url = new URL(window.location); 80 | const highlight = 81 | localStorage.getItem("sphinx_highlight_terms") 82 | || url.searchParams.get("highlight") 83 | || ""; 84 | localStorage.removeItem("sphinx_highlight_terms") 85 | url.searchParams.delete("highlight"); 86 | window.history.replaceState({}, "", url); 87 | 88 | // get individual terms from highlight string 89 | const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); 90 | if (terms.length === 0) return; // nothing to do 91 | 92 | // There should never be more than one element matching "div.body" 93 | const divBody = document.querySelectorAll("div.body"); 94 | const body = divBody.length ? divBody[0] : document.querySelector("body"); 95 | window.setTimeout(() => { 96 | terms.forEach((term) => _highlightText(body, term, "highlighted")); 97 | }, 10); 98 | 99 | const searchBox = document.getElementById("searchbox"); 100 | if (searchBox === null) return; 101 | searchBox.appendChild( 102 | document 103 | .createRange() 104 | .createContextualFragment( 105 | '" 109 | ) 110 | ); 111 | }, 112 | 113 | /** 114 | * helper function to hide the search marks again 115 | */ 116 | hideSearchWords: () => { 117 | document 118 | .querySelectorAll("#searchbox .highlight-link") 119 | .forEach((el) => el.remove()); 120 | document 121 | .querySelectorAll("span.highlighted") 122 | .forEach((el) => el.classList.remove("highlighted")); 123 | localStorage.removeItem("sphinx_highlight_terms") 124 | }, 125 | 126 | initEscapeListener: () => { 127 | // only install a listener if it is really needed 128 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 129 | 130 | document.addEventListener("keydown", (event) => { 131 | // bail for 
input elements 132 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 133 | // bail with special keys 134 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 135 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 136 | SphinxHighlight.hideSearchWords(); 137 | event.preventDefault(); 138 | } 139 | }); 140 | }, 141 | }; 142 | 143 | _ready(SphinxHighlight.highlightSearchWords); 144 | _ready(SphinxHighlight.initEscapeListener); 145 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Index — AnalogNAS v0.1.0 documentation 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 |
25 | 56 | 57 |
61 | 62 |
63 |
64 |
65 |
    66 |
  • 67 | 68 |
  • 69 |
  • 70 |
71 |
72 |
73 |
74 |
75 | 76 | 77 |

Index

78 | 79 |
80 | 81 |
82 | 83 | 84 |
85 |
86 |
87 | 88 |
89 | 90 |
91 |

© Copyright 2023, Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, Abu Sebastian, Kaoutar El Maghraoui.

92 |
93 | 94 | Built with Sphinx using a 95 | theme 96 | provided by Read the Docs. 97 | 98 | 99 |
100 |
101 |
102 |
103 |
104 | 109 | 110 | 111 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/install.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Installation — AnalogNAS v0.1.0 documentation 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 54 | 55 |
59 | 60 |
61 |
62 |
63 | 70 |
71 |
72 |
73 |
74 | 75 |
76 |

Installation

77 |

The preferred way to install this package is by using the Python package index:

78 |
pip install analogainas
 79 | 
80 |
81 |

For ease of installation, install the aihwkit library separately; refer to AIHWKit installation:

82 |

The package requires the following runtime libraries to be installed in your 83 | system:

84 | 88 | 95 |
96 | 97 | 98 |
99 |
100 |
104 | 105 |
106 | 107 |
108 |

© Copyright 2023, Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, Abu Sebastian, Kaoutar El Maghraoui.

109 |
110 | 111 | Built with Sphinx using a 112 | theme 113 | provided by Read the Docs. 114 | 115 | 116 |
117 |
118 |
119 |
120 |
121 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognas/docs/_build/html/objects.inv -------------------------------------------------------------------------------- /analognas/docs/_build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Search — AnalogNAS v0.1.0 documentation 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 59 | 60 |
64 | 65 |
66 |
67 |
68 |
    69 |
  • 70 | 71 |
  • 72 |
  • 73 |
74 |
75 |
76 |
77 |
78 | 79 | 86 | 87 | 88 |
89 | 90 |
91 | 92 |
93 |
94 |
95 | 96 |
97 | 98 |
99 |

© Copyright 2023, Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, Abu Sebastian, Kaoutar El Maghraoui.

100 |
101 | 102 | Built with Sphinx using a 103 | theme 104 | provided by Read the Docs. 105 | 106 | 107 |
108 |
109 |
110 |
111 |
112 | 117 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | -------------------------------------------------------------------------------- /analognas/docs/_build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({"docnames": ["api_references", "getting_started", "index", "install", "references"], "filenames": ["api_references.rst", "getting_started.rst", "index.rst", "install.rst", "references.rst"], "titles": ["API Reference", "Tutorial", "AnalogNAS", "Installation", "Paper References"], "terms": {"solv": [], "your": [2, 3], "problem": [], "where": [], "start": [], "document": [], "provid": 2, "basic": [], "explan": [], "how": [], "do": [], "easili": [], "look": 1, "easi": [], "i": [1, 2, 3], "us": [1, 2, 3, 4], "import": 1, "get": [], "stuff": [], "done": [], "do_stuff": [], "Be": [], "awesom": [], "make": [], "thing": [], "faster": [], "run": [1, 2], "issu": 2, "tracker": [], "github": 1, "com": 1, "sourc": 1, "code": [], "If": [], "you": 2, "ar": 2, "have": 1, "pleas": 2, "let": [], "u": [], "know": [], "we": 1, "mail": [], "list": [], "locat": [], "googl": [], "group": [], "The": [1, 3], "under": 2, "bsd": [], "python": [1, 2, 3], "librari": [2, 3], "design": [2, 4], "empow": 2, "research": 2, "practition": 2, "effici": [1, 2], "explor": [1, 2], "optim": 2, "neural": [2, 4], "network": [1, 2, 4], "architectur": [1, 2, 4], "specif": [1, 2], "memori": [1, 2, 4], "comput": [1, 2, 4], "scenario": 2, "built": [1, 2], "top": [1, 2], "ibm": [1, 2], "analog": [1, 2, 4], "hardwar": [1, 2, 4], "acceler": [1, 2, 4], "kit": [1, 2], "enabl": [2, 3], "awar": [1, 2, 4], "train": [1, 2, 4], "simul": [2, 3], "nois": [1, 2, 4], "inject": 2, "multipl": 2, "imc": 2, "devic": [1, 2, 4], "By": 2, "capit": 2, "advantag": 2, "open": [1, 2], "new": 2, "avenu": 2, "discov": 2, "can": [1, 2], "fulli": 2, "exploit": 2, "capabl": [1, 2], "thi": [1, 2, 3], "emerg": 2, "paradigm": 2, "offer": 2, "comprehens": 2, "set": 2, 
"function": 2, "facilit": 2, "search": [2, 4], "process": 2, "from": [1, 2, 4], "seamlessli": 2, "vast": 2, "space": 2, "configur": 2, "fine": 2, "tune": 2, "model": [1, 2], "perform": 2, "versatil": 2, "framework": [1, 2, 4], "discoveri": 2, "effect": [1, 2], "project": [], "A": [1, 2, 4], "customiz": [1, 2], "resnet": [1, 2], "like": [1, 2], "allow": 2, "target": [1, 2], "cifar": 2, "10": [1, 2], "visual": 2, "wake": 2, "word": 2, "keyword": 2, "spot": 2, "object": [1, 2], "add": 2, "ani": [1, 2], "number": 2, "type": 2, "hyperparamet": [1, 2], "an": [1, 2], "evalu": 2, "which": [1, 2], "includ": 2, "1": [1, 2, 3, 4], "dai": [1, 2], "accuraci": [1, 2, 4], "ranker": 2, "month": [1, 2], "variat": [1, 2], "estim": 2, "standard": [1, 2], "deviat": [1, 2], "flexibl": 2, "algorithm": [1, 2], "implement": 2, "extens": 2, "state": 2, "art": 2, "na": 2, "method": 2, "warn": [], "current": 2, "beta": 2, "activ": 2, "develop": 2, "mind": 2, "potenti": 2, "keep": 2, "ey": 2, "improv": 2, "bug": 2, "fix": 2, "upcom": 2, "version": 2, "pip": [2, 3], "analogaina": [1, 2, 3], "In": [1, 2, 4], "case": 2, "paper": [], "_": [], "describ": [], "strategi": 1, "benmezian": 2, "h": 2, "lammi": 2, "c": [1, 2], "boybat": 2, "rasch": 2, "m": 2, "gallo": 2, "l": 2, "tsai": 2, "maghraoui": 2, "k": 2, "e": 2, "2023": [2, 4], "accur": [2, 4], "infer": [2, 4], "ieee": [2, 4], "edg": [2, 4], "http": [1, 2], "arxiv": 2, "org": 2, "ab": 2, "2305": 2, "10459": 2, "prefer": 3, "wai": 3, "packag": 3, "index": 3, "For": 3, "eas": 3, "aihwkit": [1, 3], "separ": 3, "refer": 3, "requir": 3, "follow": 3, "runtim": 3, "system": 3, "openbla": 3, "0": 3, "3": [3, 4], "cuda": 3, "toolkit": [1, 3], "9": 3, "onli": 3, "gpu": 3, "f1": [], "note": 3, "support": 3, "avail": 3, "osx": 3, "depend": [1, 3], "platform": 3, "ha": 3, "offici": 3, "analogna": [1, 4], "2": [1, 4], "2021": 4, "ijcai": 4, "survei": 4, "taxonomi": 4, "2020": 4, "natur": 4, "nanotechnologi": 4, "applic": 4, "commun": 4, "deep": [1, 4], 
"phase": 4, "chang": 4, "frontier": 4, "neurosci": 4, "resist": 4, "cross": 4, "point": 4, "consider": 4, "4": [1, 4], "mix": 4, "precis": 4, "learn": [1, 4], "base": 4, "5": 4, "2018": 4, "equival": 4, "analogu": 4, "6": 4, "signal": 4, "extract": 4, "element": 4, "neuromorph": 4, "7": [], "2019": [], "symposium": [], "vlsi": [], "technologi": [], "capacitor": [], "arrai": [], "record": [], "symmetri": [], "linear": [], "8": [], "intern": [], "electron": [], "meet": [], "iedm": [], "ecram": [], "scalabl": [], "synapt": [], "cell": [], "high": 1, "speed": 1, "low": [], "power": [], "2016": [], "aim": 1, "build": 1, "context": 1, "artifici": 1, "intellig": 1, "level": 1, "consist": 1, "main": 1, "block": 1, "interact": 1, "each": 1, "other": 1, "dataset": 1, "ml": 1, "predictor": 1, "predict": 1, "drift": 1, "encount": 1, "after": 1, "maxim": 1, "One": 1, "avm": 1, "differ": 1, "between": 1, "sec": 1, "stochast": 1, "induc": 1, "": 1, "its": 1, "evolutionari": 1, "bayesian": 1, "worker": [], "global": 1, "loop": 1, "final": 1, "pipelin": 1, "present": 1, "gener": 1, "compos": 1, "macro": 1, "defin": 1, "file": 1, "search_spac": 1, "resnet_macro_architectur": 1, "py": 1, "imag": 1, "classif": 1, "given": 1, "input": 1, "shape": 1, "output": 1, "class": 1, "should": 1, "uniqu": 1, "name": 1, "id": 1, "To": 1, "up": 1, "machin": 1, "robust": 1, "exampl": 1, "best": 1, "our": 1, "search_algorithm": 1, "ea_optim": 1, "eaoptim": 1, "population_s": 1, "20": 1, "nb_iter": 1, "nb_run": 1, "result_summari": 1}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"project": [], "featur": 2, "instal": [2, 3], "contribut": [], "support": [], "licens": [], "analogna": 2, "how": 2, "cite": 2, "get": [], "start": [], "api": 0, "refer": [0, 4], "tutori": 1, "paper": 4, "creat": 1, "configur": 1, "space": 1, "evalu": 1, "search": 1, "optim": 1, "worker": 1}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, 
"sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1, "sphinx": 57}, "alltitles": {"Installation": [[3, "installation"], [2, "installation"]], "AnalogNAS": [[2, "analognas"]], "Features": [[2, "features"]], "How to cite": [[2, "how-to-cite"]], "Paper References": [[4, "paper-references"]], "API Reference": [[0, "api-reference"]], "Tutorial": [[1, "tutorial"]], "Create a Configuration Space": [[1, "create-a-configuration-space"]], "Evaluator": [[1, "evaluator"]], "Search Optimizer and Worker": [[1, "search-optimizer-and-worker"]]}, "indexentries": {}}) -------------------------------------------------------------------------------- /analognas/docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | # -- Project information ----------------------------------------------------- 7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 8 | import os 9 | import sys 10 | 11 | project = 'AnalogNAS' 12 | copyright = '2023, Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, Abu Sebastian, Kaoutar El Maghraoui' 13 | author = 'Hadjer Benmeziane, Corey Lammie, Irem Boybat, Malte Rasch, Manuel Le Gallo, Smail Niar, Hamza Ouarnoughi, Ramachandran Muralidhar, Sidney Tsai, Vijay Narayanan, Abu Sebastian, Kaoutar El Maghraoui' 14 | release = 'v0.1.0' 15 | 16 | # -- General configuration --------------------------------------------------- 17 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 
18 | 19 | # Add any Sphinx extension module names here, as strings. They can be 20 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 21 | # ones. 22 | extensions = [ 23 | 'sphinx.ext.napoleon', 24 | 'sphinx.ext.autodoc', 25 | 'sphinx.ext.autosummary', 26 | 'sphinx.ext.viewcode', 27 | 'sphinx_rtd_theme', 28 | 'recommonmark' 29 | ] 30 | 31 | templates_path = ['_templates'] 32 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 33 | 34 | 35 | 36 | # -- Options for HTML output ------------------------------------------------- 37 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 38 | 39 | html_theme = 'sphinx_rtd_theme' 40 | html_static_path = ['_static'] 41 | 42 | -------------------------------------------------------------------------------- /analognas/docs/getting_started.rst: -------------------------------------------------------------------------------- 1 | Tutorial 2 | ======== 3 | 4 | *AnalogAINAS* is a framework that aims at building analog-aware efficient deep learning models. AnalogNAS is built on top of the [AIHWKIT](https://github.com/IBM/aihwkit). IBM Analog Hardware Acceleration Kit (AIHWKIT) is an open source Python toolkit for exploring and using the capabilities of in-memory computing devices in the context of artificial intelligence. 5 | 6 | In a high-level AnalogAINAS consists of 4 main building blocks which (can) interact with each other: 7 | 8 | * Configuration spaces: a search space of architectures targeting a specific dataset. 9 | * Evaluator: a ML predictor model to predict: 10 | * 1-day Accuracy: the evaluator models the drift effect that is encountered in Analog devices. The accuracy after 1 day of drift is then predicted and used as an objective to maximize. 11 | * The Accuracy Variation for One Month (AVM): The difference between the accuracy after 1 month and the accuracy after 1 sec. 
12 | * The 1-day accuracy standard deviation: The stochasticity of the noise induces different variation of the model's accuracy depending on its architecture. 13 | * Optimizer: an optimization strategy such as an evolutionary algorithm or Bayesian optimization. 14 | * Worker: A global object that runs the architecture search loop and the final network training pipeline. 15 | 16 | Create a Configuration Space 17 | ---------------------------- 18 | 19 | AnalogNAS presents a general search space composed of ResNet-like architectures. 20 | 21 | The macro-architecture defined in the file ```search_spaces/resnet_macro_architecture.py``` is customizable to any image classification dataset, given an input shape and output classes. 22 | 23 | .. warning:: 24 | The hyperparameters in the configuration space should have a unique name ID each. 25 | 26 | Evaluator 27 | --------- 28 | 29 | To speed up the search, we built a machine learning predictor to evaluate the accuracy and robustness of any given architecture from the configuration space. 30 | 31 | Search Optimizer and Worker 32 | --------------------------- 33 | 34 | In this example, we will use evolutionary search to look for the best architecture in CS using our evaluator. 35 | 36 | :: 37 | 38 | from analogainas.search_algorithms.ea_optimized import EAOptimizer 39 | from analogainas.search_algorithms.worker import Worker 40 | 41 | optimizer = EAOptimizer(evaluator, population_size=20, nb_iter=10) 42 | 43 | NB_RUN = 2 44 | worker = Worker(CS, optimizer=optimizer, runs=NB_RUN) 45 | 46 | worker.search() 47 | 48 | worker.result_summary() 49 | 50 | 51 | -------------------------------------------------------------------------------- /analognas/docs/index.rst: -------------------------------------------------------------------------------- 1 | AnalogNAS 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 3 6 | :caption: Get started 7 | :hidden: 8 | 9 | install 10 | getting_started 11 | 12 | 13 | ..
toctree:: 14 | :maxdepth: 3 15 | :caption: References 16 | :hidden: 17 | 18 | api_references 19 | references 20 | 21 | AnalogNAS is a Python library designed to empower researchers and practitioners in efficiently exploring and optimizing neural network architectures specifically for in-memory computing scenarios. AnalogNAS is built on top of the IBM Analog Hardware Acceleration Kit that enables efficient hardware-aware training with simulated noise injection on multiple IMC devices. By capitalizing on the advantages of in-memory computing, AnalogNAS opens new avenues for discovering architectures that can fully exploit the capabilities of this emerging computing paradigm. 22 | 23 | AnalogNAS offers a comprehensive set of features and functionalities that facilitate the neural architecture search process. From seamlessly exploring a vast space of architectural configurations to fine-tuning models for optimal performance, AnalogNAS provides a versatile framework that accelerates the discovery of efficient and effective neural network architectures for in-memory computing. 24 | 25 | .. warning:: 26 | This library is currently in beta and under active development. 27 | Please be mindful of potential issues and keep an eye out for improvements, 28 | new features, and bug fixes in upcoming versions. 29 | 30 | Features 31 | -------- 32 | 33 | 34 | - A customizable ResNet-like search space, allowing you to target CIFAR-10, Visual Wake Words, and Keyword Spotting 35 | - A configuration space object that allows adding any number or type of architecture and training hyperparameters to the search 36 | - An analog-specific evaluator which includes: 37 | 38 | - A 1-day accuracy ranker 39 | - A 1-month accuracy variation estimator 40 | - A 1-day standard deviation estimator 41 | 42 | - A flexible search algorithm, enabling the implementation and extension of state-of-the-art NAS methods.
43 | 44 | Installation 45 | ------------ 46 | 47 | Install AnalogNAS by running: 48 | 49 | pip install analogainas 50 | 51 | 52 | How to cite 53 | ----------- 54 | 55 | If you are using *AnalogNAS* for 56 | your research, please cite: 57 | 58 | .. note:: 59 | 60 | Benmeziane, H., Lammie, C., Boybat, I., Rasch, M., Gallo, M. L., Tsai, H., ... & Maghraoui, K. E. (2023). AnalogNAS: A Neural Network Design Framework for Accurate Inference with Analog In-Memory Computing. IEEE Edge 2023. 61 | 62 | https://arxiv.org/abs/2305.10459 63 | -------------------------------------------------------------------------------- /analognas/docs/install.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | The preferred way to install this package is by using the `Python package index`_:: 5 | 6 | pip install analogainas 7 | 8 | For ease of installation, install the aihwkit library separately; refer to `AIHWKit installation`_. 9 | 10 | The package requires the following runtime libraries to be installed in your 11 | system: 12 | 13 | * `OpenBLAS`_: 0.3.3+ 14 | * `CUDA Toolkit`_: 9.0+ (only required for the GPU-enabled simulator [#f1]_) 15 | 16 | .. [#f1] Note that GPU support is not available in OSX, as it depends on a 17 | platform that has official CUDA support. 18 | 19 | .. _AIHWKit installation: https://aihwkit.readthedocs.io/en/latest/install.html 20 | .. _Python package index: https://pypi.org/project/analogainas/ 21 | .. _OpenBLAS: https://www.openblas.net 22 | .. _CUDA Toolkit: https://developer.nvidia.com/accelerated-computing-toolkit -------------------------------------------------------------------------------- /analognas/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=.
11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /analognas/environment.yml: -------------------------------------------------------------------------------- 1 | name: analognas 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - _libgcc_mutex=0.1=main 7 | - _openmp_mutex=5.1=1_gnu 8 | - asttokens=2.4.1=pyhd8ed1ab_0 9 | - backcall=0.2.0=pyh9f0ad1d_0 10 | - ca-certificates=2024.2.2=hbcca054_0 11 | - comm=0.2.1=pyhd8ed1ab_0 12 | - debugpy=1.6.7=py38h6a678d5_0 13 | - decorator=5.1.1=pyhd8ed1ab_0 14 | - entrypoints=0.4=pyhd8ed1ab_0 15 | - executing=2.0.1=pyhd8ed1ab_0 16 | - ipykernel=6.29.2=pyhd33586a_0 17 | - ipython=8.12.0=pyh41d4057_0 18 | - jedi=0.19.1=pyhd8ed1ab_0 19 | - jupyter_client=7.3.4=pyhd8ed1ab_0 20 | - jupyter_core=5.7.1=py38h578d9bd_0 21 | - ld_impl_linux-64=2.38=h1181459_1 22 | - libffi=3.4.4=h6a678d5_0 23 | - libgcc-ng=11.2.0=h1234567_1 24 | - libgomp=11.2.0=h1234567_1 25 | - libsodium=1.0.18=h36c2ea0_1 26 | - libstdcxx-ng=11.2.0=h1234567_1 27 | - matplotlib-inline=0.1.6=pyhd8ed1ab_0 28 | - ncurses=6.4=h6a678d5_0 29 | - nest-asyncio=1.6.0=pyhd8ed1ab_0 30 | - openssl=3.0.13=h7f8727e_0 31 | - packaging=23.2=pyhd8ed1ab_0 32 | - parso=0.8.3=pyhd8ed1ab_0 33 | - 
pexpect=4.9.0=pyhd8ed1ab_0 34 | - pickleshare=0.7.5=py_1003 35 | - pip=23.3.1=py38h06a4308_0 36 | - platformdirs=4.2.0=pyhd8ed1ab_0 37 | - prompt-toolkit=3.0.42=pyha770c72_0 38 | - prompt_toolkit=3.0.42=hd8ed1ab_0 39 | - psutil=5.9.0=py38h5eee18b_0 40 | - ptyprocess=0.7.0=pyhd3deb0d_0 41 | - pure_eval=0.2.2=pyhd8ed1ab_0 42 | - pygments=2.17.2=pyhd8ed1ab_0 43 | - python=3.8.18=h955ad1f_0 44 | - python-dateutil=2.8.2=pyhd8ed1ab_0 45 | - python_abi=3.8=2_cp38 46 | - pyzmq=25.1.2=py38h6a678d5_0 47 | - readline=8.2=h5eee18b_0 48 | - setuptools=68.2.2=py38h06a4308_0 49 | - six=1.16.0=pyh6c4a22f_0 50 | - sqlite=3.41.2=h5eee18b_0 51 | - stack_data=0.6.2=pyhd8ed1ab_0 52 | - tk=8.6.12=h1ccaba5_0 53 | - tornado=6.1=py38h0a891b7_3 54 | - traitlets=5.14.1=pyhd8ed1ab_0 55 | - typing_extensions=4.9.0=pyha770c72_0 56 | - wcwidth=0.2.13=pyhd8ed1ab_0 57 | - wheel=0.41.2=py38h06a4308_0 58 | - xz=5.4.5=h5eee18b_0 59 | - zeromq=4.3.5=h6a678d5_0 60 | - zlib=1.2.13=h5eee18b_0 61 | - pip: 62 | - aihwkit==0.9.0 63 | - certifi==2024.2.2 64 | - charset-normalizer==3.3.2 65 | - cmake==3.28.3 66 | - distro==1.9.0 67 | - filelock==3.13.1 68 | - idna==3.6 69 | - jinja2==3.1.3 70 | - joblib==1.3.2 71 | - lit==17.0.6 72 | - markupsafe==2.1.5 73 | - mpmath==1.3.0 74 | - networkx==3.1 75 | - numpy==1.24.4 76 | - nvidia-cublas-cu11==11.10.3.66 77 | - nvidia-cuda-cupti-cu11==11.7.101 78 | - nvidia-cuda-nvrtc-cu11==11.7.99 79 | - nvidia-cuda-runtime-cu11==11.7.99 80 | - nvidia-cudnn-cu11==8.5.0.96 81 | - nvidia-cufft-cu11==10.9.0.58 82 | - nvidia-curand-cu11==10.2.10.91 83 | - nvidia-cusolver-cu11==11.4.0.1 84 | - nvidia-cusparse-cu11==11.7.4.91 85 | - nvidia-nccl-cu11==2.14.3 86 | - nvidia-nvtx-cu11==11.7.91 87 | - pillow==10.2.0 88 | - protobuf==4.25.3 89 | - requests==2.31.0 90 | - scikit-build==0.17.6 91 | - scikit-learn==1.3.2 92 | - scipy==1.10.1 93 | - sympy==1.12 94 | - threadpoolctl==3.3.0 95 | - tomli==2.0.1 96 | - torch==2.0.1 97 | - torchvision==0.15.2 98 | - triton==2.0.0 99 | - 
urllib3==2.2.1 100 | - xgboost==2.0.3 101 | -------------------------------------------------------------------------------- /analognas/nas_search_demo.py: -------------------------------------------------------------------------------- 1 | from analogainas.search_spaces.config_space import ConfigSpace 2 | from analogainas.evaluators.xgboost import XGBoostEvaluator 3 | from analogainas.search_algorithms.ea_optimized import EAOptimizer 4 | from analogainas.search_algorithms.worker import Worker 5 | 6 | 7 | CS = ConfigSpace('CIFAR-10') # Search Space Definition 8 | surrogate = XGBoostEvaluator(model_type="XGBRanker", load_weight=True) # 9 | optimizer = EAOptimizer(surrogate, population_size=20, nb_iter=50) # The default population size is 100. 10 | 11 | nb_runs = 2 12 | worker = Worker(CS, optimizer=optimizer, runs=nb_runs) 13 | 14 | worker.search() 15 | worker.result_summary() 16 | 17 | best_config = worker.best_config 18 | best_model = worker.best_arch 19 | -------------------------------------------------------------------------------- /analognas/requirements.txt: -------------------------------------------------------------------------------- 1 | cmake==3.22.4 2 | scikit-build==0.14.1 3 | scikit-learn==1.0.2 4 | scipy 5 | xgboost -------------------------------------------------------------------------------- /analognas/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import subprocess 4 | from setuptools import setup, find_packages 5 | 6 | # Check for python version 7 | if sys.version_info.major != 3 or sys.version_info.minor < 7 or sys.version_info.minor > 10: 8 | raise ValueError( 9 | 'Unsupported Python version %d.%d.%d found. 
AnalogNAS requires Python ' 10 | '3.7, 3.8 or 3.9' % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro) 11 | ) 12 | 13 | 14 | cwd = os.path.dirname(os.path.abspath(__file__)) 15 | 16 | version_path = os.path.join(cwd, 'analogainas', '__version__.py') 17 | with open(version_path) as fh: 18 | version = fh.readlines()[-1].split()[-1].strip("\"'") 19 | 20 | with open("README.md", "r") as f: 21 | long_description = f.read() 22 | 23 | requirements = [] 24 | with open("requirements.txt", "r") as f: 25 | for line in f: 26 | requirements.append(line.strip()) 27 | 28 | print('-- Building version ' + version) 29 | print('-- Note: by default installs pytorch-cpu version (1.9.0), update to torch-gpu by following instructions from: https://pytorch.org/get-started/locally/') 30 | 31 | setup( 32 | name='analogainas', 33 | version=version, 34 | description='AnalogAINAS: A modular and extensible Analog-aware Neural Architecture Search (NAS) library.', 35 | long_description=long_description, 36 | long_description_content_type="text/markdown", 37 | author='IBM Research', 38 | author_email='aihwkit@us.ibm.com', 39 | license='Apache 2.0', 40 | classifiers=[ 41 | 'Development Status :: 4 - Beta', 42 | 'Environment :: Console', 43 | 'Environment :: GPU :: NVIDIA CUDA', 44 | 'Intended Audience :: Science/Research', 45 | 'License :: OSI Approved :: Apache Software License', 46 | 'Operating System :: MacOS', 47 | 'Operating System :: Microsoft :: Windows', 48 | 'Operating System :: POSIX :: Linux', 49 | 'Programming Language :: Python :: 3 :: Only', 50 | 'Topic :: Scientific/Engineering', 51 | 'Topic :: Scientific/Engineering :: Artificial Intelligence', 52 | 'Typing :: Typed', 53 | ], 54 | keywords=['NAS', 'analog', 'torch'], 55 | packages=find_packages(), 56 | python_requires='>=3.7', 57 | platforms=['Linux'], 58 | install_requires=requirements 59 | ) -------------------------------------------------------------------------------- 
/analognas/tests/01_general_search.py: -------------------------------------------------------------------------------- 1 | from analognas.search_spaces import config_space 2 | from analognas.evaluators import XGBoostEvaluator 3 | from analognas.search_algorithms import EAOptimizer, Worker 4 | 5 | CS = config_space('CIFAR-10') 6 | surrogate = XGBoostEvaluator(model_type="XGBRanker", load_weight=True) 7 | optimizer = EAOptimizer() 8 | 9 | worker = Worker(CS, eval=surrogate, optimizer=optimizer, runs=5) 10 | 11 | result = worker.search() 12 | 13 | assert(result['best_acc'] > 0.92) -------------------------------------------------------------------------------- /analognas/tests/02_bo.py: -------------------------------------------------------------------------------- 1 | from analognas.search_spaces import config_space 2 | from analognas.evaluators import XGBoostEvaluator 3 | from analognas.search_algorithms import BOptimizer, Worker 4 | 5 | CS = config_space('CIFAR-10') 6 | surrogate = XGBoostEvaluator(model_type="XGBRanker", load_weight=True) 7 | optimizer = BOptimizer() 8 | 9 | worker = Worker(CS, eval=surrogate, optimizer=optimizer, runs=5) 10 | 11 | result = worker.search() 12 | 13 | print("Best architecture accuracy: ", result["best_acc"]) 14 | print("Standard deviation of accuracy over 5 runs: ", result['std_err']) -------------------------------------------------------------------------------- /analognas/tests/03_random_sample.py: -------------------------------------------------------------------------------- 1 | from analognas.search_spaces.sample import random_sample 2 | 3 | dataset = "VWW" 4 | number_of_architectures = 30 5 | architectures = random_sample(dataset, number_of_architectures) 6 | print(architectures[0]) -------------------------------------------------------------------------------- /analognas/tests/04_generate_architecture.py: -------------------------------------------------------------------------------- 1 | from 
analognas.search_spaces.sample import random_sample 2 | from analognas.search_spaces.resnet_macro_architecture import Network 3 | 4 | arch = random_sample('VWW', 1) 5 | model = Network(arch) 6 | 7 | print(model) 8 | -------------------------------------------------------------------------------- /analognas/tests/resnet.csv: -------------------------------------------------------------------------------- 1 | out_channel0,16 2 | M,3 3 | R1,3 4 | R2,0 5 | R3,2 6 | R4,0 7 | R5,0 8 | convblock1,2 9 | widenfact1,2 10 | B1,1 11 | convblock2,1 12 | widenfact2,3 13 | B2,2 14 | convblock3,2 15 | widenfact3,3 16 | B3,2 17 | convblock4,0 18 | widenfact4,0 19 | B4,0 20 | convblock5,0 21 | widenfact5,0 22 | B5,0 23 | std_dev,0.5 24 | digital_acc,86.97 25 | analog_acc,86.9 26 | -------------------------------------------------------------------------------- /analognasbench/README.md: -------------------------------------------------------------------------------- 1 | # AnalogNAS-Bench: A NAS Benchmark for Analog In-Memory Computing 2 | 3 | ## Overview 4 | AnalogNAS-Bench is the first Neural Architecture Search (NAS) benchmark specifically designed for Analog In-Memory Computing (AIMC). It provides a comprehensive evaluation of 15,625 architectures from NAS-Bench-201 trained on the CIFAR-10 dataset under different conditions, including AIMC-specific hardware simulations. 5 | 6 | This benchmark enables researchers to explore how different neural architectures perform on AIMC hardware and compare them against standard digital training methods. 7 | 8 | ## Features 9 | 10 | - **Large-scale Benchmark:** 15,625 architectures from NAS-Bench-201. 11 | 12 | - **Diverse Training Conditions:** 13 | - **Baseline Evaluation**: Full training on standard hardware. 14 | - **Noisy Evaluation**: Direct evaluation on AIMC hardware. 15 | - **Hardware-Aware Training (HWT)**: Fine-tuning trained models on AIMC hardware with noise injection. 
16 | - **Post-Training Quantization (PTQ)**: Quantizing pre-trained models on standard hardware. 17 | - **Quantization-Aware Training (QAT)**: Training with quantization awareness on standard hardware. 18 | 19 | - **Temporal Drift Simulation:** Evaluates performance degradation over time on AIMC hardware, for both noisy and hardware-aware training evaluations, at intervals of 1 minute, 1 hour, 1 day, and 1 month. 20 | 21 | - **Ongoing Expansion:** Currently supports CIFAR-10, with CIFAR-100 and ImageNet16-120 integration in progress. 22 | 23 | ## Installation 24 | - Clone the AnalogNAS repository 25 | ```bash 26 | git clone https://github.com/IBM/analog-nas.git 27 | ``` 28 | - Move to analognasbench folder 29 | ```bash 30 | cd analognasbench 31 | ``` 32 | - Run the setup script 33 | ```bash 34 | pip install . 35 | ``` 36 | 37 | 38 | ## Usage 39 | ### Initialize Benchmark 40 | ```python 41 | from analognasbench.data import AnalogNASBench 42 | benchmark = AnalogNASBench() 43 | ``` 44 | 45 | ### Query a Specific Metric for an Architecture 46 | Available metrics: 47 | - `baseline_accuracy` - Standard training 48 | - `noisy_accuracy` - Noisy evaluation 49 | - `analog_accuracy` - Hardware-aware training (HWT) 50 | - `ptq_accuracy` - Post-training quantization (PTQ) 51 | - `qat_accuracy` - Quantization-aware training (QAT) 52 | - `noisy_drift_60`, `analog_drift_60` - 1 min drift 53 | - `noisy_drift_3600`, `analog_drift_3600` - 1 hour drift 54 | - `noisy_drift_86400`, `analog_drift_86400` - 1 day drift 55 | - `noisy_drift_2592000`, `analog_drift_2592000` - 1 month drift 56 | 57 | Example: 58 | ```python 59 | architecture = (0, 0, 0, 4, 3, 2) 60 | analog_accuracy = benchmark.query_metric(architecture, 'analog_accuracy') 61 | print(f"Analog Accuracy: {analog_accuracy}") 62 | ``` 63 | 64 | ### Retrieve Full Architecture Details 65 | ```python 66 | arch_details = benchmark.get_architecture_details(architecture) 67 | print(arch_details) 68 | ``` 69 | 
-------------------------------------------------------------------------------- /analognasbench/analog_nasbench.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: analog-nasbench 3 | Version: 0.1.3 4 | Requires-Dist: pandas 5 | Requires-Dist: numpy 6 | -------------------------------------------------------------------------------- /analognasbench/analog_nasbench.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | setup.py 2 | analog_nasbench.egg-info/PKG-INFO 3 | analog_nasbench.egg-info/SOURCES.txt 4 | analog_nasbench.egg-info/dependency_links.txt 5 | analog_nasbench.egg-info/requires.txt 6 | analog_nasbench.egg-info/top_level.txt 7 | analognasbench/__init__.py 8 | analognasbench/data.anb 9 | analognasbench/data.py -------------------------------------------------------------------------------- /analognasbench/analog_nasbench.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /analognasbench/analog_nasbench.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | numpy 3 | -------------------------------------------------------------------------------- /analognasbench/analog_nasbench.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | analognasbench 2 | -------------------------------------------------------------------------------- /analognasbench/analognasbench/__init__.py: -------------------------------------------------------------------------------- 1 | from .data import AnalogNASBench 2 | 3 | __version__ = '0.1.0' 4 | 5 | __all__ = ['NASBenchAnalog'] -------------------------------------------------------------------------------- 
/analognasbench/analognasbench/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognasbench/analognasbench/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /analognasbench/analognasbench/__pycache__/data.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognasbench/analognasbench/__pycache__/data.cpython-38.pyc -------------------------------------------------------------------------------- /analognasbench/analognasbench/data.anb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognasbench/analognasbench/data.anb -------------------------------------------------------------------------------- /analognasbench/analognasbench/data.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import pickle 4 | import pkg_resources 5 | from ast import literal_eval 6 | 7 | 8 | class Architecture: 9 | def __init__(self, architecture, baseline_accuracy, 10 | ptq_accuracy, qat_accuracy, 11 | noisy_accuracy, analog_accuracy, 12 | noisy_drift_60,noisy_drift_3600,noisy_drift_86400,noisy_drift_2592000, 13 | analog_drift_60,analog_drift_3600,analog_drift_86400,analog_drift_2592000, 14 | params, 15 | ): 16 | 17 | self.architecture = architecture # Tuple (e.g., (0,0,0,4,3,2)) 18 | self.baseline_accuracy = baseline_accuracy 19 | 20 | self.ptq_accuracy = ptq_accuracy 21 | self.qat_accuracy = qat_accuracy 22 | 23 | self.noisy_accuracy = noisy_accuracy 24 | self.analog_accuracy = analog_accuracy 25 | 26 | self.params = params 27 | 28 | 
self.noisy_drift_60 = self._parse_drift(noisy_drift_60) 29 | self.noisy_drift_3600 = self._parse_drift(noisy_drift_3600) 30 | self.noisy_drift_86400 = self._parse_drift(noisy_drift_86400) 31 | self.noisy_drift_2592000 = self._parse_drift(noisy_drift_2592000) 32 | self.analog_drift_60 = self._parse_drift(analog_drift_60) 33 | self.analog_drift_3600 = self._parse_drift(analog_drift_3600) 34 | self.analog_drift_86400 = self._parse_drift(analog_drift_86400) 35 | self.analog_drift_2592000 = self._parse_drift(analog_drift_2592000) 36 | 37 | def _parse_drift(self, drift_str): 38 | """ 39 | Parse drift string into DriftMeasurement object. 40 | 41 | :param drift_str: String in format "value ± uncertainty" 42 | :return: DriftMeasurement object 43 | """ 44 | if isinstance(drift_str, (int, float)): 45 | return DriftMeasurement(drift_str, 0) 46 | 47 | if isinstance(drift_str, str): 48 | drift_str = drift_str.strip() 49 | 50 | try: 51 | return DriftMeasurement(float(drift_str), 0) 52 | except ValueError: 53 | pass 54 | 55 | if '±' in drift_str: 56 | try: 57 | parts = drift_str.split('±') 58 | value = float(parts[0].strip()) 59 | uncertainty = float(parts[1].strip()) 60 | return DriftMeasurement(value, uncertainty) 61 | except: 62 | print(f"Warning: Could not parse drift string: {drift_str}") 63 | return DriftMeasurement(0, 0) 64 | 65 | return DriftMeasurement(0, 0) 66 | 67 | 68 | 69 | class DriftMeasurement: 70 | def __init__(self, value, uncertainty): 71 | """ 72 | Represents a drift measurement with value and uncertainty. 
73 | 74 | :param value: Mean drift value 75 | :param uncertainty: Uncertainty of the measurement 76 | """ 77 | self.value = float(value) 78 | self.uncertainty = float(uncertainty) 79 | 80 | def __repr__(self): 81 | """String representation of the drift measurement""" 82 | return f"{self.value} ± {self.uncertainty}" 83 | 84 | def __str__(self): 85 | """Human-readable string representation""" 86 | return self.__repr__() 87 | 88 | 89 | 90 | 91 | class AnalogNASBench: 92 | def __init__(self): 93 | """ 94 | Initialize the AnalogNAS-Bench dataset. 95 | """ 96 | data_path = pkg_resources.resource_filename('analognasbench', 'data.anb') 97 | with open(data_path, 'rb') as f: 98 | self.data = pickle.load(f) 99 | 100 | # List of all metrics for querying 101 | self.metrics = [ 102 | 'architecture', 'baseline_accuracy', 'ptq_accuracy', 103 | 'qat_accuracy', 'noisy_accuracy', 'analog_accuracy', 104 | 'noisy_drift_60', 'noisy_drift_3600', 105 | 'noisy_drift_86400', 'noisy_drift_2592000', 106 | 'analog_drift_60', 'analog_drift_3600', 107 | 'analog_drift_86400', 'analog_drift_2592000', "params" 108 | ] 109 | 110 | def query_metric(self, architecture, metric): 111 | """ 112 | Query a specific metric for a given architecture ID. 113 | 114 | :param architecture: Architecture identifier 115 | :param metric: Metric to retrieve 116 | :return: Metric value or None if not found 117 | """ 118 | if metric not in self.metrics: 119 | raise ValueError(f"Metric {metric} not found. Available metrics: {self.metrics}") 120 | 121 | 122 | arch = self.data.get(architecture) 123 | if not arch: 124 | return "architecture not found" 125 | 126 | return getattr(arch, metric) 127 | 128 | def get_architecture_details(self, architecture): 129 | """ 130 | Retrieve full details for a specific architecture. 
131 | 132 | :param architecture: Architecture identifier 133 | :return: Formatted string of architecture details 134 | """ 135 | arch = self.data.get(architecture) 136 | if not arch: 137 | return "Architecture not found." 138 | 139 | # Format the details into a readable string 140 | details = f""" 141 | Architecture Details: 142 | -------------------- 143 | Architecture: \t\t{arch.architecture} 144 | Number of parameters: \t{arch.params} 145 | Baseline Accuracy: \t{arch.baseline_accuracy} 146 | PTQ Accuracy: \t\t{arch.ptq_accuracy} 147 | QAT Accuracy: \t\t{arch.qat_accuracy} 148 | Noisy Accuracy: \t{arch.noisy_accuracy} 149 | Analog Accuracy: \t{arch.analog_accuracy} 150 | 151 | Noisy Drift: 152 | - 60s: \t\t{arch.noisy_drift_60} 153 | - 3600s: \t{arch.noisy_drift_3600} 154 | - 86400s: \t{arch.noisy_drift_86400} 155 | - 2592000s: \t{arch.noisy_drift_2592000} 156 | 157 | Analog Drift: 158 | - 60s: \t\t{arch.analog_drift_60} 159 | - 3600s: \t{arch.analog_drift_3600} 160 | - 86400s: \t{arch.analog_drift_86400} 161 | - 2592000s: \t{arch.analog_drift_2592000} 162 | """ 163 | 164 | return details 165 | 166 | def list_available_architectures(self): 167 | """ 168 | List all available architectures. 
169 | 170 | :return: List of architectures (tuples) 171 | """ 172 | return list(self.data.keys()) -------------------------------------------------------------------------------- /analognasbench/build/lib/analognasbench/__init__.py: -------------------------------------------------------------------------------- 1 | from .data import AnalogNASBench 2 | 3 | __version__ = '0.1.0' 4 | 5 | __all__ = ['NASBenchAnalog'] -------------------------------------------------------------------------------- /analognasbench/build/lib/analognasbench/data.anb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/analog-nas/71dae1f89f16d1d5bb5960bafdbfeee500a34b89/analognasbench/build/lib/analognasbench/data.anb -------------------------------------------------------------------------------- /analognasbench/build/lib/analognasbench/data.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | import numpy as np 4 | import pickle 5 | import pkg_resources 6 | from ast import literal_eval 7 | 8 | 9 | class Architecture: 10 | def __init__(self, architecture, baseline_accuracy, 11 | ptq_accuracy, qat_accuracy, 12 | noisy_accuracy, analog_accuracy, baseline_drift_60, 13 | baseline_drift_3600,baseline_drift_86400,baseline_drift_2592000, 14 | analog_drift_60,analog_drift_3600,analog_drift_86400,analog_drift_2592000 15 | ): 16 | 17 | self.architecture = architecture # Tuple (e.g., (0,0,0,4,3,2)) 18 | self.baseline_accuracy = baseline_accuracy 19 | 20 | self.ptq_accuracy = ptq_accuracy 21 | self.qat_accuracy = qat_accuracy 22 | 23 | self.noisy_accuracy = noisy_accuracy 24 | self.analog_accuracy = analog_accuracy 25 | 26 | self.baseline_drift_60 = self._parse_drift(baseline_drift_60) 27 | self.baseline_drift_3600 = self._parse_drift(baseline_drift_3600) 28 | self.baseline_drift_86400 = self._parse_drift(baseline_drift_86400) 29 | self.baseline_drift_2592000 = 
self._parse_drift(baseline_drift_2592000) 30 | self.analog_drift_60 = self._parse_drift(analog_drift_60) 31 | self.analog_drift_3600 = self._parse_drift(analog_drift_3600) 32 | self.analog_drift_86400 = self._parse_drift(analog_drift_86400) 33 | self.analog_drift_2592000 = self._parse_drift(analog_drift_2592000) 34 | 35 | def _parse_drift(self, drift_str): 36 | """ 37 | Parse drift string into DriftMeasurement object. 38 | 39 | :param drift_str: String in format "value ± uncertainty" 40 | :return: DriftMeasurement object 41 | """ 42 | if isinstance(drift_str, (int, float)): 43 | return DriftMeasurement(drift_str, 0) 44 | 45 | if isinstance(drift_str, str): 46 | drift_str = drift_str.strip() 47 | 48 | try: 49 | return DriftMeasurement(float(drift_str), 0) 50 | except ValueError: 51 | pass 52 | 53 | if '±' in drift_str: 54 | try: 55 | parts = drift_str.split('±') 56 | value = float(parts[0].strip()) 57 | uncertainty = float(parts[1].strip()) 58 | return DriftMeasurement(value, uncertainty) 59 | except: 60 | print(f"Warning: Could not parse drift string: {drift_str}") 61 | return DriftMeasurement(0, 0) 62 | 63 | return DriftMeasurement(0, 0) 64 | 65 | 66 | 67 | class DriftMeasurement: 68 | def __init__(self, value, uncertainty): 69 | """ 70 | Represents a drift measurement with value and uncertainty. 71 | 72 | :param value: Mean drift value 73 | :param uncertainty: Uncertainty of the measurement 74 | """ 75 | self.value = float(value) 76 | self.uncertainty = float(uncertainty) 77 | 78 | def __repr__(self): 79 | """String representation of the drift measurement""" 80 | return f"{self.value} ± {self.uncertainty}" 81 | 82 | def __str__(self): 83 | """Human-readable string representation""" 84 | return self.__repr__() 85 | 86 | 87 | 88 | 89 | class AnalogNASBench: 90 | def __init__(self): 91 | """ 92 | Initialize the AnalogNAS-Bench dataset. 
93 | """ 94 | data_path = pkg_resources.resource_filename('analognasbench', 'data.anb') 95 | with open(data_path, 'rb') as f: 96 | self.data = pickle.load(f) 97 | 98 | # List of all metrics for querying 99 | self.metrics = [ 100 | 'architecture', 'baseline_accuracy', 'ptq_accuracy', 101 | 'qat_accuracy', 'noisy_accuracy', 'analog_accuracy', 102 | 'baseline_drift_60', 'baseline_drift_3600', 103 | 'baseline_drift_86400', 'baseline_drift_2592000', 104 | 'analog_drift_60', 'analog_drift_3600', 105 | 'analog_drift_86400', 'analog_drift_2592000' 106 | ] 107 | 108 | def query_metric(self, architecture, metric): 109 | """ 110 | Query a specific metric for a given architecture ID. 111 | 112 | :param architecture: Architecture identifier 113 | :param metric: Metric to retrieve 114 | :return: Metric value or None if not found 115 | """ 116 | if metric not in self.metrics: 117 | raise ValueError(f"Metric {metric} not found. Available metrics: {self.metrics}") 118 | 119 | arch = self.data.get(architecture) 120 | if not arch: 121 | return "architecture not found" 122 | 123 | return getattr(arch, metric) 124 | 125 | def get_architecture_details(self, architecture): 126 | """ 127 | Retrieve full details for a specific architecture. 128 | 129 | :param architecture: Architecture identifier 130 | :return: Formatted string of architecture details 131 | """ 132 | arch = self.data.get(architecture) 133 | if not arch: 134 | return "Architecture not found." 
import random
import numpy as np
from collections import defaultdict

from sklearn.neural_network import MLPRegressor
from analognasbench.data import AnalogNASBench, DriftMeasurement


def architecture_to_feature_vector(arch):
    """Encode an architecture tuple as a flat float feature vector."""
    return np.array(list(arch), dtype=float)


def evaluate_architecture(benchmark, arch, metric='analog_drift_86400'):
    """
    Query `metric` for `arch` on the benchmark.

    :return: float value of the metric, or None when the benchmark returns
             anything other than a DriftMeasurement or a plain number
             (e.g. an "architecture not found" string).
    """
    result = benchmark.query_metric(arch, metric)
    if isinstance(result, DriftMeasurement):
        return result.value
    elif isinstance(result, (int, float)):
        return float(result)
    else:
        return None


def train_ensemble(X, y, n_models=5, hidden_layer_sizes=(32, 32), max_iter=200):
    """
    Train an ensemble of MLP regressors on data (X, y).

    :param X: np.array of shape (n_samples, n_features)
    :param y: np.array of shape (n_samples,)
    :param n_models: number of networks in the ensemble
    :param hidden_layer_sizes: MLP hidden layers
    :param max_iter: training iterations for MLP
    :return: list of trained MLP models
    """
    ensemble = []
    for _ in range(n_models):
        # A fresh random_state per member keeps the ensemble diverse,
        # which is what makes the std a useful uncertainty estimate.
        mlp = MLPRegressor(
            hidden_layer_sizes=hidden_layer_sizes,
            max_iter=max_iter,
            random_state=random.randint(0, 999999)
        )
        mlp.fit(X, y)
        ensemble.append(mlp)
    return ensemble


def predict_with_ensemble(ensemble, x):
    """
    Given an ensemble of MLPs and a single feature vector x,
    return the mean and std of predictions across the ensemble.
    """
    preds = np.array([model.predict([x])[0] for model in ensemble])
    return preds.mean(), preds.std()


def bananas_search_analog_nasbench(
    init_size=10,
    total_queries=50,
    metric='analog_drift_86400',
    n_ensemble_models=5,
    alpha=1.0,
    hidden_layer_sizes=(32, 32),
    random_seed=42
):
    """
    A simplified BANANAS-like search to maximize `metric`.

    :param init_size: How many architectures we randomly sample initially
    :param total_queries: How many total architectures we will evaluate
    :param metric: Which metric from the benchmark to optimize
    :param n_ensemble_models: Number of neural networks in the ensemble
    :param alpha: The weight of the std in the acquisition (mu + alpha * std)
    :param hidden_layer_sizes: Hidden layer sizes for the MLPRegressor
    :param random_seed: Random seed for reproducibility
    :return: (best_arch, best_perf, history)
    """
    random.seed(random_seed)
    np.random.seed(random_seed)

    benchmark = AnalogNASBench()
    arch_list = benchmark.list_available_architectures()
    if not arch_list:
        print("No architectures found in the benchmark.")
        return None, None, []

    init_archs = random.sample(arch_list, min(init_size, len(arch_list)))

    # X_data, y_data and tested_archs are kept strictly parallel:
    # index i of each refers to the same architecture. (The previous
    # version indexed init_archs with an argmax over y_data, which
    # misaligned as soon as one initial query returned None.)
    X_data = []
    y_data = []
    tested_archs = []
    tested_set = set()

    for arch in init_archs:
        perf = evaluate_architecture(benchmark, arch, metric=metric)
        # Even a failed query consumes the architecture: marking it as
        # tested prevents the search loop from re-querying a known dud.
        tested_set.add(arch)
        if perf is not None:
            X_data.append(architecture_to_feature_vector(arch))
            y_data.append(perf)
            tested_archs.append(arch)

    best_arch = None
    best_perf = float('-inf')

    if y_data:
        max_idx = int(np.argmax(y_data))
        best_arch = tested_archs[max_idx]
        best_perf = y_data[max_idx]

    # History of every successfully evaluated architecture, in order.
    history = list(zip(tested_archs, y_data))

    for iteration in range(total_queries - len(tested_set)):
        X_data_arr = np.array(X_data)
        y_data_arr = np.array(y_data)
        ensemble = train_ensemble(
            X_data_arr,
            y_data_arr,
            n_models=n_ensemble_models,
            hidden_layer_sizes=hidden_layer_sizes
        )

        # Score every untested candidate with a UCB-style acquisition.
        candidate_scores = []
        for arch in arch_list:
            if arch in tested_set:
                continue
            x = architecture_to_feature_vector(arch)
            mu, sigma = predict_with_ensemble(ensemble, x)
            acquisition_value = mu + alpha * sigma
            candidate_scores.append((arch, acquisition_value))

        if not candidate_scores:
            break

        candidate_scores.sort(key=lambda x: x[1], reverse=True)
        next_arch = candidate_scores[0][0]

        next_perf = evaluate_architecture(benchmark, next_arch, metric)
        tested_set.add(next_arch)

        if next_perf is not None:
            X_data.append(architecture_to_feature_vector(next_arch))
            y_data.append(next_perf)
            tested_archs.append(next_arch)
            history.append((next_arch, next_perf))
            if next_perf > best_perf:
                best_perf = next_perf
                best_arch = next_arch

        print(f"Iteration {iteration+1}:")
        print(f"\tPicked arch: {next_arch}")
        print(f"\tPerformance: {next_perf}")
        print(f"\tBest so far: {best_arch} with perf={best_perf}")

    return best_arch, best_perf, history


if __name__ == "__main__":
    best_arch, best_val, search_history = bananas_search_analog_nasbench(
        init_size=10,
        total_queries=50,
        metric='analog_drift_86400',
        n_ensemble_models=5,
        alpha=1.0,
        hidden_layer_sizes=(32, 32),
        random_seed=42
    )

    print("\nBANANAS-like Search Complete!")
    print("Best Architecture Found:", best_arch)
    print("Best Performance Value:", best_val)

    benchmark = AnalogNASBench()
    details = benchmark.get_architecture_details(best_arch)
    print("\nFull Architecture Details:")
    print(details)
Searches exhaustively through all architectures in the AnalogNASBench 7 | to find the one with the highest value of the given metric. 8 | 9 | :param metric: The metric name to optimize (e.g., 'analog_drift_86400') 10 | :return: (best_arch, best_value) 11 | """ 12 | benchmark = AnalogNASBench() 13 | 14 | all_archs = benchmark.list_available_architectures() 15 | if not all_archs: 16 | print("No architectures found in the benchmark.") 17 | return None, float('-inf') 18 | 19 | best_arch = None 20 | best_value = -math.inf # or float('-inf') 21 | 22 | for arch in all_archs: 23 | result = benchmark.query_metric(arch, metric) 24 | if isinstance(result, DriftMeasurement): 25 | val = result.value 26 | elif isinstance(result, (int, float)): 27 | val = float(result) 28 | else: 29 | # If the result is a string like "architecture not found" or None 30 | continue 31 | 32 | if val > best_value: 33 | best_value = val 34 | best_arch = arch 35 | 36 | return best_arch, best_value 37 | 38 | 39 | if __name__ == "__main__": 40 | best_arch, best_val = brute_force_best_architecture(metric='analog_drift_86400') 41 | print("Best architecture found (brute force):", best_arch) 42 | print("Best metric value:", best_val) 43 | 44 | # Optionally retrieve more details 45 | benchmark = AnalogNASBench() 46 | details = benchmark.get_architecture_details(best_arch) 47 | print(details) 48 | -------------------------------------------------------------------------------- /analognasbench/examples/random_search.py: -------------------------------------------------------------------------------- 1 | import random 2 | from analognasbench.data import AnalogNASBench, DriftMeasurement 3 | 4 | def random_search_analog_nasbench( 5 | num_samples=100, 6 | objective_metric='analog_drift_86400', 7 | seed=42 8 | ): 9 | """ 10 | Perform a random search over the AnalogNASBench to find 11 | the architecture with the best (maximum) 'analog_drift_86400' value. 
12 | 13 | :param num_samples: Number of random architectures to evaluate 14 | :param objective_metric: Metric to optimize (default: 'analog_drift_86400') 15 | :param seed: Random seed for reproducibility 16 | :return: (best_arch, best_metric_value) 17 | """ 18 | random.seed(seed) 19 | 20 | benchmark = AnalogNASBench() 21 | 22 | all_archs = benchmark.list_available_architectures() 23 | 24 | best_arch = None 25 | best_metric_value = -float('inf') 26 | 27 | for _ in range(num_samples): 28 | arch = random.choice(all_archs) 29 | 30 | result = benchmark.query_metric(arch, objective_metric) 31 | 32 | if isinstance(result, str) and result == "architecture not found": 33 | continue 34 | 35 | if isinstance(result, DriftMeasurement): 36 | metric_value = result.value 37 | elif isinstance(result, (int, float)): 38 | metric_value = float(result) 39 | else: 40 | # If it's something else or None 41 | continue 42 | 43 | if metric_value > best_metric_value: 44 | best_arch = arch 45 | best_metric_value = metric_value 46 | 47 | return best_arch, best_metric_value 48 | 49 | 50 | if __name__ == "__main__": 51 | best_arch, best_val = random_search_analog_nasbench( 52 | num_samples=500, 53 | objective_metric='analog_drift_86400', 54 | seed=42 55 | ) 56 | print("Best architecture found:", best_arch) 57 | print("Best metric value:", best_val) 58 | 59 | benchmark = AnalogNASBench() 60 | arch_details = benchmark.get_architecture_details(best_arch) 61 | print("Architecture details:") 62 | print(arch_details) 63 | -------------------------------------------------------------------------------- /analognasbench/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='analog-nasbench', 5 | version='0.1.3', 6 | packages=find_packages(), 7 | include_package_data=True, 8 | package_data={ 9 | 'analog-nasbench': ['data.anb'], 10 | }, 11 | data_files=[('analog-nasbench', 
['analognasbench/data.anb'])], 12 | install_requires=[ 13 | 'pandas', 14 | 'numpy', 15 | ], 16 | ) -------------------------------------------------------------------------------- /analognasbench/test.py: -------------------------------------------------------------------------------- 1 | from analognasbench.data import AnalogNASBench 2 | 3 | # Initialize 4 | benchmark = AnalogNASBench() 5 | 6 | # Query a specific metric for an architecture 7 | # Availible metrics : 8 | # baseline_accuracy 9 | # ptq_accuracy 10 | # qat_accuracy 11 | # noisy_accuracy 12 | # analog_accuracy 13 | # noisy_drift_60 14 | # analog_drift_60 15 | # noisy_drift_3600 16 | # analog_drift_3600 17 | # noisy_drift_86400 18 | # analog_drift_86400 19 | # noisy_drift_2592000 20 | # analog_drift_2592000 21 | # params 22 | 23 | noisy_drift_3600 = benchmark.query_metric((0, 0, 2, 0, 0, 0), 'noisy_drift_3600') 24 | 25 | print("noisy_drift_3600 :",noisy_drift_3600) 26 | print("noisy_drift_3600 value :",noisy_drift_3600.value) 27 | print("noisy_drift_3600 uncertainty :",noisy_drift_3600.uncertainty) 28 | 29 | analog_accuracy = benchmark.query_metric((0, 0, 2, 0, 0, 0), 'analog_accuracy') 30 | 31 | print("analog_accuracy :",analog_accuracy) 32 | 33 | # # Get full architecture details 34 | arch_details = benchmark.get_architecture_details((0, 0, 2, 0, 0, 0)) 35 | print(arch_details) 36 | 37 | 38 | -------------------------------------------------------------------------------- /analognasbench/training_script.sh: -------------------------------------------------------------------------------- 1 | # SLURM Script to allocate gpus and run trainings 2 | 3 | 4 | #!/bin/bash 5 | #SBATCH --partition=npl-2024 6 | #SBATCH --job-name=train1 7 | 8 | #SBATCH --nodes=4 9 | #SBATCH --ntasks-per-node=8 10 | #SBATCH --gres=gpu:8 11 | #SBATCH --cpus-per-task=8 12 | 13 | #SBATCH --time=06:00:00 14 | #SBATCH --output=__train_%j.out 15 | #SBATCH --error=__error_%j.err 16 | 17 | source ~/scratch/miniconda3x86/bin/activate 18 | 
conda activate grafnas 19 | 20 | export MASTER_ADDR=$(scontrol show hostname $SLURM_NODELIST | head -n 1) 21 | export MASTER_PORT=$(expr 10000 + $(echo -n $SLURM_JOBID | tail -c 4)) 22 | export WORLD_SIZE=$(($SLURM_NNODES * $SLURM_NTASKS_PER_NODE)) 23 | 24 | srun python training_script.py --------------------------------------------------------------------------------