├── .circleci
│   └── config.yml
├── .coveragerc
├── .gitignore
├── .readthedocs.yaml
├── .travis.yml
├── ACKNOWLEDGEMENTS
├── AUTHORS
├── CONTRIBUTING.rst
├── Data Analysis.ipynb
├── LICENSE
├── README.rst
├── bin
│   ├── active-wait-ga-checkpoint.py
│   ├── active-wait-ga.py
│   ├── l2l-fun-ce-gaussmix.py
│   ├── l2l-fun-ce.py
│   ├── l2l-fun-es.py
│   ├── l2l-fun-face.py
│   ├── l2l-fun-ga-checkpoint.py
│   ├── l2l-fun-ga.py
│   ├── l2l-fun-gd.py
│   ├── l2l-fun-gs.py
│   ├── l2l-fun-mgd.py
│   ├── l2l-fun-pt.py
│   ├── l2l-fun-sa.py
│   ├── l2l-funall.py
│   ├── l2l-mnist-ce.py
│   ├── l2l-mnist-es.py
│   ├── l2l-template.py
│   ├── logging.yaml
│   └── plot-functions.py
├── doc
│   ├── Makefile
│   ├── conf.py
│   ├── index.rst
│   ├── indices.rst
│   ├── intro.rst
│   ├── l2l-bin.rst
│   ├── l2l.logging_tools.rst
│   ├── l2l.optimizees.functions.rst
│   ├── l2l.optimizees.mnist.rst
│   ├── l2l.optimizees.rst
│   ├── l2l.optimizers.crossentropy.rst
│   ├── l2l.optimizers.evolution.rst
│   ├── l2l.optimizers.evolutionstrategies.rst
│   ├── l2l.optimizers.face.rst
│   ├── l2l.optimizers.gradientdescent.rst
│   ├── l2l.optimizers.gridsearch.rst
│   ├── l2l.optimizers.naturalevolutionstrategies.rst
│   ├── l2l.optimizers.rst
│   ├── l2l.optimizers.simulatedannealing.rst
│   ├── l2l.rst
│   ├── l2l.utils.rst
│   ├── quickstart.rst
│   └── requirements.txt
├── l2l
│   ├── __init__.py
│   ├── logging_tools.py
│   ├── matplotlib_.py
│   ├── optimizees
│   │   ├── README.rst
│   │   ├── __init__.py
│   │   ├── active_wait
│   │   │   ├── __init__.py
│   │   │   └── optimizee_aw.py
│   │   ├── functions
│   │   │   ├── __init__.py
│   │   │   ├── benchmarked_functions.py
│   │   │   ├── function_generator.py
│   │   │   ├── optimizee.py
│   │   │   └── tools.py
│   │   ├── mnist
│   │   │   ├── __init__.py
│   │   │   ├── nn.py
│   │   │   └── optimizee.py
│   │   ├── optimizee.py
│   │   └── test_cases
│   │       ├── __init__.py
│   │       └── optimizee_testcase.py
│   ├── optimizers
│   │   ├── __init__.py
│   │   ├── crossentropy
│   │   │   ├── __init__.py
│   │   │   ├── distribution.py
│   │   │   └── optimizer.py
│   │   ├── evolution
│   │   │   ├── __init__.py
│   │   │   ├── optimizer.py
│   │   │   └── requirements.txt
│   │   ├── evolutionstrategies
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── face
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── gradientdescent
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── gridsearch
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── multievolution
│   │   │   ├── __init__.py
│   │   │   ├── optimizer.py
│   │   │   └── requirements.txt
│   │   ├── multigradientdescent
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── naturalevolutionstrategies
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   ├── optimizer.py
│   │   ├── paralleltempering
│   │   │   ├── __init__.py
│   │   │   └── optimizer.py
│   │   └── simulatedannealing
│   │       ├── __init__.py
│   │       └── optimizer.py
│   ├── paths.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── test_all.py
│   │   ├── test_ce_optimizer.py
│   │   ├── test_checkpoint.py
│   │   ├── test_es_optimizer.py
│   │   ├── test_face_optimizer.py
│   │   ├── test_ga_optimizer.py
│   │   ├── test_gd_optimizer.py
│   │   ├── test_gs_optimizer.py
│   │   ├── test_innerloop.py
│   │   ├── test_optimizer.py
│   │   ├── test_outerloop.py
│   │   ├── test_pt_optimizer.py
│   │   ├── test_runner.py
│   │   ├── test_sa_optimizer.py
│   │   └── test_setup.py
│   ├── utils
│   │   ├── __init__.py
│   │   ├── environment.py
│   │   ├── experiment.py
│   │   ├── groups.py
│   │   ├── individual.py
│   │   ├── runner.py
│   │   ├── tools.py
│   │   └── trajectory.py
│   └── version.py
├── requirements.txt
├── run-style-check.sh
├── setup.py
└── test-requirements.txt

--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # Use the latest 2.1 version of CircleCI pipeline process engine.
2 | # See: https://circleci.com/docs/2.0/configuration-reference
3 | version: 2.1
4 | 
5 | # Orbs are reusable packages of CircleCI configuration that you may share across projects, enabling you to create encapsulated, parameterized commands, jobs, and executors that can be used across multiple projects.
6 | # See: https://circleci.com/docs/2.0/orb-intro/
7 | orbs:
8 |   # The python orb contains a set of prepackaged CircleCI configuration you can use repeatedly in your configuration files
9 |   # Orb commands and jobs help you with common scripting around a language/tool
10 |   # so you don't have to copy and paste it everywhere.
11 |   # See the orb documentation here: https://circleci.com/developer/orbs/orb/circleci/python
12 |   python: circleci/python@1.5.0
13 | 
14 | # Define a job to be invoked later in a workflow.
15 | # See: https://circleci.com/docs/2.0/configuration-reference/#jobs
16 | jobs:
17 |   build-and-test: # This is the name of the job; feel free to change it to better match what you're trying to do!
18 |     # The next lines define a Docker executor: https://circleci.com/docs/2.0/executor-types/
19 |     # You can specify an image from Dockerhub or use one of the convenience images from CircleCI's Developer Hub
20 |     # A list of available CircleCI Docker convenience images is available here: https://circleci.com/developer/images/image/cimg/python
21 |     # The executor is the environment in which the steps below will be executed - below we use a Python 3.9.0 container
22 |     # Change the version below to your required version of python
23 |     docker:
24 |       - image: cimg/python:3.9.0
25 |     # Checkout the code as the first step. This is a dedicated CircleCI step.
26 |     # The python orb's install-packages step will install the dependencies from a Pipfile via Pipenv by default.
27 |     # Here we're making sure we just use the system-wide pip. By default it uses the project root's requirements.txt.
28 |     # Then run your tests!
29 |     # CircleCI will report the results back to your VCS provider.
30 |     steps:
31 |       - checkout
32 |       - python/install-packages:
33 |           pkg-manager: pip
34 |           # app-dir: ~/project/package-directory/  # If your requirements.txt isn't in the root directory.
35 |           pip-dependency-file: test-requirements.txt  # if you have a different name for your requirements file, maybe one that combines your runtime and test requirements.
36 |       - run:
37 |           name: Install L2L
38 |           command: pip install -e .
39 |       - run:
40 |           name: Run tests
41 |           # This assumes nose2 is installed via the install-packages step above
42 |           command: nose2 --with-coverage --start-dir=l2l/
43 | 
44 | # Invoke jobs via workflows
45 | # See: https://circleci.com/docs/2.0/configuration-reference/#workflows
46 | workflows:
47 |   sample: # This is the name of the workflow; feel free to change it to better match your workflow.
48 |     # Inside the workflow, you define the jobs you want to run.
49 | jobs: 50 | - build-and-test 51 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | omit = l2l/tests/* 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .eggs/ 2 | results/ 3 | 4 | *.pyc 5 | *~ 6 | 7 | .libs 8 | .deps 9 | *.orig 10 | 11 | **/*.swp 12 | **/*.bak 13 | **/.project 14 | 15 | **/.cproject 16 | **/.idea 17 | 18 | **/nbproject/ 19 | 20 | **/libltdl/ 21 | 22 | # libtool 23 | **/Makefile.in 24 | **/aclocal.m4 25 | **/autom4te.cache/ 26 | **/config.* 27 | **/configure 28 | **/depcomp 29 | **/install-sh 30 | **/libtool 31 | **/ltmain.sh 32 | **/m4/ 33 | **/missing 34 | **/stamp-h? 35 | **/compile 36 | .deps/ 37 | .dirstamp 38 | .libs/ 39 | *.l[ao] 40 | *.o 41 | *.h.in 42 | 43 | **/output/ 44 | **/__pycache__/ 45 | .ropeproject/ 46 | 47 | **/scratchpad.py 48 | 49 | **/ipython-log.py 50 | 51 | _build/ 52 | 53 | **/.nfs* 54 | 55 | # Style Report Directory 56 | /style-reports 57 | 58 | # Package egg-info directory 59 | /Learning_to_Learn.egg-info 60 | 61 | # unison temp files 62 | **/.unison* 63 | 64 | # An ignored directory to put all files that are to be ignored 65 | /ignored 66 | 67 | # Mac OS X 68 | **/.DS_Store 69 | 70 | # root path config file 71 | bin/path.conf 72 | 73 | # Some files that are generated during testing 74 | /*.png 75 | 76 | tags 77 | _static 78 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.12" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: doc/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all warnings to avoid broken references 23 | # fail_on_warning: true 24 | 25 | # Optionally build your docs in additional formats such as PDF and ePub 26 | # formats: 27 | # - pdf 28 | # - epub 29 | 30 | # Optional but recommended, declare the Python requirements required 31 | # to build your documentation 32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 33 | python: 34 | install: 35 | - requirements: doc/requirements.txt -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: bionic 2 | sudo: false 3 | addons: 4 | apt: 5 | update: true 6 | 7 | language: python 8 | python: 9 | - "3.6" 10 | - "3.7" 11 | - "3.8" 12 | 13 | install: 14 | - pip install -r requirements.txt 15 | - pip -V 16 | - python setup.py install 17 | - pip install coverage coveralls nose 18 | - source install.sh 19 | 20 | script: 21 | nosetests -v --with-coverage --cover-package=l2l/ 22 | after_success: 23 | - coveralls 24 | 
--------------------------------------------------------------------------------
/ACKNOWLEDGEMENTS:
--------------------------------------------------------------------------------
1 | This open source software code was developed in part in the Human Brain Project, funded from the European Union’s
2 | Horizon 2020 Framework Programme for Research and Innovation under the Specific Grant Agreement No. 720270 (HBP
3 | SGA1) and 785907 (HBP SGA2).
4 | 
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | Anand Subramoney
2 | Arjun Rao
3 | Sandra Diaz-Pier
4 | Franz Scherr
5 | Darjan Salaj
6 | Jakob Jordan
7 | Nikolaus Kopp
8 | Daniel Hackhofer
9 | Sinisa Stekovic
10 | Thomas Bohnstingl
11 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | The code in the L2L package will potentially be used by other people, so these guidelines are necessary to make the code useful for others:
2 | 
3 | General Guidelines
4 | ==================
5 | 
6 | * Since we have so many people working with the same repository, it would help a lot if you are familiar with git. `Try Git <https://try.github.io>`_ and `The Git Book <https://git-scm.com/book>`_ are two very good resources for learning git.
7 | * Try to follow the existing code style as much as possible, stick to `PEP8 <https://peps.python.org/pep-0008/>`_, don’t overdo OOP (really), and keep your code well commented.
8 | * For each pull request and commit, a Python style check is done by running `run-style-check.sh` in the root directory. This has to return with 0 errors for the pull request to be merged in. Before every pull request, you should run this script to make sure there are no errors.
9 | * Write documentation for all your changes, both in the code itself in the form of docstrings, and updates to `intro.rst <doc/intro.rst>`_.
10 | 
11 | Working with the repository
12 | ===========================
13 | 
14 | * Create a separate git branch for yourself and don't work off master. You can either create one branch for yourself or different feature branches for specific changes. Please make sure that the code remains within the private repository – so no public forks.
15 | * Use `pull requests <https://docs.github.com/en/pull-requests>`_ for merging code into the master branch. (See below for details about using pull requests.)
16 | * Use `GitHub issues <https://docs.github.com/en/issues>`_ for tracking tasks and reporting bugs in the framework. When you start implementing something specific, create and assign appropriate issues to yourself. You can use this for tracking progress and notes about implementation.
17 | 
18 | Pull Requests
19 | =============
20 | All project-related code has to be merged into master, so it's available to other people.
21 | 
22 | * For each *logical unit of change* (not the entire project!), open a pull request on GitHub.
23 | * Either @maharjun or @anandtrex will look at the code and give comments on code style, implementation etc. and, once approved, merge it into master.
24 | * **IMPORTANT:** It’s easier for everyone if you try to merge in changes a small part at a time with pull requests instead of doing a full merge in the end.
25 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | L2L Gradient-free Optimization Framework
2 | ++++++++++++++++++++++++++++++++++++++++
3 | 
4 | .. image:: https://travis-ci.org/Meta-optimization/L2L.svg?branch=master
5 |    :target: https://travis-ci.org/Meta-optimization/L2L
6 | 
7 | 
8 | .. image:: https://circleci.com/gh/Meta-optimization/L2L.svg?style=svg
9 |    :target: https://circleci.com/gh/Meta-optimization/L2L
10 | 
11 | .. image:: https://coveralls.io/repos/github/Meta-optimization/L2L/badge.svg?branch=master
12 |    :target: https://coveralls.io/github/Meta-optimization/L2L?branch=master
13 | 
14 | 
15 | About
16 | *****
17 | 
18 | The L2L (Learning-to-learn) gradient-free optimization framework contains well-documented and tested implementations of various gradient-free optimization algorithms. It also defines an API that makes it easy to optimize (hyper-)parameters for any task (optimizee). All the implementations in this package are parallel and can run across different cores and nodes (but equally well on a single core).
19 | 
20 | NOTE: The L2L framework is currently in **BETA**
21 | 
22 | Getting Started
23 | ***************
24 | 
25 | 
26 | If you are developing a new Optimizee or want to try out a new Optimizee with the Optimizers in the L2L package, install
27 | L2L as a python package. See section `Installing the L2L Package`_ for details on how to install the package (this
28 | automatically installs all requirements).
29 | 
30 | Documentation is available at `<https://l2l.readthedocs.io/>`_.
31 | 
32 | 
33 | Installing the L2L Package
34 | **************************
35 | 
36 | From the top-level directory of the repository, run the following command::
37 | 
38 |     python -m pip install -e .
39 | 
40 | or alternatively (if you do not use a virtual environment)::
41 | 
42 |     python -m pip install -e . --user
43 | 
44 | The `--user` flag is to be used if you wish to install in the user path as
45 | opposed to the root path. However, we **recommend using a virtual environment**,
46 | such as the standard Python env or conda env.
47 | 
48 | The above will install the package by creating symlinks to the code files in the
49 | relevant directory containing python modules. This means that you can change any
50 | of the code files and see the changes reflected in the package immediately (i.e.
51 | without requiring a reinstall). In order to uninstall, one may run the following::
52 | 
53 |     pip uninstall L2L
54 | 
55 | *Note that if the setup was done using sudo access, then the uninstall must also
56 | be done using sudo access*
57 | 
58 | Having installed this package, we now have access to the top-level `l2l` module,
59 | which contains all the modules relevant for using the l2l package.
60 | 
61 | This should also install the `sphinx` package, which should enable you to build
62 | the documentation as specified below.
63 | 
64 | 
65 | Building Documentation
66 | **********************
67 | Run the following command from the `doc` directory::
68 | 
69 |     make html
70 | 
71 | And open the documentation with::
72 | 
73 |     firefox _build/html/index.html
74 | 
75 | All further (and extensive) documentation is in the html documentation!
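
Usage Sketch
************

An experiment wires an optimizee (the task, or "inner loop") to an optimizer (the
"outer loop") through a trajectory. The following is a minimal sketch of that
pattern, adapted from ``bin/l2l-fun-ga.py`` in this repository; the parameter
values are illustrative, not recommendations::

    from l2l.utils.experiment import Experiment
    from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions
    from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee
    from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters

    experiment = Experiment(root_dir_path='../results')
    traj, _ = experiment.prepare_experiment(name='L2L-FUN-GA', log_stdout=True)

    # Optimizee: a noisy benchmark function whose parameters are optimized
    (benchmark_name, benchmark_function), benchmark_parameters = \
        BenchmarkedFunctions().get_function_by_index(4, noise=True)
    optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=100)

    # Optimizer: a genetic algorithm over the optimizee's individuals
    parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5,
                                            mut_prob=0.3, n_iteration=100,
                                            ind_prob=0.02, tourn_size=15,
                                            mate_par=0.5, mut_par=1)
    optimizer = GeneticAlgorithmOptimizer(traj,
                                          optimizee_create_individual=optimizee.create_individual,
                                          optimizee_fitness_weights=(-0.1,),
                                          parameters=parameters)

    experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
                              optimizer_parameters=parameters)
    experiment.end_experiment(optimizer)

The sign of each entry in ``optimizee_fitness_weights`` sets the direction of
optimization: negative weights minimize, positive weights maximize (see
``bin/l2l-template.py``).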
76 | 
77 | 
78 | Invoking the tests
79 | ******************
80 | 
81 | To run the tests go to the folder `l2l/tests` and execute::
82 | 
83 |     python test_all.py
84 | 
85 | If the ``nose`` package is installed, run from the top-level folder (L2L)::
86 | 
87 |     nosetests -v --with-coverage --cover-package=l2l/
88 | 
--------------------------------------------------------------------------------
/bin/active-wait-ga-checkpoint.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import yaml
4 | import numpy as np
5 | 
6 | import pickle
7 | 
8 | from l2l.optimizees.active_wait.optimizee_aw import AWOptimizee, AWOptimizeeParameters
9 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions
10 | from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters
11 | 
12 | from l2l.utils.experiment import Experiment
13 | 
14 | def main():
15 |     experiment = Experiment(root_dir_path='./results')
16 |     name = 'L2L-FUN-GA'
17 |     loaded_traj = experiment.load_trajectory("/home/hanna/Documents/Meta-optimization/results/activeWait_GeneticAlgorithm/simulation/trajectories/trajectory_5.bin")
18 |     traj, _ = experiment.prepare_experiment(name=name, checkpoint=loaded_traj, log_stdout=True, debug=True, stop_run=True, overwrite=True)
19 | 
20 |     ## Benchmark function
21 |     function_id = 4
22 |     bench_functs = BenchmarkedFunctions()
23 |     (benchmark_name, benchmark_function), benchmark_parameters = \
24 |         bench_functs.get_function_by_index(function_id, noise=True)
25 | 
26 |     # Active Wait Optimizee
27 |     optimizee_parameters = AWOptimizeeParameters(difficulty=10000)
28 |     optimizee = AWOptimizee(traj, optimizee_parameters)
29 | 
30 | 
31 |     ## Outerloop optimizer initialization
32 |     parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5,
33 |                                             mut_prob=0.3, n_iteration=5,
34 |                                             ind_prob=0.02,
35 |                                             tourn_size=15, mate_par=0.5,
36 |                                             mut_par=1
37 |                                             )
38 | 
39 |     optimizer = GeneticAlgorithmOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
40 |                                           optimizee_fitness_weights=(-0.1,),
41 |                                           parameters=parameters)
42 |     experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
43 |                               optimizer_parameters=parameters)
44 |     experiment.end_experiment(optimizer)
45 | 
46 | if __name__ == '__main__':
47 |     main()
48 | 
--------------------------------------------------------------------------------
/bin/active-wait-ga.py:
--------------------------------------------------------------------------------
1 | from l2l.utils.experiment import Experiment
2 | import numpy as np
3 | 
4 | from l2l.optimizees.active_wait import AWOptimizee, AWOptimizeeParameters
5 | from l2l.optimizers.evolution import GeneticAlgorithmParameters, GeneticAlgorithmOptimizer
6 | 
7 | 
8 | def run_experiment():
9 |     experiment = Experiment(
10 |         root_dir_path='../results')
11 | 
12 |     runner_params = {
13 |         "srun": "",
14 |         "exec": "python3.9"
15 |     }
16 |     traj, _ = experiment.prepare_experiment(
17 |         runner_params=runner_params, name="aw_ga", overwrite=True)
18 | 
19 | 
20 |     # Active Wait Optimizee
21 |     optimizee_parameters = AWOptimizeeParameters(difficulty=10000.0)
22 |     optimizee = AWOptimizee(traj, optimizee_parameters)
23 | 
24 | 
25 |     # Genetic Algorithm Optimizer
26 |     optimizer_parameters = 
GeneticAlgorithmParameters(seed=1580211, 27 | pop_size=16, 28 | cx_prob=0.7, 29 | mut_prob=0.7, 30 | n_iteration=10, 31 | ind_prob=0.45, 32 | tourn_size=4, 33 | mate_par=0.5, 34 | mut_par=1 35 | ) 36 | optimizer = GeneticAlgorithmOptimizer(traj, 37 | optimizee_create_individual=optimizee.create_individual, 38 | optimizee_fitness_weights=(1,), 39 | parameters=optimizer_parameters) 40 | 41 | 42 | # Run experiment 43 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 44 | optimizer_parameters=optimizer_parameters, 45 | optimizee_parameters=optimizee_parameters) 46 | # End experiment 47 | experiment.end_experiment(optimizer) 48 | 49 | 50 | def main(): 51 | run_experiment() 52 | 53 | 54 | 55 | if __name__ == '__main__': 56 | main() 57 | -------------------------------------------------------------------------------- /bin/l2l-fun-ce-gaussmix.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 4 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 5 | from l2l.optimizers.crossentropy.optimizer import CrossEntropyOptimizer, CrossEntropyParameters 6 | from l2l.optimizers.crossentropy.distribution import NoisyBayesianGaussianMixture 7 | from l2l.utils.experiment import Experiment 8 | 9 | 10 | def main(): 11 | name = 'L2L-FUN-CE' 12 | experiment = Experiment(root_dir_path='../results') 13 | traj, _ = experiment.prepare_experiment(name=name, log_stdout=True) 14 | 15 | 16 | function_id = 14 17 | bench_functs = BenchmarkedFunctions() 18 | (benchmark_name, benchmark_function), benchmark_parameters = \ 19 | bench_functs.get_function_by_index(function_id, noise=True) 20 | 21 | optimizee_seed = 100 22 | 23 | ## Innerloop simulator 24 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 25 | 26 | ## Outerloop optimizer initialization 27 | parameters = CrossEntropyParameters(pop_size=50, rho=0.9, smoothing=0.0, temp_decay=0, n_iteration=160, 28 | distribution=NoisyBayesianGaussianMixture(n_components=3, 29 | noise_magnitude=1., 30 | noise_decay=0.9, 31 | weight_concentration_prior=1.5), 32 | stop_criterion=np.inf, seed=103) 33 | optimizer = CrossEntropyOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 34 | optimizee_fitness_weights=(-0.1,), 35 | parameters=parameters, 36 | optimizee_bounding_func=optimizee.bounding_func) 37 | 38 | # Run experiment 39 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 40 | optimizer_parameters=parameters) 41 | # End experiment 42 | experiment.end_experiment(optimizer) 43 | 44 | 45 | if __name__ == '__main__': 46 | main() 47 | -------------------------------------------------------------------------------- /bin/l2l-fun-ce.py: -------------------------------------------------------------------------------- 1 | from l2l.optimizees.functions import tools as function_tools 2 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 3 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 4 | from l2l.optimizers.crossentropy.distribution import NoisyGaussian 5 | from l2l.optimizers.crossentropy import CrossEntropyOptimizer, CrossEntropyParameters 6 | from l2l.utils.experiment import Experiment 7 | 8 | import numpy as np 9 | 10 | 11 | def main(): 12 | experiment = Experiment(root_dir_path='../results') 13 | name = 'L2L-FUN-CE' 14 | 15 | traj, _ = 
experiment.prepare_experiment(name=name, log_stdout=True) 16 | 17 | ## Benchmark function 18 | function_id = 14 19 | bench_functs = BenchmarkedFunctions() 20 | (benchmark_name, benchmark_function), benchmark_parameters = \ 21 | bench_functs.get_function_by_index(function_id, noise=True) 22 | 23 | optimizee_seed = 100 24 | random_state = np.random.RandomState(seed=optimizee_seed) 25 | function_tools.plot(benchmark_function, random_state) 26 | 27 | ## Innerloop simulator 28 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=101) 29 | 30 | ## Outerloop optimizer initialization 31 | parameters = CrossEntropyParameters(pop_size=10, rho=0.9, smoothing=0.0, temp_decay=0, n_iteration=1000, 32 | distribution=NoisyGaussian(noise_magnitude=1., noise_decay=0.99), 33 | stop_criterion=np.inf, seed=102) 34 | optimizer = CrossEntropyOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 35 | optimizee_fitness_weights=(-0.1,), 36 | parameters=parameters, 37 | optimizee_bounding_func=optimizee.bounding_func) 38 | 39 | experiment.run_experiment(optimizee=optimizee, optimizer=optimizer) 40 | # End experiment 41 | experiment.end_experiment(optimizer) 42 | 43 | 44 | if __name__ == '__main__': 45 | main() 46 | -------------------------------------------------------------------------------- /bin/l2l-fun-es.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | 4 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 5 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 6 | from l2l.optimizers.evolutionstrategies import EvolutionStrategiesParameters, EvolutionStrategiesOptimizer 7 | from l2l.utils.experiment import Experiment 8 | 9 | 10 | def run_experiment(): 11 | experiment = Experiment("../results/") 12 | name = 'L2L-FUN-ES' 13 | trajectory_name = 'mirroring-and-fitness-shaping' 14 | traj, runner_params = experiment.prepare_experiment(name=name, 15 | trajectory_name=trajectory_name, 16 | log_stdout=True) 17 | 18 | ## Benchmark function 19 | function_id = 14 20 | bench_functs = BenchmarkedFunctions() 21 | (benchmark_name, benchmark_function), benchmark_parameters = \ 22 | bench_functs.get_function_by_index(function_id, noise=True) 23 | 24 | optimizee_seed = 200 25 | 26 | ## Innerloop simulator 27 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 28 | 29 | ## Outerloop optimizer initialization 30 | optimizer_seed = 1234 31 | parameters = EvolutionStrategiesParameters( 32 | learning_rate=0.1, 33 | noise_std=1.0, 34 | mirrored_sampling_enabled=True, 35 | fitness_shaping_enabled=True, 36 | pop_size=20, 37 | n_iteration=1000, 38 | stop_criterion=np.Inf, 39 | seed=optimizer_seed) 40 | 41 | optimizer = EvolutionStrategiesOptimizer( 42 | traj, 43 | optimizee_create_individual=optimizee.create_individual, 44 | optimizee_fitness_weights=(-1.,), 45 | parameters=parameters, 46 | optimizee_bounding_func=optimizee.bounding_func) 47 | 48 | # Run experiment 49 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 50 | optimizer_parameters=parameters) 51 | # End experiment 52 | experiment.end_experiment(optimizer) 53 | 54 | 55 | def main(): 56 | run_experiment() 57 | 58 | 59 | if __name__ == '__main__': 60 | main() 61 | -------------------------------------------------------------------------------- /bin/l2l-fun-face.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 
3 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 4 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 5 | from l2l.optimizers.crossentropy.distribution import Gaussian 6 | from l2l.optimizers.face.optimizer import FACEOptimizer, FACEParameters 7 | from l2l.utils.experiment import Experiment 8 | 9 | 10 | def main(): 11 | name = 'L2L-FUN-FACE' 12 | experiment = Experiment("../results/") 13 | trajectory_name = name 14 | traj, runner_params, = experiment.prepare_experiment(name=name, 15 | trajectory_name=trajectory_name, 16 | log_stdout=True) 17 | 18 | ## Benchmark function 19 | function_id = 4 20 | bench_functs = BenchmarkedFunctions() 21 | (benchmark_name, benchmark_function), benchmark_parameters = \ 22 | bench_functs.get_function_by_index(function_id, noise=True) 23 | 24 | optimizee_seed = 100 25 | 26 | ## Innerloop simulator 27 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 28 | 29 | ## Outerloop optimizer initialization 30 | parameters = FACEParameters(min_pop_size=20, max_pop_size=50, n_elite=10, smoothing=0.2, temp_decay=0, 31 | n_iteration=1, 32 | distribution=Gaussian(), n_expand=5, stop_criterion=np.inf, seed=109) 33 | optimizer = FACEOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 34 | optimizee_fitness_weights=(-0.1,), 35 | parameters=parameters, 36 | optimizee_bounding_func=optimizee.bounding_func) 37 | 38 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 39 | optimizer_parameters=parameters, 40 | optimizee_parameters=None) 41 | experiment.end_experiment(optimizer) 42 | 43 | 44 | if __name__ == '__main__': 45 | main() 46 | -------------------------------------------------------------------------------- /bin/l2l-fun-ga-checkpoint.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import yaml 4 | import numpy as np 5 | 6 | import pickle 7 | 8 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 9 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 10 | from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters 11 | 12 | from l2l.utils.experiment import Experiment 13 | 14 | def main(): 15 | experiment = Experiment(root_dir_path='./results') 16 | name = 'L2L-FUN-GA' 17 | loaded_traj = experiment.load_trajectory("path_to_trajectory_file") 18 | traj, _ = experiment.prepare_experiment(name=name,checkpoint=loaded_traj, log_stdout=True, debug=True, stop_run=True, overwrite=True) 19 | 20 | ## Benchmark function 21 | function_id = 4 22 | bench_functs = BenchmarkedFunctions() 23 | (benchmark_name, benchmark_function), benchmark_parameters = \ 24 | bench_functs.get_function_by_index(function_id, noise=True) 25 | 26 | optimizee_seed = 100 27 | random_state = np.random.RandomState(seed=optimizee_seed) 28 | 29 | ## Innerloop simulator 30 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 31 | 32 | ## Outerloop optimizer initialization 33 | parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5, 34 | mut_prob=0.3, n_iteration=5, 35 | ind_prob=0.02, 36 | tourn_size=15, mate_par=0.5, 37 | mut_par=1 38 | ) 39 | 40 | optimizer = GeneticAlgorithmOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 41 | optimizee_fitness_weights=(-0.1,), 42 | parameters=parameters) 43 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 44 | 
optimizee_parameters=parameters) 45 | experiment.end_experiment(optimizer) 46 | 47 | if __name__ == '__main__': 48 | main() 49 | -------------------------------------------------------------------------------- /bin/l2l-fun-ga.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import yaml 4 | import numpy as np 5 | 6 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 7 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 8 | from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters 9 | 10 | from l2l.utils.experiment import Experiment 11 | 12 | 13 | def main(): 14 | experiment = Experiment(root_dir_path='../results') 15 | name = 'L2L-FUN-GA' 16 | traj, _ = experiment.prepare_experiment(name=name, log_stdout=True) 17 | 18 | ## Benchmark function 19 | function_id = 4 20 | bench_functs = BenchmarkedFunctions() 21 | (benchmark_name, benchmark_function), benchmark_parameters = \ 22 | bench_functs.get_function_by_index(function_id, noise=True) 23 | 24 | optimizee_seed = 100 25 | random_state = np.random.RandomState(seed=optimizee_seed) 26 | 27 | ## Innerloop simulator 28 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 29 | 30 | ## Outerloop optimizer initialization 31 | parameters = GeneticAlgorithmParameters(seed=0, pop_size=50, cx_prob=0.5, 32 | mut_prob=0.3, n_iteration=100, 33 | ind_prob=0.02, 34 | tourn_size=15, mate_par=0.5, 35 | mut_par=1 36 | ) 37 | 38 | optimizer = GeneticAlgorithmOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 39 | optimizee_fitness_weights=(-0.1,), 40 | parameters=parameters) 41 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 42 | optimizee_parameters=parameters) 43 | experiment.end_experiment(optimizer) 44 | 45 | 46 | if __name__ == '__main__': 47 | main() 48 | -------------------------------------------------------------------------------- /bin/l2l-fun-gd.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from l2l.utils.environment import Environment 3 | from l2l.utils.experiment import Experiment 4 | 5 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 6 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 7 | from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer 8 | # from l2l.optimizers.gradientdescent.optimizer import ClassicGDParameters 9 | # from l2l.optimizers.gradientdescent.optimizer import StochasticGDParameters 10 | # from l2l.optimizers.gradientdescent.optimizer import AdamParameters 11 | from l2l.optimizers.gradientdescent.optimizer import AdaMaxParameters 12 | from l2l.optimizers.gradientdescent.optimizer import RMSPropParameters 13 | 14 | 15 | def main(): 16 | name = 'L2L-FUN-GD' 17 | experiment = Experiment("../results") 18 | traj, runner_params = experiment.prepare_experiment(name=name, 19 | trajectory_name=name) 20 | 21 | ## Benchmark function 22 | function_id = 4 23 | bench_functs = BenchmarkedFunctions() 24 | (benchmark_name, benchmark_function), benchmark_parameters = \ 25 | bench_functs.get_function_by_index(function_id, noise=True) 26 | 27 | optimizee_seed = 100 28 | random_state = np.random.RandomState(seed=optimizee_seed) 29 | 30 | ## Innerloop simulator 31 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, 32 | seed=optimizee_seed) 33 | 34 | ## Outerloop optimizer 
initialization 35 | # parameters = ClassicGDParameters(learning_rate=0.01, exploration_step_size=0.01, 36 | # n_random_steps=5, n_iteration=100, 37 | # stop_criterion=np.Inf) 38 | # parameters = AdamParameters(learning_rate=0.01, exploration_step_size=0.01, n_random_steps=5, first_order_decay=0.8, 39 | # second_order_decay=0.8, n_iteration=100, stop_criterion=np.Inf) 40 | # parameters = AdaMaxParameters(learning_rate=0.02, exploration_step_size=0.01, n_random_steps=5, first_order_decay=0.9, 41 | # second_order_decay=0.999, n_iteration=100, stop_criterion=np.Inf, seed=123) 42 | # parameters = StochasticGDParameters(learning_rate=0.01, stochastic_deviation=1, stochastic_decay=0.99, 43 | # exploration_step_size=0.01, n_random_steps=5, n_iteration=100, 44 | # stop_criterion=np.Inf) 45 | parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, 46 | n_random_steps=5, momentum_decay=0.5, 47 | n_iteration=100, stop_criterion=np.Inf, seed=99) 48 | 49 | optimizer = GradientDescentOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 50 | optimizee_fitness_weights=(0.1,), 51 | parameters=parameters, 52 | optimizee_bounding_func=optimizee.bounding_func) 53 | 54 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 55 | optimizer_parameters=parameters) 56 | 57 | experiment.end_experiment(optimizer) 58 | 59 | 60 | if __name__ == '__main__': 61 | main() 62 | -------------------------------------------------------------------------------- /bin/l2l-fun-gs.py: -------------------------------------------------------------------------------- 1 | from l2l.utils.experiment import Experiment 2 | import numpy as np 3 | 4 | from l2l.optimizees.functions import tools as function_tools 5 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 6 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 7 | from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters 8 | 9 | 10 | def main(): 11 | name = 'L2L-FUN-GS' 12 | experiment = Experiment(root_dir_path='../results') 13 | traj, _ = experiment.prepare_experiment(name=name, log_stdout=True) 14 | 15 | ## Benchmark function 16 | function_id = 4 17 | bench_functs = BenchmarkedFunctions() 18 | (benchmark_name, benchmark_function), benchmark_parameters = \ 19 | bench_functs.get_function_by_index(function_id, noise=True) 20 | 21 | optimizee_seed = 100 22 | random_state = np.random.RandomState(seed=optimizee_seed) 23 | function_tools.plot(benchmark_function, random_state) 24 | 25 | ## Innerloop simulator 26 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed) 27 | 28 | ## Outerloop optimizer initialization 29 | n_grid_divs_per_axis = 30 30 | parameters = GridSearchParameters(param_grid={ 31 | 'coords': (optimizee.bound[0], optimizee.bound[1], n_grid_divs_per_axis) 32 | }) 33 | optimizer = GridSearchOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 34 | optimizee_fitness_weights=(-0.1,), 35 | parameters=parameters) 36 | # Experiment run 37 | experiment.run_experiment(optimizee=optimizee, optimizer=optimizer, 38 | optimizee_parameters=parameters) 39 | # End experiment 40 | experiment.end_experiment(optimizer) 41 | 42 | 43 | if __name__ == '__main__': 44 | main() 45 | -------------------------------------------------------------------------------- /bin/l2l-fun-mgd.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from l2l.utils.environment 
import Environment 3 | from l2l.utils.experiment import Experiment 4 | 5 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 6 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 7 | from l2l.optimizers.multigradientdescent.optimizer import MultiGradientDescentOptimizer 8 | # from l2l.optimizers.gradientdescent.optimizer import ClassicGDParameters 9 | # from l2l.optimizers.gradientdescent.optimizer import StochasticGDParameters 10 | # from l2l.optimizers.gradientdescent.optimizer import AdamParameters 11 | from l2l.optimizers.multigradientdescent.optimizer import MultiRMSPropParameters 12 | 13 | 14 | def main(): 15 | name = 'L2L-FUN-MGD' 16 | experiment = Experiment("../results") 17 | traj, runner_params = experiment.prepare_experiment(name=name, 18 | trajectory_name=name) 19 | 20 | ## Benchmark function 21 | function_id = 4 22 | bench_functs = BenchmarkedFunctions() 23 | (benchmark_name, benchmark_function), benchmark_parameters = \ 24 | bench_functs.get_function_by_index(function_id, noise=True) 25 | 26 | optimizee_seed = 100 27 | random_state = np.random.RandomState(seed=optimizee_seed) 28 | 29 | ## Innerloop simulator 30 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, 31 | seed=optimizee_seed) 32 | 33 | ## Outerloop optimizer initialization 34 | # parameters = ClassicGDParameters(learning_rate=0.01, exploration_step_size=0.01, 35 | # n_random_steps=5, n_iteration=100, 36 | # stop_criterion=np.Inf) 37 | # parameters = AdamParameters(learning_rate=0.01, exploration_step_size=0.01, n_random_steps=5, first_order_decay=0.8, 38 | # second_order_decay=0.8, n_iteration=100, stop_criterion=np.Inf) 39 | # parameters = StochasticGDParameters(learning_rate=0.01, stochastic_deviation=1, stochastic_decay=0.99, 40 | # exploration_step_size=0.01, n_random_steps=5, n_iteration=100, 41 | # stop_criterion=np.Inf) 42 | parameters = MultiRMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, 43 | n_random_steps=5, momentum_decay=0.5, 44 | n_iteration=100, stop_criterion=np.Inf, seed=99, n_inner_params=2) 45 | 46 | optimizer = MultiGradientDescentOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 47 | optimizee_fitness_weights=(0.1,), 48 | parameters=parameters, 49 | optimizee_bounding_func=optimizee.bounding_func) 50 | 51 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 52 | optimizer_parameters=parameters) 53 | 54 | experiment.end_experiment(optimizer) 55 | 56 | 57 | if __name__ == '__main__': 58 | main() 59 | -------------------------------------------------------------------------------- /bin/l2l-fun-pt.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from l2l.utils.experiment import Experiment 3 | 4 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 5 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 6 | from l2l.optimizers.paralleltempering.optimizer import ParallelTemperingParameters, ParallelTemperingOptimizer, AvailableCoolingSchedules 7 | 8 | 9 | def main(): 10 | name = 'L2L-FunctionGenerator-PT' 11 | experiment = Experiment("../results/") 12 | traj, runner_params = experiment.prepare_experiment(name=name, 13 | trajectory_name=name, 14 | log_stdout=True) 15 | 16 | ## Benchmark function 17 | function_id = 14 18 | bench_functs = BenchmarkedFunctions() 19 | (benchmark_name, benchmark_function), benchmark_parameters = \ 20 | 
bench_functs.get_function_by_index(function_id, noise=True)
21 | 
22 |     optimizee_seed = 100
23 | 
24 |     ## Innerloop simulator
25 |     optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed)
26 | 
27 |     #--------------------------------------------------------------------------
28 |     # configure settings for parallel tempering:
29 |     # for each of the parallel runs choose
30 |     #   a cooling schedule
31 |     #   an upper and lower temperature bound
32 |     #   a decay parameter
33 |     #--------------------------------------------------------------------------
34 | 
35 |     # specify the number of parallel running schedules. Each following container
36 |     # has to have an entry for each parallel run
37 |     n_parallel_runs = 5
38 | 
39 |     # for detailed information on the cooling schedules see either the wiki or
40 |     # the documentation in l2l.optimizers.paralleltempering.optimizer
41 |     cooling_schedules = [AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE,
42 |                          AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE,
43 |                          AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE,
44 |                          AvailableCoolingSchedules.LINEAR_ADDAPTIVE,
45 |                          AvailableCoolingSchedules.LINEAR_ADDAPTIVE]
46 | 
47 |     # each entry has to be in [0, 1], and the first entry has to be larger than
48 |     # the second; they represent the starting and the ending temperature
49 |     temperature_bounds = np.array([
50 |         [0.8, 0],
51 |         [0.7, 0],
52 |         [0.6, 0],
53 |         [1, 0.1],
54 |         [0.9, 0.2]])
55 | 
56 |     # decay parameter for each schedule. If needed, it can be different for each
57 |     # schedule
58 |     decay_parameters = np.full(n_parallel_runs, 0.99)
59 |     #--------------------------------------------------------------------------
60 |     # end of configuration
61 |     #--------------------------------------------------------------------------
62 | 
63 |     # Check if the temperature bounds and decay parameters are reasonable.
64 |     # The checks are elementwise: compare first, then reduce with .all()
65 |     assert ((temperature_bounds <= 1).all() and (temperature_bounds >= 0).all()
66 |             and (temperature_bounds[:, 0] > temperature_bounds[:, 1]).all()), \
67 |         "Temperature bounds are not within specifications."
68 |     assert ((decay_parameters <= 1).all() and (decay_parameters >= 0).all()), \
69 |         "Decay parameter not within specifications."
70 | 
71 |     ## Outerloop optimizer initialization
72 |     parameters = ParallelTemperingParameters(n_parallel_runs=n_parallel_runs, noisy_step=.03, n_iteration=1000, stop_criterion=np.Inf,
73 |                                              seed=np.random.randint(1e5), cooling_schedules=cooling_schedules,
74 |                                              temperature_bounds=temperature_bounds, decay_parameters=decay_parameters)
75 |     optimizer = ParallelTemperingOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
76 |                                            optimizee_fitness_weights=(-1,),
77 |                                            parameters=parameters,
78 |                                            optimizee_bounding_func=optimizee.bounding_func)
79 | 
80 |     # Run experiment
81 |     experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
82 |                               optimizer_parameters=parameters)
83 |     # End experiment
84 |     experiment.end_experiment(optimizer)
85 | 
86 | 
87 | if __name__ == '__main__':
88 |     main()
89 | 
--------------------------------------------------------------------------------
/bin/l2l-fun-sa.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from l2l.utils.experiment import Experiment
3 | from l2l.optimizees.functions import tools as function_tools
4 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions
5 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee
6 | from l2l.optimizers.simulatedannealing.optimizer import SimulatedAnnealingParameters, SimulatedAnnealingOptimizer, AvailableCoolingSchedules
7 | 
8 | 
9 | def main():
10 |     name = 'L2L-FunctionGenerator-SA'
11 |     experiment = Experiment("../results/")
12 |     traj, runner_params = experiment.prepare_experiment(name=name,
13 |                                                         log_stdout=True)
14 | 
15 |     ## Benchmark function
16 |     function_id = 14
17 |     bench_functs = BenchmarkedFunctions()
18 |     (benchmark_name, benchmark_function), benchmark_parameters = \
19 |         bench_functs.get_function_by_index(function_id, noise=True)
20 | 
21 |     optimizee_seed = 100
22 |     random_state = np.random.RandomState(seed=optimizee_seed)
23 |     function_tools.plot(benchmark_function, random_state)
24 | 
25 |     ## Innerloop simulator
26 |     optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=optimizee_seed)
27 | 
28 |     ## Outerloop optimizer initialization
29 |     parameters = SimulatedAnnealingParameters(n_parallel_runs=50, noisy_step=.03, temp_decay=.99, n_iteration=100,
30 |                                               stop_criterion=np.Inf, seed=np.random.randint(1e5), cooling_schedule=AvailableCoolingSchedules.QUADRATIC_ADDAPTIVE)
31 | 
32 |     optimizer = SimulatedAnnealingOptimizer(traj, optimizee_create_individual=optimizee.create_individual,
33 |                                             optimizee_fitness_weights=(-1,),
34 |                                             parameters=parameters,
35 |                                             optimizee_bounding_func=optimizee.bounding_func)
36 |     # Run experiment
37 |     experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
38 |                               optimizer_parameters=parameters)
39 |     # End experiment
40 |     experiment.end_experiment(optimizer)
41 | 
42 | 
43 | if __name__ == '__main__':
44 |     main()
45 | 
--------------------------------------------------------------------------------
/bin/l2l-funall.py:
--------------------------------------------------------------------------------
1 | import logging.config
2 | import os
3 | import itertools
4 | 
5 | 
import numpy as np 6 | from l2l.utils.environment import Environment 7 | 8 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 9 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 10 | from l2l.optimizers.crossentropy.distribution import NoisyGaussian, Gaussian 11 | from l2l.optimizers.crossentropy.optimizer import CrossEntropyOptimizer, CrossEntropyParameters 12 | from l2l.optimizers.face.optimizer import FACEOptimizer, FACEParameters 13 | from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer, RMSPropParameters, ClassicGDParameters, \ 14 | AdamParameters, StochasticGDParameters 15 | from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters 16 | from l2l.utils.experiment import Experiment 17 | 18 | 19 | def main(): 20 | experiment = Experiment(root_dir_path='../results') 21 | name = 'L2L-FUNALL' 22 | runner_params = {} 23 | traj, _ = experiment.prepare_experiment(name=name, log_stdout=True, 24 | runner_params=runner_params) 25 | n_iterations = 100 26 | seed = 1 27 | 28 | # NOTE: Need to use lambdas here since we want the distributions within CE, FACE etc. optimizers to be reinitialized 29 | # afresh each time since it seems like they are stateful. 30 | optimizers = [ 31 | (CrossEntropyOptimizer, 32 | lambda: CrossEntropyParameters(pop_size=50, rho=0.2, smoothing=0.0, temp_decay=0, 33 | n_iteration=n_iterations, 34 | distribution=NoisyGaussian(noise_decay=0.95), 35 | stop_criterion=np.inf, seed=seed)), 36 | (FACEOptimizer, 37 | lambda: FACEParameters(min_pop_size=20, max_pop_size=50, n_elite=10, smoothing=0.2, temp_decay=0, 38 | n_iteration=n_iterations, distribution=Gaussian(), n_expand=5, 39 | seed=seed, stop_criterion=np.inf)), 40 | (GradientDescentOptimizer, 41 | lambda: RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, n_random_steps=5, momentum_decay=0.5, 42 | n_iteration=n_iterations, stop_criterion=np.Inf, seed=seed)), 43 | (GradientDescentOptimizer, 44 | lambda: ClassicGDParameters(learning_rate=0.01, exploration_step_size=0.01, n_random_steps=5, 45 | n_iteration=n_iterations, stop_criterion=np.Inf, seed=seed)), 46 | (GradientDescentOptimizer, 47 | lambda: AdamParameters(learning_rate=0.01, exploration_step_size=0.01, n_random_steps=5, first_order_decay=0.8, 48 | second_order_decay=0.8, n_iteration=n_iterations, stop_criterion=np.Inf, seed=seed)), 49 | (GradientDescentOptimizer, 50 | lambda: StochasticGDParameters(learning_rate=0.01, stochastic_deviation=1, stochastic_decay=0.99, 51 | exploration_step_size=0.01, n_random_steps=5, n_iteration=n_iterations, 52 | stop_criterion=np.Inf, seed=seed)) 53 | ] 54 | 55 | # NOTE: Benchmark functions 56 | bench_functs = BenchmarkedFunctions() 57 | function_ids = range(len(bench_functs.function_name_map)) 58 | 59 | for function_id, (optimizer_class, optimizer_parameters_fn) in itertools.product(function_ids, optimizers): 60 | optimizer_parameters = optimizer_parameters_fn() 61 | 62 | (benchmark_name, benchmark_function), benchmark_parameters = \ 63 | bench_functs.get_function_by_index(function_id, noise=True) 64 | 65 | optimizee = FunctionGeneratorOptimizee(traj, benchmark_function, seed=100) 66 | 67 | optimizee_fitness_weights = -1. 68 | # Gradient descent does descent! 69 | if optimizer_class == GradientDescentOptimizer: 70 | optimizee_fitness_weights = +1. 71 | # Grid search optimizer input depends on optimizee! 
72 | elif optimizer_class == GridSearchOptimizer: 73 | optimizer_parameters = GridSearchParameters(param_grid={ 74 | 'coords': (optimizee.bound[0], optimizee.bound[1], 30) 75 | }) 76 | 77 | optimizer = optimizer_class(traj, optimizee_create_individual=optimizee.create_individual, 78 | optimizee_fitness_weights=(optimizee_fitness_weights,), 79 | parameters=optimizer_parameters, 80 | optimizee_bounding_func=optimizee.bounding_func) 81 | 82 | experiment.run_experiment(optimizee=optimizee, 83 | optimizee_parameters=None, 84 | optimizer=optimizer, 85 | optimizer_parameters=optimizer_parameters) 86 | 87 | experiment.end_experiment(optimizer) 88 | 89 | 90 | if __name__ == '__main__': 91 | main() 92 | -------------------------------------------------------------------------------- /bin/l2l-mnist-ce.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from l2l.utils.experiment import Experiment 3 | 4 | from l2l.optimizees.mnist.optimizee import MNISTOptimizeeParameters, MNISTOptimizee 5 | from l2l.optimizers.crossentropy import CrossEntropyParameters, CrossEntropyOptimizer 6 | from l2l.optimizers.crossentropy.distribution import NoisyGaussian 7 | 8 | 9 | def run_experiment(): 10 | name = 'L2L-MNIST-CE' 11 | experiment = Experiment("../results/") 12 | traj, runner_params = experiment.prepare_experiment(name=name, 13 | trajectory_name=name, 14 | log_stdout=True) 15 | optimizee_seed = 200 16 | 17 | optimizee_parameters = MNISTOptimizeeParameters(n_hidden=10, seed=optimizee_seed, use_small_mnist=True) 18 | ## Innerloop simulator 19 | optimizee = MNISTOptimizee(traj, optimizee_parameters) 20 | 21 | ## Outerloop optimizer initialization 22 | optimizer_seed = 1234 23 | optimizer_parameters = CrossEntropyParameters(pop_size=40, rho=0.9, smoothing=0.0, temp_decay=0, n_iteration=5000, 24 | distribution=NoisyGaussian(noise_magnitude=1., noise_decay=0.99), 25 | stop_criterion=np.inf, seed=optimizer_seed) 26 | 27 | 28 | optimizer = CrossEntropyOptimizer(traj, optimizee_create_individual=optimizee.create_individual, 29 | optimizee_fitness_weights=(1.,), 30 | parameters=optimizer_parameters, 31 | optimizee_bounding_func=optimizee.bounding_func) 32 | 33 | # Run experiment 34 | experiment.run_experiment(optimizer=optimizer, optimizee=optimizee, 35 | optimizer_parameters=optimizer_parameters, 36 | optimizee_parameters=optimizee_parameters) 37 | # End experiment 38 | experiment.end_experiment(optimizer) 39 | 40 | 41 | def main(): 42 | run_experiment() 43 | 44 | 45 | if __name__ == '__main__': 46 | main() 47 | -------------------------------------------------------------------------------- /bin/l2l-mnist-es.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from l2l.utils.experiment import Experiment 3 | from l2l.optimizees.mnist.optimizee import MNISTOptimizeeParameters, MNISTOptimizee 4 | from l2l.optimizers.evolutionstrategies import EvolutionStrategiesParameters, EvolutionStrategiesOptimizer 5 | 6 | def run_experiment(): 7 | name = 'L2L-MNIST-ES' 8 | trajectory_name = 'mirroring-and-fitness-shaping' 9 | experiment = Experiment("../results/") 10 | traj, runner_params = experiment.prepare_experiment(name=name, 11 | trajectory_name=trajectory_name, 12 | log_stdout=True) 13 | 14 | optimizee_seed = 200 15 | optimizee_parameters = MNISTOptimizeeParameters(n_hidden=10, seed=optimizee_seed, use_small_mnist=True) 16 | ## Innerloop simulator 17 | optimizee = MNISTOptimizee(traj, optimizee_parameters) 18 | 
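    # Note on optimizee_fitness_weights below: as explained in bin/l2l-template.py,
    # the sign of each weight sets the direction of optimization, so the positive
    # weight (1.,) used here makes the optimizer maximize the MNIST fitness.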
19 |     ## Outerloop optimizer initialization
20 |     optimizer_seed = 1234
21 |     optimizer_parameters = EvolutionStrategiesParameters(
22 |         learning_rate=0.1,
23 |         noise_std=0.1,
24 |         mirrored_sampling_enabled=True,
25 |         fitness_shaping_enabled=True,
26 |         pop_size=20,
27 |         n_iteration=2000,
28 |         stop_criterion=np.Inf,
29 |         seed=optimizer_seed)
30 | 
31 |     optimizer = EvolutionStrategiesOptimizer(
32 |         traj,
33 |         optimizee_create_individual=optimizee.create_individual,
34 |         optimizee_fitness_weights=(1.,),
35 |         parameters=optimizer_parameters,
36 |         optimizee_bounding_func=optimizee.bounding_func)
37 | 
38 |     # Run experiment
39 |     experiment.run_experiment(optimizer=optimizer, optimizee=optimizee,
40 |                               optimizer_parameters=optimizer_parameters,
41 |                               optimizee_parameters=optimizee_parameters)
42 |     # End experiment
43 |     experiment.end_experiment(optimizer)
44 | 
45 | 
46 | def main():
47 |     run_experiment()
48 | 
49 | 
50 | if __name__ == '__main__':
51 |     main()
52 | 
--------------------------------------------------------------------------------
/bin/l2l-template.py:
--------------------------------------------------------------------------------
1 | """
2 | This file is a typical example of a script used to run an L2L experiment. Read the comments in the file for more
3 | explanations
4 | """
5 | from l2l.utils.experiment import Experiment
6 | from l2l.optimizees.optimizee import Optimizee, OptimizeeParameters
7 | from l2l.optimizers.optimizer import Optimizer, OptimizerParameters
8 | 
9 | 
10 | def main():
11 |     # TODO: use the experiment module to prepare and later run the simulation
12 |     # define a directory to store the results
13 |     experiment = Experiment(root_dir_path='~/home/user/L2L/results')
14 |     # TODO when using the template: use keywords to prepare the experiment and
15 |     # create a dictionary for runner parameters
16 |     # prepare_experiment returns the trajectory and all runner parameters
17 |     runner_params = {
18 |         "srun": "srun -n 1 -c 50",
19 |         "exec": "python3",
20 |         "max_workers": 32,
21 |     }
22 | 
23 |     traj, runner_params = experiment.prepare_experiment(name='L2L',
24 |                                                         log_stdout=True,
25 |                                                         runner_params=runner_params,
26 |                                                         # To enable more detailed output for debugging
27 |                                                         # set debug to True
28 |                                                         debug=False,
29 |                                                         # If you do not want to restart the optimizee
30 |                                                         # and the simulation should be stopped
31 |                                                         # set stop_run to True
32 |                                                         stop_run=False,
33 |                                                         # if you want to overwrite previous results,
34 |                                                         # set overwrite to True,
35 |                                                         # otherwise specify a different root_dir_path.
36 |                                                         overwrite=False)
37 | 
38 |     ## Innerloop simulator
39 |     # TODO when using the template: Change the optimizee to the appropriate
40 |     # Optimizee class
41 |     optimizee = Optimizee(traj)
42 |     # TODO Create optimizee parameters
43 |     optimizee_parameters = OptimizeeParameters()
44 | 
45 |     ## Outerloop optimizer initialization
46 |     # TODO when using the template: Change the optimizer to the appropriate
47 |     # Optimizer class and use the right value for optimizee_fitness_weights.
48 |     # Length is the number of dimensions of the fitness, and a negative value
49 |     # implies minimization and vice versa
50 |     optimizer_parameters = OptimizerParameters()
51 |     optimizer = Optimizer(traj, optimizee.create_individual, (1.0,),
52 |                           optimizer_parameters)
53 | 
54 |     experiment.run_experiment(optimizee=optimizee,
55 |                               optimizee_parameters=optimizee_parameters,
56 |                               optimizer=optimizer,
57 |                               optimizer_parameters=optimizer_parameters)
58 |     experiment.end_experiment(optimizer)
59 | 
60 | 
61 | if __name__ == '__main__':
62 |     main()
63 | 
--------------------------------------------------------------------------------
/bin/logging.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | formatters:
3 |   simple:
4 |     format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
5 | handlers:
6 |   console:
7 |     class: logging.StreamHandler
8 |     stream: ext://sys.stdout
9 |     formatter: simple
10 |   file:
11 |     class: logging.handlers.WatchedFileHandler
12 |     formatter: simple
13 |     filename: output.log
14 | loggers:
15 |   optimizers:
16 |     level: INFO
17 |     handlers: [console, file]
18 |     propagate: no
19 |   bin:
20 |     level: INFO
21 |     handlers: [console, file]
22 |     propagate: no
23 |   pypet:
24 |     level: ERROR
25 |     handlers: [console, file]
26 |     propagate: no
27 | root:
28 |   level: INFO
29 |   handlers: [console, file]
30 | 
--------------------------------------------------------------------------------
/bin/plot-functions.py:
--------------------------------------------------------------------------------
1 | import logging.config
2 | import os
3 | import warnings
4 | 
5 | import yaml
6 | 
7 | from l2l.optimizees.functions.function_generator import FunctionGenerator, GaussianParameters, PermutationParameters, \
8 |     EasomParameters, LangermannParameters, MichalewiczParameters, ShekelParameters, RastriginParameters, \
9 |     RosenbrockParameters, ChasmParameters, AckleyParameters
10 | from l2l.paths import Paths
11 | 
12 | warnings.filterwarnings("ignore")
13 | 
14 | logger = logging.getLogger('bin.plot-function-generator')
15 | 
16 | 
17 | def main():
18 |     name = 'plot-function-generator'
19 |     try:
20 |         with open('bin/path.conf') as f:
21 |             root_dir_path = f.read().strip()
22 |     except FileNotFoundError:
23 |         raise FileNotFoundError(
24 |             "You have not set the root path to store your results."
25 |             " Write the path to a path.conf text file in the bin directory"
26 |             " before running the simulation"
27 |         )
28 |     paths = Paths(name, dict(run_no='test'), root_dir_path=root_dir_path)
29 | 
30 |     with open("bin/logging.yaml") as f:
31 |         l_dict = yaml.safe_load(f)
32 |     log_output_file = os.path.join(paths.results_path, l_dict['handlers']['file']['filename'])
33 |     l_dict['handlers']['file']['filename'] = log_output_file
34 |     logging.config.dictConfig(l_dict)
35 | 
36 |     print("All output can be found in file ", log_output_file)
37 |     print("Change the values in logging.yaml to control log level and destination")
38 |     print("e.g. change the handler to console for the loggers you're interested in to get output to stdout")
change the handler to console for the loggers you're interesting in to get output to stdout") 39 | 40 | fg_params = [GaussianParameters(sigma=[[1.5, .1], [.1, .3]], mean=[-1., -1.]), 41 | GaussianParameters(sigma=[[.25, .3], [.3, 1.]], mean=[1., 1.]), 42 | GaussianParameters(sigma=[[.5, .25], [.25, 1.3]], mean=[2., -2.])] 43 | FunctionGenerator(fg_params, dims=2, noise=True).plot() 44 | 45 | FunctionGenerator([PermutationParameters(beta=0.005)], dims=2).plot() 46 | 47 | FunctionGenerator([EasomParameters()], dims=3).plot() 48 | 49 | FunctionGenerator([LangermannParameters(A='default', c='default')], dims=2).plot() 50 | 51 | FunctionGenerator([MichalewiczParameters(m='default')], dims=2).plot() 52 | 53 | FunctionGenerator([ShekelParameters(A='default', c='default')], dims=2).plot() 54 | 55 | fg_params = [ShekelParameters(A=[[8, 5]], c=[0.08]), 56 | LangermannParameters(A='default', c='default')] 57 | FunctionGenerator(fg_params, dims=2).plot() 58 | 59 | FunctionGenerator([RastriginParameters()], dims=2).plot() 60 | 61 | FunctionGenerator([RosenbrockParameters()], dims=2).plot() 62 | 63 | FunctionGenerator([ChasmParameters()], dims=2).plot() 64 | 65 | FunctionGenerator([AckleyParameters()], dims=2).plot() 66 | 67 | 68 | if __name__ == '__main__': 69 | main() 70 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
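# Example (hypothetical values): as noted above, the variables can be overridden
# per invocation from the command line, e.g.
#   make html SPHINXOPTS='-W' BUILDDIR=_build_strict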
16 | 
17 | .PHONY: help
18 | help:
19 | @echo "Please use \`make <target>' where <target> is one of"
20 | @echo " html to make standalone HTML files"
21 | @echo " dirhtml to make HTML files named index.html in directories"
22 | @echo " singlehtml to make a single large HTML file"
23 | @echo " pickle to make pickle files"
24 | @echo " json to make JSON files"
25 | @echo " htmlhelp to make HTML files and a HTML help project"
26 | @echo " qthelp to make HTML files and a qthelp project"
27 | @echo " applehelp to make an Apple Help Book"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " epub3 to make an epub3"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | @echo " text to make text files"
35 | @echo " man to make manual pages"
36 | @echo " texinfo to make Texinfo files"
37 | @echo " info to make Texinfo files and run them through makeinfo"
38 | @echo " gettext to make PO message catalogs"
39 | @echo " changes to make an overview of all changed/added/deprecated items"
40 | @echo " xml to make Docutils-native XML files"
41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
42 | @echo " linkcheck to check all external links for integrity"
43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
44 | @echo " coverage to run coverage check of the documentation (if enabled)"
45 | @echo " dummy to check syntax errors of document sources"
46 | 
47 | .PHONY: clean
48 | clean:
49 | rm -rf $(BUILDDIR)/*
50 | 
51 | .PHONY: html
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 | 
57 | .PHONY: dirhtml
58 | dirhtml:
59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | @echo
61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 | 
63 | .PHONY: singlehtml
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 | 
69 | .PHONY: pickle
70 | pickle:
71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | @echo
73 | @echo "Build finished; now you can process the pickle files."
74 | 
75 | .PHONY: json
76 | json:
77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | @echo
79 | @echo "Build finished; now you can process the JSON files."
80 | 
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 | 
88 | .PHONY: qthelp
89 | qthelp:
90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | @echo
92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/L2L.qhcp"
95 | @echo "To view the help file:"
96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/L2L.qhc"
97 | 
98 | .PHONY: applehelp
99 | applehelp:
100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | @echo
102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | @echo "N.B. 
You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/L2L" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/L2L" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 
196 | 
197 | .PHONY: doctest
198 | doctest:
199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | @echo "Testing of doctests in the sources finished, look at the " \
201 | "results in $(BUILDDIR)/doctest/output.txt."
202 | 
203 | .PHONY: coverage
204 | coverage:
205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | @echo "Testing of coverage in the sources finished, look at the " \
207 | "results in $(BUILDDIR)/coverage/python.txt."
208 | 
209 | .PHONY: xml
210 | xml:
211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | @echo
213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 | 
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | @echo
219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 | 
221 | .PHONY: dummy
222 | dummy:
223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | @echo
225 | @echo "Build finished. Dummy builder generates no files."
226 | 
-------------------------------------------------------------------------------- /doc/index.rst: --------------------------------------------------------------------------------
1 | ====
2 | L2L
3 | ====
4 | 
5 | .. toctree::
6 | 
7 | quickstart
8 | intro
9 | l2l
10 | l2l-bin
11 | indices
12 | 
-------------------------------------------------------------------------------- /doc/indices.rst: --------------------------------------------------------------------------------
1 | Indices and tables
2 | ==================
3 | 
4 | * :ref:`genindex`
5 | * :ref:`modindex`
6 | * :ref:`search`
7 | 
-------------------------------------------------------------------------------- /doc/l2l-bin.rst: --------------------------------------------------------------------------------
1 | .. _l2l-experiments:
2 | 
3 | L2L Experiments
4 | ===============
5 | 
6 | This is a template script for setting up an experiment using an arbitrary Optimizer and Optimizee.
7 | For actual examples, see :file:`bin/l2l-fun-ce.py` or :file:`bin/l2l-mnist-ce.py`.
8 | 
9 | .. include:: ../bin/l2l-template.py
10 | :code: python
11 | 
12 | 
-------------------------------------------------------------------------------- /doc/l2l.logging_tools.rst: --------------------------------------------------------------------------------
1 | Logging Tools
2 | =============
3 | 
4 | Module Functions
5 | ----------------
6 | 
7 | .. automodule:: l2l.logging_tools
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 | 
-------------------------------------------------------------------------------- /doc/l2l.optimizees.functions.rst: --------------------------------------------------------------------------------
1 | Optimizee for simple functions
2 | ==============================
3 | The fitness function to optimize is the value of the function.
4 | 
5 | FunctionGeneratorOptimizee
6 | --------------------------
7 | 
8 | .. autoclass:: l2l.optimizees.functions.optimizee.FunctionGeneratorOptimizee
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 | 
13 | 
14 | l2l.optimizees.functions.tools
15 | ------------------------------
16 | Contains some tools used by the functions implementation.
17 | 
18 | .. 
automodule:: l2l.optimizees.functions.tools 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /doc/l2l.optimizees.mnist.rst: -------------------------------------------------------------------------------- 1 | Optimizee for MNIST 2 | =================== 3 | The fitness function to optimize is the MNIST performance. 4 | 5 | MNISTOptimizee 6 | -------------- 7 | 8 | .. autoclass:: l2l.optimizees.mnist.optimizee.MNISTOptimizee 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | 13 | 14 | MNISTOptimizeeParameters 15 | ------------------------ 16 | .. autoclass:: l2l.optimizees.mnist.optimizee.MNISTOptimizeeParameters 17 | :members: 18 | :undoc-members: 19 | -------------------------------------------------------------------------------- /doc/l2l.optimizees.rst: -------------------------------------------------------------------------------- 1 | Optimizees 2 | ========== 3 | 4 | 5 | Optimizee Base Module 6 | --------------------- 7 | 8 | .. autoclass:: l2l.optimizees.optimizee.Optimizee 9 | :members: 10 | :undoc-members: 11 | 12 | 13 | 14 | Implemented examples 15 | -------------------- 16 | 17 | .. toctree:: 18 | :maxdepth: 1 19 | 20 | l2l.optimizees.functions 21 | l2l.optimizees.mnist 22 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.crossentropy.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Cross Entropy 2 | ============================= 3 | 4 | CrossEntropyOptimizer 5 | --------------------- 6 | 7 | .. autoclass:: l2l.optimizers.crossentropy.optimizer.CrossEntropyOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | CrossEntropyParameters 13 | ---------------------- 14 | .. autoclass:: l2l.optimizers.crossentropy.optimizer.CrossEntropyParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | Distributions 20 | ------------- 21 | .. autoclass:: l2l.optimizers.crossentropy.distribution.Distribution 22 | :members: 23 | :undoc-members: 24 | :show-inheritance: 25 | 26 | .. autoclass:: l2l.optimizers.crossentropy.distribution.Gaussian 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | .. autoclass:: l2l.optimizers.crossentropy.distribution.NoisyGaussian 32 | :members: 33 | :undoc-members: 34 | :show-inheritance: 35 | 36 | .. autoclass:: l2l.optimizers.crossentropy.distribution.BayesianGaussianMixture 37 | :members: 38 | :undoc-members: 39 | :show-inheritance: 40 | 41 | .. autoclass:: l2l.optimizers.crossentropy.distribution.NoisyBayesianGaussianMixture 42 | :members: 43 | :undoc-members: 44 | :show-inheritance: 45 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.evolution.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Evolutionary Algorithm 2 | ====================================== 3 | 4 | GeneticAlgorithmOptimizer 5 | ------------------------- 6 | 7 | .. autoclass:: l2l.optimizers.evolution.optimizer.GeneticAlgorithmOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | GeneticAlgorithmParameters 13 | -------------------------- 14 | .. 
autoclass:: l2l.optimizers.evolution.optimizer.GeneticAlgorithmParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.evolutionstrategies.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Evolution Strategies 2 | ==================================== 3 | 4 | EvolutionStrategiesOptimizer 5 | ---------------------------- 6 | 7 | .. autoclass:: l2l.optimizers.evolutionstrategies.optimizer.EvolutionStrategiesOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | EvolutionStrategiesParameters 13 | ----------------------------- 14 | .. autoclass:: l2l.optimizers.evolutionstrategies.optimizer.EvolutionStrategiesParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.face.rst: -------------------------------------------------------------------------------- 1 | Optimizer using FACE 2 | ==================== 3 | 4 | FACEOptimizer 5 | ------------- 6 | 7 | .. autoclass:: l2l.optimizers.face.optimizer.FACEOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | FACEParameters 13 | -------------- 14 | .. autoclass:: l2l.optimizers.face.optimizer.FACEParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.gradientdescent.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Gradient Descent 2 | ================================ 3 | 4 | GradientDescentOptimizer 5 | ------------------------ 6 | 7 | .. autoclass:: l2l.optimizers.gradientdescent.optimizer.GradientDescentOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | ClassicGDParameters 13 | ------------------- 14 | .. autoclass:: l2l.optimizers.gradientdescent.optimizer.ClassicGDParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | StochasticGDParameters 20 | ---------------------- 21 | .. autoclass:: l2l.optimizers.gradientdescent.optimizer.StochasticGDParameters 22 | :members: 23 | :undoc-members: 24 | :show-inheritance: 25 | 26 | AdamParameters 27 | -------------- 28 | .. autoclass:: l2l.optimizers.gradientdescent.optimizer.AdamParameters 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | 33 | RMSPropParameters 34 | ----------------- 35 | .. autoclass:: l2l.optimizers.gradientdescent.optimizer.RMSPropParameters 36 | :members: 37 | :undoc-members: 38 | :show-inheritance: 39 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.gridsearch.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Grid Search 2 | =========================== 3 | 4 | GridSearchOptimizer 5 | ------------------- 6 | 7 | .. autoclass:: l2l.optimizers.gridsearch.optimizer.GridSearchOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | GridSearchParameters 13 | -------------------- 14 | .. 
autoclass:: l2l.optimizers.gridsearch.optimizer.GridSearchParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.naturalevolutionstrategies.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Natural Evolution Strategies 2 | ============================================ 3 | 4 | NaturalEvolutionStrategiesOptimizer 5 | ----------------------------------- 6 | 7 | .. autoclass:: l2l.optimizers.naturalevolutionstrategies.optimizer.NaturalEvolutionStrategiesOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | NaturalEvolutionStrategiesParameters 13 | ------------------------------------ 14 | 15 | .. autoclass:: l2l.optimizers.naturalevolutionstrategies.optimizer.NaturalEvolutionStrategiesParameters 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.rst: -------------------------------------------------------------------------------- 1 | Optimizers 2 | ========== 3 | 4 | 5 | Optimizer Base Module 6 | --------------------- 7 | 8 | .. autoclass:: l2l.optimizers.optimizer.Optimizer 9 | :members: 10 | :undoc-members: 11 | :private-members: 12 | 13 | Implemented Examples 14 | -------------------- 15 | 16 | .. toctree:: 17 | :maxdepth: 1 18 | 19 | l2l.optimizers.crossentropy 20 | l2l.optimizers.face 21 | l2l.optimizers.gradientdescent 22 | l2l.optimizers.gridsearch 23 | l2l.optimizers.evolution 24 | l2l.optimizers.simulatedannealing 25 | l2l.optimizers.evolutionstrategies 26 | l2l.optimizers.naturalevolutionstrategies 27 | 28 | -------------------------------------------------------------------------------- /doc/l2l.optimizers.simulatedannealing.rst: -------------------------------------------------------------------------------- 1 | Optimizer using Simulated Annealing 2 | =================================== 3 | 4 | SimulatedAnnealingOptimizer 5 | --------------------------- 6 | 7 | .. autoclass:: l2l.optimizers.simulatedannealing.optimizer.SimulatedAnnealingOptimizer 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | SimulatedAnnealingParameters 13 | ---------------------------- 14 | .. autoclass:: l2l.optimizers.simulatedannealing.optimizer.SimulatedAnnealingParameters 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /doc/l2l.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. toctree:: 5 | 6 | l2l.optimizees 7 | l2l.optimizers 8 | l2l.utils 9 | l2l.logging_tools 10 | 11 | 12 | Other module functions 13 | ---------------------- 14 | 15 | .. autoclass:: l2l.sdict 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | 20 | .. autoclass:: l2l.sdictm 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | .. autoclass:: l2l.DictEntryType 26 | :members: 27 | :undoc-members: 28 | :show-inheritance: 29 | 30 | .. autofunction:: l2l.list_to_dict 31 | 32 | .. autofunction:: l2l.dict_to_list 33 | 34 | .. autofunction:: l2l.stdout_redirected 35 | 36 | .. autofunction:: l2l.stdout_discarded 37 | 38 | .. 
autofunction:: l2l.convert_dict_to_numpy
39 | 
-------------------------------------------------------------------------------- /doc/l2l.utils.rst: --------------------------------------------------------------------------------
1 | Simulation control
2 | ==================
3 | 
4 | Trajectory
5 | ----------
6 | 
7 | .. autoclass:: l2l.utils.trajectory.Trajectory
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 | 
12 | Individual
13 | ----------
14 | 
15 | .. autoclass:: l2l.utils.individual.Individual
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 | 
20 | ParameterGroup
21 | --------------
22 | 
23 | .. autoclass:: l2l.utils.groups.ParameterGroup
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 | 
28 | Environment
29 | -------------
30 | 
31 | .. autoclass:: l2l.utils.environment.Environment
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 | 
36 | Experiment
37 | -------------
38 | 
39 | .. autoclass:: l2l.utils.experiment.Experiment
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 | 
44 | Runner
45 | -------------
46 | .. autoclass:: l2l.utils.runner.Runner
47 | :members:
48 | :undoc-members:
49 | :show-inheritance:
-------------------------------------------------------------------------------- /doc/quickstart.rst: --------------------------------------------------------------------------------
1 | Quickstart
2 | ==========
3 | 
4 | Running an experiment
5 | +++++++++++++++++++++
6 | 
7 | With existing implementations of optimizees and optimizers:
8 | 
9 | * See :ref:`l2l-experiments` for an example implementation of an L2L experiment with an arbitrary `Optimizee` and
10 | `Optimizer`. The source code also contains `many examples `_ of
11 | scripts for various combinations of Optimizees and Optimizers.
12 | * See :ref:`data-postprocessing` for details on how to use the generated data for plots and analysis.
13 | 
14 | Writing Optimizees and Optimizers
15 | +++++++++++++++++++++++++++++++++
16 | 
17 | * See :class:`~l2l.optimizees.functions.optimizee.FunctionGeneratorOptimizee` for an example of an `Optimizee` (based on simple
18 | function minimization).
19 | * See :class:`~l2l.optimizers.crossentropy.optimizer.CrossEntropyOptimizer` for an example implementation of a
20 | cross-entropy `Optimizer`.
21 | 
-------------------------------------------------------------------------------- /doc/requirements.txt: --------------------------------------------------------------------------------
1 | sphinx
2 | numpy
3 | scikit-learn
4 | sphinx-rtd-theme
5 | -e .
6 | 
-------------------------------------------------------------------------------- /l2l/logging_tools.py: --------------------------------------------------------------------------------
1 | import os
2 | import logging
3 | import logging.config
4 | import socket
5 | import copy
6 | 
7 | 
8 | def create_shared_logger_data(logger_names, log_levels, log_to_consoles,
9 | sim_name, log_directory):
10 | """
11 | This function must be called to create a shared copy of information that will be
12 | required to set up logging across processes. This must be run exactly once in the
13 | root process.
14 | 
15 | :param logger_names: This is a list of names of the loggers whose output you're
16 | interested in.
17 | 
18 | :param log_levels: This is a list of the same size as `logger_names` containing
19 | the log levels specified as strings (e.g. 'INFO', 'DEBUG', 'WARNING', 'ERROR').
20 | 
21 | :param log_to_consoles: This is a list of the same size as `logger_names` containing
22 | boolean values which indicate whether to redirect the output of the
23 | corresponding logger to stdout. Note that with scoop, any output to stdout on any
24 | worker gets directed to the console of the main process.
25 | 
26 | :param sim_name: This is a string that is used when creating the log files.
27 | Short for simulation name.
28 | 
29 | :param log_directory: This is the path of the directory in which the log files will
30 | be stored. This directory must be an existing directory.
31 | """
32 | 
33 | # process / validate input
34 | assert len(logger_names) == len(log_levels) == len(log_to_consoles), \
35 | "The sizes of logger_names, log_levels, log_to_consoles are inconsistent"
36 | assert all(isinstance(x, str) for x in logger_names + log_levels), \
37 | "'logger_names' and 'log_levels' must be lists of strings"
38 | assert os.path.isdir(log_directory), "The log_directory {} is not a valid log directory".format(log_directory)
39 | 
40 | log_to_consoles = [bool(x) for x in log_to_consoles]
41 | 
42 | global logger_names_global, log_levels_global, log_to_consoles_global
43 | global sim_name_global, log_directory_global
44 | logger_names_global = logger_names
45 | log_levels_global = log_levels
46 | log_to_consoles_global = log_to_consoles
47 | sim_name_global = sim_name
48 | log_directory_global = log_directory
49 | 
50 | 
51 | def configure_loggers(exactly_once=False):
52 | """
53 | This function configures the loggers using the shared information that was set by
54 | :func:`.create_shared_logger_data`. This function must be run at the beginning
55 | of every function that is parallelized in order to be able to reliably
56 | configure the loggers. As an example, look at its usage in the method
57 | :meth:`~.FunctionGeneratorOptimizee.simulate()` from the
58 | class :class:`~.FunctionGeneratorOptimizee`.
59 | 
60 | You may also wish to call this function in your main simulation (after calling
61 | :func:`.create_shared_logger_data`) to configure the logging for the root process
62 | before any of the parallelized functions are run.
63 | 
64 | :param exactly_once: If the configuration of logging is causing a significant
65 | overhead per parallelized run (this is a rather unlikely scenario), then this
66 | value may be set to `True`. When True, the function will configure the loggers
67 | exactly once per scoop worker.
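A minimal usage sketch of the two functions (the logger names, simulation name and
log directory below are hypothetical)::

    create_shared_logger_data(logger_names=['optimizers', 'bin'],
                              log_levels=['INFO', 'INFO'],
                              log_to_consoles=[True, True],
                              sim_name='L2L-run',
                              log_directory='./logs')
    configure_loggers()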
68 | """
69 | 
70 | if exactly_once and configure_loggers._already_configured:
71 | return
72 | 
73 | # Scoop logging has been removed, as JUBE takes care of the logging of each iteration
74 | # Get the logger data from the global variables and configure the loggers accordingly
75 | logger_names = logger_names_global
76 | log_levels = log_levels_global
77 | log_to_consoles = log_to_consoles_global
78 | sim_name = sim_name_global
79 | log_directory = log_directory_global
80 | 
81 | file_name_prefix = '%s_' % (sim_name,)
82 | 
83 | config_dict_copy = copy.deepcopy(configure_loggers.basic_config_dict)
84 | 
85 | config_dict_copy['loggers'] = {}
86 | 
87 | # Configuring the output files
88 | log_fname = os.path.join(log_directory,
89 | file_name_prefix + config_dict_copy['handlers']['file_log']['filename'])
90 | error_fname = os.path.join(log_directory,
91 | file_name_prefix + config_dict_copy['handlers']['file_error']['filename'])
92 | config_dict_copy['handlers']['file_log']['filename'] = log_fname
93 | config_dict_copy['handlers']['file_error']['filename'] = error_fname
94 | 
95 | # Creating logger entries
96 | for logger_name, log_level, log_to_console in zip(logger_names, log_levels, log_to_consoles):
97 | config_dict_copy['loggers'][logger_name] = {}
98 | logger_dict = config_dict_copy['loggers'][logger_name]
99 | logger_dict['level'] = log_level
100 | if log_to_console:
101 | logger_dict['handlers'] = ['console', 'file_log', 'file_error']
102 | else:
103 | logger_dict['handlers'] = ['file_log', 'file_error']
104 | 
105 | logging.config.dictConfig(config_dict_copy)
106 | configure_loggers._already_configured = True
107 | 
108 | 
109 | configure_loggers._already_configured = False
110 | configure_loggers.basic_config_dict = {
111 | 'version': 1,
112 | 'formatters': {
113 | 'file': {
114 | 'format': '%(asctime)s %(name)s {} %(process)d %(levelname)-8s: %(message)s'.format(socket.gethostname())
115 | },
116 | 'stream': {
117 | 'format': '%(processName)-10s %(name)s {} %(process)d %(levelname)-8s: %(message)s'.format(
118 | socket.gethostname())
119 | },
120 | },
121 | 'handlers': {
122 | 'console': {
123 | 'class': 'logging.StreamHandler',
124 | 'stream': 'ext://sys.stdout',
125 | 'formatter': 'stream',
126 | },
127 | 'file_log': {
128 | 'class': 'logging.FileHandler',
129 | 'formatter': 'file',
130 | 'filename': 'LOG.txt',
131 | },
132 | 'file_error': {
133 | 'class': 'logging.FileHandler',
134 | 'formatter': 'file',
135 | 'filename': 'ERROR.txt',
136 | 'level': 'ERROR',
137 | },
138 | },
139 | 'loggers': {},
140 | 'root': {
141 | # 'level': 'INFO',
142 | 'handlers': []
143 | }
144 | }
145 | 
-------------------------------------------------------------------------------- /l2l/matplotlib_.py: --------------------------------------------------------------------------------
1 | """
2 | This is a utility module that imports matplotlib and does the right things, e.g. suppress warnings and set the right
3 | matplotlib backend. 
In the rest of the code always use ``from l2l.matplotlib_ import plt`` to get the equivalent of 4 | ```import matplotlib.pyplot as plt``` 5 | """ 6 | 7 | import warnings 8 | 9 | with warnings.catch_warnings(): 10 | warnings.simplefilter("ignore") 11 | import matplotlib 12 | 13 | # matplotlib.rcParams.update({'text.usetex': False}) 14 | # matplotlib.use('gtk3agg') 15 | # matplotlib.use('qt4agg') 16 | # matplotlib.use('tkagg') 17 | # matplotlib.use('svg') 18 | matplotlib.use('agg') 19 | import matplotlib.pyplot as plt_ 20 | 21 | plt = plt_ 22 | -------------------------------------------------------------------------------- /l2l/optimizees/README.rst: -------------------------------------------------------------------------------- 1 | The Optimizees are implemented in other packages according to the guidelines in the wiki about `Writing new Optimizees `_. 2 | 3 | For example, see the `Liquid State Machine Optimizee package `_. -------------------------------------------------------------------------------- /l2l/optimizees/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meta-optimization/L2L/54d473aa36e5e14b5f2b7c3ed8d5c547b475e33c/l2l/optimizees/__init__.py -------------------------------------------------------------------------------- /l2l/optimizees/active_wait/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizee_aw import AWOptimizee, AWOptimizeeParameters 2 | 3 | __all__ = ['AWOptimizee', 'AWOptimizeeParameters'] 4 | -------------------------------------------------------------------------------- /l2l/optimizees/active_wait/optimizee_aw.py: -------------------------------------------------------------------------------- 1 | import time 2 | from collections import namedtuple 3 | from l2l.optimizees.optimizee import Optimizee 4 | 5 | AWOptimizeeParameters = namedtuple( 6 | 'AWOptimizeeParameters', ['difficulty']) 7 | 8 | 9 | class AWOptimizee(Optimizee): 10 | def __init__(self, traj, parameters): 11 | super().__init__(traj) 12 | self.difficulty = parameters.difficulty 13 | self.ind_idx = traj.individual.ind_idx 14 | self.generation = traj.individual.generation 15 | self.bound = [self.difficulty, self.difficulty] 16 | 17 | def create_individual(self): 18 | """ 19 | Creates and returns the individual 20 | """ 21 | # create individual 22 | individual = {'difficulty': self.difficulty} 23 | return individual 24 | 25 | def is_prime(self, n): 26 | if n <= 1: 27 | return False 28 | for i in range(2, int(n**0.5) + 1): 29 | if n % i == 0: 30 | return False 31 | return True 32 | 33 | def bounding_func(self, individual): 34 | return individual 35 | 36 | def simulate(self, traj): 37 | """ 38 | Calculates primes and returns fitness=0 39 | """ 40 | self.ind_idx = traj.individual.ind_idx 41 | self.generation = traj.individual.generation 42 | 43 | # Active wait by calculating all primes up to 'difficulty' 44 | primes = [] 45 | 46 | for number in range(1, int(self.difficulty)): 47 | if self.is_prime(number): 48 | primes.append(number) 49 | 50 | fitness = 0 51 | return (fitness,) 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /l2l/optimizees/functions/__init__.py: -------------------------------------------------------------------------------- 1 | from .benchmarked_functions import BenchmarkedFunctions 2 | from .optimizee import FunctionGeneratorOptimizee 3 | from .function_generator 
import FunctionGenerator
4 | 
5 | __all__ = ['FunctionGenerator', 'FunctionGeneratorOptimizee', 'BenchmarkedFunctions']
6 | 
-------------------------------------------------------------------------------- /l2l/optimizees/functions/benchmarked_functions.py: --------------------------------------------------------------------------------
1 | from l2l.optimizees.functions.function_generator import FunctionGenerator, GaussianParameters, \
2 | MichalewiczParameters, ShekelParameters, EasomParameters, LangermannParameters, \
3 | RastriginParameters, ChasmParameters, RosenbrockParameters, AckleyParameters, PermutationParameters
4 | from collections import OrderedDict
5 | 
6 | 
7 | class BenchmarkedFunctions:
8 | """
9 | Implements a benchmarked-functions class for easier access to the benchmarked functions
10 | 
11 | """
12 | def __init__(self):
13 | self.function_name_map = [("Rastrigin2d", self._create_rastrigin2d),
14 | ("Rastrigin10d", self._create_rastrigin10d),
15 | ("Rosenbrock2d", self._create_rosenbrock2d),
16 | ("Rosenbrock10d", self._create_rosenbrock10d),
17 | ("Ackley2d", self._create_ackley2d),
18 | ("Ackley10d", self._create_ackley10d),
19 | ("Chasm", self._create_chasm),
20 | ("Gauss2d", self._create_gauss2d),
21 | ("Shekel2d", self._create_shekel2d),
22 | ("Langermann2d", self._create_langermann),
23 | ("Michalewicz2d", self._create_michalewicz2d),
24 | ("Permutation2d", self._create_permutation2d),
25 | ("Easom2d", self._create_easom2d),
26 | ("Easom10d", self._create_easom10d),
27 | ("3Gaussians2d", self._create_3gaussians2d)]
28 | self.function_name_index_map = OrderedDict([(name, index)
29 | for index, (name, _) in enumerate(self.function_name_map)])
30 | 
31 | def get_function_by_index(self, id_, noise=False, mu=0., sigma=0.01):
32 | """
33 | Get the benchmarked function with the given id
34 | :param id_: Function id
35 | :param noise: Indicates whether the function should provide noisy values
36 | :param mu: mean of the noise
37 | :param sigma: covariance of the noise
38 | 
39 | :return function_name_map entry for the given id and the parameters for the benchmark
40 | """
41 | 
42 | # first update the noise for the given function
43 | return_function_name, return_function_creator = self.function_name_map[id_]
44 | return_function = return_function_creator(noise, mu, sigma)
45 | 
46 | return (return_function_name, return_function), self.get_params(return_function, id_)
47 | 
48 | def get_function_by_name(self, name, noise=False, mu=0., sigma=0.01):
49 | """
50 | Get the benchmarked function with the given name
51 | :param name: Function name in self.function_name_map
52 | :param noise: Indicates whether the function should provide noisy values
53 | :param mu: mean of the noise
54 | :param sigma: covariance of the noise
55 | 
56 | :return function_name_map entry for the given name and the parameters for the benchmark
57 | """
58 | try:
59 | id_ = self.function_name_index_map[name]
60 | except KeyError:
61 | raise ValueError('There exists no function by name {}'.format(name))
62 | return_function_name, return_function_creator = self.function_name_map[id_]
63 | return_function = return_function_creator(noise, mu, sigma)
64 | 
65 | return (return_function_name, return_function), self.get_params(return_function, id_)
66 | 
67 | def get_params(self, fg_object, id):
68 | params_dict_items = [("benchmark_id", id)]
69 | function_params_items = fg_object.get_params().items()
70 | params_dict_items += function_params_items
71 | return OrderedDict(params_dict_items)
72 | 
73 | def _create_rastrigin2d(self, 
noise, mu, sigma): 74 | return FunctionGenerator([RastriginParameters()], 75 | dims=2, noise=noise, mu=mu, sigma=sigma) 76 | 77 | def _create_rastrigin10d(self, noise, mu, sigma): 78 | return FunctionGenerator([RastriginParameters()], 79 | dims=10, noise=noise, mu=mu, sigma=sigma) 80 | 81 | def _create_rosenbrock2d(self, noise, mu, sigma): 82 | return FunctionGenerator([RosenbrockParameters()], 83 | dims=2, noise=noise, mu=mu, sigma=sigma) 84 | 85 | def _create_rosenbrock10d(self, noise, mu, sigma): 86 | return FunctionGenerator([RosenbrockParameters()], 87 | dims=10, noise=noise, mu=mu, sigma=sigma) 88 | 89 | def _create_ackley2d(self, noise, mu, sigma): 90 | return FunctionGenerator([AckleyParameters()], 91 | dims=2, noise=noise, mu=mu, sigma=sigma) 92 | 93 | def _create_ackley10d(self, noise, mu, sigma): 94 | return FunctionGenerator([AckleyParameters()], 95 | dims=10, noise=noise, mu=mu, sigma=sigma) 96 | 97 | def _create_chasm(self, noise, mu, sigma): 98 | return FunctionGenerator([ChasmParameters()], 99 | dims=2, noise=noise, mu=mu, sigma=sigma) 100 | 101 | def _create_gauss2d(self, noise, mu, sigma): 102 | return FunctionGenerator([GaussianParameters(sigma=[[1.5, .1], [.1, .3]], mean=[-1., -1.])], 103 | dims=2, noise=noise, mu=mu, sigma=sigma) 104 | 105 | def _create_shekel2d(self, noise, mu, sigma): 106 | return FunctionGenerator([ShekelParameters(A='default', c='default')], 107 | dims=2, noise=noise, mu=mu, sigma=sigma) 108 | 109 | def _create_michalewicz2d(self, noise, mu, sigma): 110 | return FunctionGenerator([MichalewiczParameters(m='default')], 111 | dims=2, noise=noise, mu=mu, sigma=sigma) 112 | 113 | def _create_permutation2d(self, noise, mu, sigma): 114 | return FunctionGenerator([PermutationParameters(beta=0.005)], 115 | dims=2, noise=noise, mu=mu, sigma=sigma) 116 | 117 | def _create_easom2d(self, noise, mu, sigma): 118 | return FunctionGenerator([EasomParameters()], 119 | dims=2, noise=noise, mu=mu, sigma=sigma) 120 | 121 | def _create_easom10d(self, noise, mu, sigma): 122 | return FunctionGenerator([EasomParameters()], 123 | dims=10, noise=noise, mu=mu, sigma=sigma) 124 | 125 | def _create_langermann(self, noise, mu, sigma): 126 | return FunctionGenerator([LangermannParameters(A='default', c='default')], 127 | dims=2, noise=noise, mu=mu, sigma=sigma) 128 | 129 | def _create_3gaussians2d(self, noise, mu, sigma): 130 | fg_params = [GaussianParameters(sigma=[[1.5, .1], [.1, .3]], mean=[-1., -1.]), 131 | GaussianParameters(sigma=[[.25, .3], [.3, 1.]], mean=[1., 1.]), 132 | GaussianParameters(sigma=[[.5, .25], [.25, 1.3]], mean=[2., -2.])] 133 | return FunctionGenerator(fg_params, dims=2, noise=noise, mu=mu, sigma=sigma) 134 | -------------------------------------------------------------------------------- /l2l/optimizees/functions/optimizee.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from l2l.optimizees.optimizee import Optimizee 4 | 5 | 6 | class FunctionGeneratorOptimizee(Optimizee): 7 | """ 8 | Implements a simple function optimizee. Functions are generated using the FunctionGenerator. 9 | NOTE: Make sure the optimizee_fitness_weights is set to (-1,) to minimize the value of the function 10 | 11 | :param traj: The trajectory used to conduct the optimization. 12 | :param fg_instance: Instance of the FunctionGenerator class. 13 | :param seed: The random seed used for generation of optimizee individuals. 
It uses a copy of
14 | the fg_instance and overrides the random generator using one seeded by `seed`. Note that this
15 | random generator is also the one used by the :class:`.FunctionGeneratorOptimizee` itself.
16 | NOTE that this seed is converted to an np.uint32.
17 | """
18 | 
19 | def __init__(self, traj, fg_instance, seed):
20 | super().__init__(traj)
21 | 
22 | seed = np.uint32(seed)
23 | self.random_state = np.random.RandomState(seed=seed)
24 | 
25 | self.fg_instance = fg_instance
26 | self.dims = self.fg_instance.dims
27 | self.cost_fn = self.fg_instance.cost_function
28 | self.bound = self.fg_instance.bound
29 | 
30 | 
31 | def create_individual(self):
32 | """
33 | Creates a random value of parameter within given bounds
34 | """
35 | # Define the first solution candidate randomly
36 | return {'coords': self.random_state.rand(self.dims) * (self.bound[1] - self.bound[0]) + self.bound[0]}
37 | 
38 | def bounding_func(self, individual):
39 | """
40 | Bounds the individual within the required bounds via coordinate clipping
41 | """
42 | return {'coords': np.clip(individual['coords'], a_min=self.bound[0], a_max=self.bound[1])}
43 | 
44 | def simulate(self, traj):
45 | """
46 | Returns the value of the function chosen during initialization
47 | 
48 | :param ~l2l.utils.trajectory.Trajectory traj: Trajectory
49 | :return: a single element :obj:`tuple` containing the value of the chosen function
50 | """
51 | 
52 | individual = np.array(traj.individual.coords)
53 | return (self.cost_fn(individual, random_state=self.random_state), )
54 | 
-------------------------------------------------------------------------------- /l2l/optimizees/functions/tools.py: --------------------------------------------------------------------------------
1 | def plot(fn, random_state):
2 | """
3 | Implements plotting of 2D functions generated by FunctionGenerator
4 | :param fn: Instance of FunctionGenerator
5 | """
6 | import numpy as np
7 | from l2l.matplotlib_ import plt
8 | from mpl_toolkits.mplot3d import Axes3D
9 | from matplotlib import cm
10 | from matplotlib.ticker import LinearLocator, FormatStrFormatter
11 | 
12 | fig = plt.figure()
13 | ax = fig.add_subplot(projection=Axes3D.name)  # passing projection to fig.gca() is not supported by recent matplotlib
14 | 
15 | # Make data.
16 | X = np.arange(fn.bound[0], fn.bound[1], 0.05)
17 | Y = np.arange(fn.bound[0], fn.bound[1], 0.05)
18 | XX, YY = np.meshgrid(X, Y)
19 | Z = [fn.cost_function([x, y], random_state=random_state) for x, y in zip(XX.ravel(), YY.ravel())]
20 | Z = np.array(Z).reshape(XX.shape)
21 | 
22 | # Plot the surface.
23 | surf = ax.plot_surface(XX, YY, Z, cmap=cm.coolwarm, linewidth=0, antialiased=False)
24 | 
25 | # Customize the z axis.
26 | # ax.set_zlim(-1.01, 1.01)
27 | ax.zaxis.set_major_locator(LinearLocator(10))
28 | ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
29 | W = np.where(Z == np.min(Z))
30 | ax.set(title='Min value is %.2f at (%.2f, %.2f)' % (np.min(Z), X[W[1]], Y[W[0]]))  # rows of Z index Y, columns index X
31 | 
32 | # Add a color bar which maps values to colors. 
33 | fig.colorbar(surf, shrink=0.5, aspect=5)
34 | plt.savefig('function.png')
35 | plt.show()
36 | 
-------------------------------------------------------------------------------- /l2l/optimizees/mnist/__init__.py: --------------------------------------------------------------------------------
1 | from .optimizee import MNISTOptimizee, MNISTOptimizeeParameters
2 | 
3 | __all__ = ['MNISTOptimizee', 'MNISTOptimizeeParameters']
4 | 
-------------------------------------------------------------------------------- /l2l/optimizees/mnist/nn.py: --------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | 
4 | def sigmoid(x):
5 | """Compute sigmoid function for each element in x."""
6 | return 1.0 / (1.0 + np.exp(-x))
7 | 
8 | 
9 | def relu(x):
10 | """ Compute RELU """
11 | result = x.copy()
12 | result[result < 0] = 0.
13 | return result
14 | 
15 | 
16 | def softmax(a):
17 | """
18 | Compute softmax over the last dimension
19 | :param a:
20 | :return:
21 | """
22 | exp_a = np.exp(a - np.max(a))
23 | return exp_a / np.sum(exp_a, axis=-1)
24 | 
25 | 
26 | class NeuralNetworkClassifier:
27 | def __init__(self, n_input, n_hidden, n_output):
28 | """
29 | 
30 | :param n_input:
31 | :param n_hidden:
32 | :param n_output:
33 | """
34 | self.n_input, self.n_hidden, self.n_output = n_input, n_hidden, n_output
35 | self.hidden_weights = np.zeros((self.n_hidden, self.n_input))
36 | self.output_weights = np.zeros((self.n_output, self.n_hidden))
37 | 
38 | def get_weights_shapes(self):
39 | """
40 | :return: A list of 2 tuples for each layer of the network
41 | """
42 | return [(self.n_hidden, self.n_input), (self.n_output, self.n_hidden)]
43 | 
44 | def set_weights(self, hidden_weights, output_weights):
45 | self.hidden_weights[:] = hidden_weights
46 | self.output_weights[:] = output_weights
47 | 
48 | def score(self, x, y):
49 | """
50 | 
51 | :param x: batch_size x n_input size
52 | :param y: batch_size size
53 | :return:
54 | """
55 | hidden_activation = sigmoid(np.dot(self.hidden_weights, x.T)) # -> n_hidden x batch_size
56 | output_activation = np.dot(self.output_weights, hidden_activation) # -> n_output x batch_size
57 | output_labels = np.argmax(output_activation, axis=0) # -> batch_size
58 | assert y.shape == output_labels.shape, "The shapes of y and output labels are %s, %s" % (y.shape, output_labels.shape)
59 | n_correct = np.count_nonzero(y == output_labels)
60 | n_total = len(y)
61 | score = n_correct / n_total
62 | return score
63 | 
64 | 
65 | def main():
66 | from sklearn.datasets import load_digits, fetch_openml
67 | 
68 | SMALL_MNIST = False
69 | 
70 | if SMALL_MNIST:
71 | mnist_digits = load_digits()
72 | n_input = np.prod(mnist_digits.images.shape[1:])
73 | n_images = len(mnist_digits.images) # 1797
74 | data_images = mnist_digits.images.reshape(n_images, -1) / 16. # -> 1797 x 64
75 | data_targets = mnist_digits.target
76 | # im_size_x, im_size_y = 8, 8
77 | else:
78 | mnist_digits = fetch_openml('mnist_784', version=1, as_frame=False)
79 | n_input = np.prod(mnist_digits.data.shape[1:])
80 | data_images = mnist_digits.data / 255. # -> 70000 x 784
81 | data_targets = mnist_digits.target.astype(int)  # OpenML returns the labels as strings
82 | # im_size_x, im_size_y = 28, 28
83 | 
84 | n_hidden, n_output = 5, 10
85 | nn = NeuralNetworkClassifier(n_input, n_hidden, n_output)
86 | weight_shapes = nn.get_weights_shapes()
87 | weights = []
88 | for weight_shape in weight_shapes:
89 | weights.append(np.random.randn(*weight_shape))
90 | nn.set_weights(*weights)
91 | score = nn.score(data_images, data_targets)
92 | print("Score is: ", score)
93 | 
94 | 
95 | if __name__ == '__main__':
96 | main()
97 | 
-------------------------------------------------------------------------------- /l2l/optimizees/mnist/optimizee.py: --------------------------------------------------------------------------------
1 | from collections import namedtuple
2 | 
3 | import numpy as np
4 | from sklearn.datasets import load_digits, fetch_openml
5 | 
6 | from l2l.optimizees.optimizee import Optimizee
7 | from .nn import NeuralNetworkClassifier
8 | 
9 | MNISTOptimizeeParameters = namedtuple('MNISTOptimizeeParameters', ['n_hidden', 'seed', 'use_small_mnist'])
10 | 
11 | 
12 | class MNISTOptimizee(Optimizee):
13 | """
14 | Implements an optimizee that tunes the weights of a small feed-forward network classifying MNIST digits.
15 | NOTE: the fitness returned is the classification accuracy, so the optimizee_fitness_weights should be positive (e.g. (1.,)) to maximize it
16 | 
17 | :param traj:
18 | The trajectory used to conduct the optimization.
19 | 
20 | :param parameters:
21 | Instance of :func:`~collections.namedtuple` :class:`.MNISTOptimizeeParameters`
22 | 
23 | """
24 | 
25 | def __init__(self, traj, parameters):
26 | super().__init__(traj)
27 | 
28 | if parameters.use_small_mnist:
29 | # 8 x 8 images
30 | mnist_digits = load_digits()
31 | n_input = np.prod(mnist_digits.images.shape[1:])
32 | n_images = len(mnist_digits.images) # 1797
33 | data_images = mnist_digits.images.reshape(n_images, -1) / 16. # -> 1797 x 64
34 | data_targets = mnist_digits.target
35 | else:
36 | # 28 x 28 images
37 | mnist_digits = fetch_openml('mnist_784', version=1, as_frame=False)
38 | n_input = np.prod(mnist_digits.data.shape[1:])
39 | data_images = mnist_digits.data / 255. 
# -> 70000 x 784
40 | n_images = len(data_images)
41 | data_targets = mnist_digits.target.astype(int)  # OpenML returns the labels as strings
42 | 
43 | self.n_images = n_images
44 | self.data_images, self.data_targets = data_images, data_targets
45 | 
46 | seed = parameters.seed
47 | n_hidden = parameters.n_hidden
48 | 
49 | seed = np.uint32(seed)
50 | self.random_state = np.random.RandomState(seed=seed)
51 | 
52 | n_output = 10 # This is always true for mnist
53 | self.nn = NeuralNetworkClassifier(n_input, n_hidden, n_output)
54 | 
55 | 
56 | 
57 | # create_individual can be called because __init__ is complete except for traj initialization
58 | indiv_dict = self.create_individual()
59 | for key, val in indiv_dict.items():
60 | traj.individual.f_add_parameter(key, val)
61 | traj.individual.f_add_parameter('seed', seed)
62 | 
63 | def create_individual(self):
64 | """
65 | Creates a random value of parameter within given bounds
66 | """
67 | 
68 | weight_shapes = self.nn.get_weights_shapes()
69 | cumulative_num_weights_per_layer = np.cumsum([np.prod(weight_shape) for weight_shape in weight_shapes])
70 | 
71 | flattened_weights = np.empty(cumulative_num_weights_per_layer[-1])
72 | for i, weight_shape in enumerate(weight_shapes):
73 | if i == 0:
74 | flattened_weights[:cumulative_num_weights_per_layer[i]] = \
75 | self.random_state.randn(np.prod(weight_shape)) / np.sqrt(weight_shape[1])
76 | else:
77 | flattened_weights[cumulative_num_weights_per_layer[i - 1]:cumulative_num_weights_per_layer[i]] = \
78 | self.random_state.randn(np.prod(weight_shape)) / np.sqrt(weight_shape[1])
79 | 
80 | # return dict(weights=self.random_state.randn(cumulative_num_weights_per_layer[-1]))
81 | return dict(weights=flattened_weights)
82 | 
83 | def bounding_func(self, individual):
84 | """
85 | Bounds the individual within the required bounds via coordinate clipping
86 | """
87 | return individual
88 | 
89 | def simulate(self, traj):
90 | """
91 | Returns the classification score of the network with the weights given in the individual
92 | 
93 | :param ~l2l.utils.trajectory.Trajectory traj: Trajectory
94 | :return: a single element :obj:`tuple` containing the score
95 | """
96 | flattened_weights = traj.individual.weights
97 | weight_shapes = self.nn.get_weights_shapes()
98 | 
99 | cumulative_num_weights_per_layer = np.cumsum([np.prod(weight_shape) for weight_shape in weight_shapes])
100 | 
101 | weights = []
102 | for i, weight_shape in enumerate(weight_shapes):
103 | if i == 0:
104 | w = flattened_weights[:cumulative_num_weights_per_layer[i]].reshape(weight_shape)
105 | else:
106 | w = flattened_weights[
107 | cumulative_num_weights_per_layer[i - 1]:cumulative_num_weights_per_layer[i]].reshape(weight_shape)
108 | weights.append(w)
109 | 
110 | self.nn.set_weights(*weights)
111 | return (self.nn.score(self.data_images, self.data_targets), )
-------------------------------------------------------------------------------- /l2l/optimizees/optimizee.py: --------------------------------------------------------------------------------
1 | from collections import namedtuple
2 | 
3 | OptimizeeParameters = namedtuple('OptimizeeParameters', [])
4 | 
5 | 
6 | class Optimizee:
7 | """
8 | This is the base class for the Optimizees, i.e. the inner loop algorithms. Often, these are the implementations that
9 | interact with the environment. Given a set of parameters, it runs the simulation and returns the fitness achieved
10 | with those parameters.
11 | """
12 | 
13 | def __init__(self, traj):
14 | """
15 | This is the base class init function. 
Any implementation must add its parameters
16 | to this trajectory under the parameter group 'individual', which is created here in the base class. It is
17 | especially necessary to add all explored parameters (i.e. parameters that are returned via create_individual) to
18 | the trajectory.
19 | """
20 | traj.f_add_parameter_group('individual', 'Contains parameters of the optimizee')
21 | 
22 | def create_individual(self):
23 | """
24 | Create one individual i.e. one instance of parameters. This instance must be a dictionary with dot-separated
25 | parameter names as keys and parameter values as values. This is used by the optimizers via the
26 | function create_individual() to initialize the individual/parameters. After that, the change in parameters is
27 | model specific, e.g. in simulated annealing it is perturbed based on specific criteria
28 | 
29 | :return dict: A dictionary containing the names of the parameters and their values
30 | """
31 | 
32 | def simulate(self, traj):
33 | """
34 | This is the primary function that does the simulation for the given parameters (within :obj:`traj`)
35 | 
36 | :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the
37 | individual that we want to simulate. The individual is accessible using `traj.individual` and a parameter e.g.
38 | param1 is accessible using `traj.param1`
39 | 
40 | :return: a :class:`tuple` containing the fitness values of the current run. The :class:`tuple` allows a
41 | multi-dimensional fitness function.
42 | 
43 | """
44 | 
-------------------------------------------------------------------------------- /l2l/optimizees/test_cases/__init__.py: --------------------------------------------------------------------------------
1 | from .optimizee_testcase import TestcaseOptimizee, TestcaseOptimizeeParameters
2 | 
3 | 
4 | __all__ = ['TestcaseOptimizee', 'TestcaseOptimizeeParameters']
-------------------------------------------------------------------------------- /l2l/optimizees/test_cases/optimizee_testcase.py: --------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from collections import namedtuple
4 | 
5 | from l2l.optimizees.optimizee import Optimizee
6 | 
7 | TestcaseOptimizeeParameters = namedtuple(
8 | 'TestcaseOptimizeeParameters', ['exit_code'])
9 | 
10 | class TestcaseOptimizee(Optimizee):
11 | def __init__(self, traj, parameters):
12 | super().__init__(traj)
13 | self.exit_code = parameters.exit_code
14 | self.ind_idx = traj.individual.ind_idx
15 | self.generation = traj.individual.generation
16 | self.bound = [self.exit_code, self.exit_code]
17 | 
18 | def create_individual(self):
19 | """
20 | Creates and returns the individual
21 | """
22 | individual = {'exit_code': float(self.exit_code)}
23 | return individual
24 | 
25 | def bounding_func(self, individual):
26 | return individual
27 | 
28 | def simulate(self, traj):
29 | """
30 | Simulates a crash (exits with the given exit code until retried twice) and then returns fitness = 0
31 | """
32 | self.ind_idx = traj.individual.ind_idx
33 | self.generation = traj.individual.generation
34 | if traj.retry < 2:
35 | os._exit(self.exit_code)
36 | fitness = 0
37 | return (fitness,)
-------------------------------------------------------------------------------- /l2l/optimizers/__init__.py: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meta-optimization/L2L/54d473aa36e5e14b5f2b7c3ed8d5c547b475e33c/l2l/optimizers/__init__.py 
-------------------------------------------------------------------------------- /l2l/optimizers/crossentropy/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import CrossEntropyParameters, CrossEntropyOptimizer 2 | 3 | __all__ = ['CrossEntropyParameters', 'CrossEntropyOptimizer'] 4 | -------------------------------------------------------------------------------- /l2l/optimizers/evolution/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import GeneticAlgorithmParameters 2 | from .optimizer import GeneticAlgorithmOptimizer 3 | 4 | __all__ = [ 5 | 'GeneticAlgorithmParameters', 6 | 'GeneticAlgorithmOptimizer', 7 | ] 8 | -------------------------------------------------------------------------------- /l2l/optimizers/evolution/requirements.txt: -------------------------------------------------------------------------------- 1 | deap==1.0.2 2 | -------------------------------------------------------------------------------- /l2l/optimizers/evolutionstrategies/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import EvolutionStrategiesOptimizer, EvolutionStrategiesParameters 2 | 3 | __all__ = ['EvolutionStrategiesOptimizer', 'EvolutionStrategiesParameters'] 4 | -------------------------------------------------------------------------------- /l2l/optimizers/face/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import FACEParameters 2 | from .optimizer import FACEOptimizer 3 | 4 | __all__ = [ 5 | 'FACEParameters', 6 | 'FACEOptimizer' 7 | ] 8 | -------------------------------------------------------------------------------- /l2l/optimizers/gradientdescent/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import ClassicGDParameters 2 | from .optimizer import StochasticGDParameters 3 | from .optimizer import AdamParameters 4 | from .optimizer import RMSPropParameters 5 | 6 | from .optimizer import GradientDescentOptimizer 7 | 8 | __all__ = [ 9 | 'ClassicGDParameters', 10 | 'StochasticGDParameters', 11 | 'AdamParameters', 12 | 'RMSPropParameters', 13 | 'GradientDescentOptimizer', 14 | ] 15 | -------------------------------------------------------------------------------- /l2l/optimizers/gridsearch/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import GridSearchOptimizer 2 | from .optimizer import GridSearchParameters 3 | 4 | __all__ = ['GridSearchOptimizer', 'GridSearchParameters'] 5 | -------------------------------------------------------------------------------- /l2l/optimizers/gridsearch/optimizer.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from collections import namedtuple 3 | 4 | import numpy as np 5 | from l2l.utils.tools import cartesian_product 6 | 7 | from l2l import DictEntryType, get_grouped_dict 8 | from l2l import dict_to_list 9 | from l2l.optimizers.optimizer import Optimizer 10 | 11 | logger = logging.getLogger("optimizers.gridsearch") 12 | 13 | GridSearchParameters = namedtuple('GridSearchParameters', ['param_grid']) 14 | GridSearchParameters.__doc__ = """ 15 | :param dict param_grid: This is the data structure specifying the grid over which to search. 
This should be a 16 | dictionary as follows:: 17 | 18 | optimizee_param_grid['param_name'] = (lower_bound, upper_bound, n_steps) 19 | 20 | Where the interval `[lower_bound, upper_bound]` is divided into `n_steps` intervals thereby providing 21 | `n_steps + 1` points for the grid. 22 | 23 | Note that `param_grid` must have the same keys as the `Individual-Dict` returned by the function 24 | :meth:`.Optimizee.create_individual`. Also, if any of the parameters of the individuals is an array, then the above 25 | grid specification applies to each element of the array. 26 | """ 27 | 28 | 29 | class GridSearchOptimizer(Optimizer): 30 | """ 31 | This class implements a basic grid search optimizer. It runs the optimizee on a given grid of parameter values and 32 | returns the best fitness found. Moreover, it can also simply be used to run a grid search and process the results 33 | stored in the traj in any manner desired. 34 | 35 | Notes regarding what it does - 36 | 37 | 1. This algorithm does not do any kind of adaptive searching and thus the concept of generations does not apply 38 | per se. That said, it is currently implemented as a series of runs in a single generation. All of these runs 39 | are declared in the constructor itself. The :meth:`.Optimizer.post_process()` function simply prints the 40 | individual with the maximal fitness. 41 | 42 | 2. This algorithm doesn't make use of self.eval_pop and :meth:`.Optimizer._expand_trajectory()` simply because the 43 | cartesian product can be used more efficiently directly. (Imagine having to split a dict of 10000 parameter 44 | combinations into 10000 small `Individual-Dict`s and storing them into eval_pop only to join them and call 45 | `traj.f_expand()` in :meth:`.Optimizer._expand_trajectory()`) 46 | 47 | :param ~l2l.utils.trajectory.Trajectory traj: Use this trajectory to store the parameters of the specific runs. 48 | The parameters should be initialized based on the values in `parameters` 49 | 50 | :param optimizee_create_individual: A function which when called returns one instance of parameters (or "individual") 51 | 52 | :param optimizee_fitness_weights: The weights which should be multiplied with the fitness returned from the 53 | :class:`~l2l.optimizees.optimizee.Optimizee` -- one for each element of the fitness (fitness can be 54 | multi-dimensional). If some element is negative, the Optimizer minimizes that element of fitness instead of 55 | maximizing. By default, the `Optimizer` maximizes all fitness dimensions.
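For illustration, these parameters could be constructed as follows (editor's sketch; the bounds and step count are hypothetical, and the pattern mirrors l2l/tests/test_gs_optimizer.py further below)::

        from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters

        # (lower_bound, upper_bound, n_steps) -> n_steps + 1 grid points per key
        optimizer_parameters = GridSearchParameters(param_grid={
            'coords': (-5.0, 5.0, 30),
        })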
56 | 57 | :param parameters: An instance of :class:`.GridSearchParameters` 58 | 59 | """ 60 | 61 | def __init__(self, traj, 62 | optimizee_create_individual, 63 | optimizee_fitness_weights, 64 | parameters, 65 | optimizee_bounding_func=None): 66 | super().__init__(traj, optimizee_create_individual=optimizee_create_individual, 67 | optimizee_fitness_weights=optimizee_fitness_weights, parameters=parameters, 68 | optimizee_bounding_func=optimizee_bounding_func) 69 | 70 | self.best_individual = None 71 | self.best_fitness = None 72 | 73 | sample_individual = self.optimizee_create_individual() 74 | 75 | # Generate parameter dictionary based on optimizee_param_grid 76 | self.param_list = {} 77 | _, optimizee_individual_param_spec = dict_to_list(sample_individual, get_dict_spec=True) 78 | self.optimizee_individual_dict_spec = optimizee_individual_param_spec 79 | 80 | optimizee_param_grid = parameters.param_grid 81 | # Assert validity of optimizee_param_grid 82 | assert set(sample_individual.keys()) == set(optimizee_param_grid.keys()), \ 83 | "The Parameters of optimizee_param_grid don't match those of the optimizee individual" 84 | 85 | for param_name, param_type, param_length in optimizee_individual_param_spec: 86 | param_lower_bound, param_upper_bound, param_n_steps = optimizee_param_grid[param_name] 87 | if param_type == DictEntryType.Scalar: 88 | self.param_list[param_name] = np.linspace(param_lower_bound, param_upper_bound, param_n_steps + 1) 89 | elif param_type == DictEntryType.Sequence: 90 | curr_param_list = np.linspace(param_lower_bound, param_upper_bound, param_n_steps + 1) 91 | curr_param_list = np.meshgrid(*([curr_param_list] * param_length), indexing='ij') 92 | curr_param_list = [x.ravel() for x in curr_param_list] 93 | curr_param_list = np.stack(curr_param_list, axis=-1) 94 | self.param_list[param_name] = curr_param_list 95 | 96 | self.param_list = cartesian_product(self.param_list, tuple(sorted(optimizee_param_grid.keys()))) 97 | self.size = len(self.param_list[list(self.param_list.keys())[0]]) 98 | 99 | # Adding the bounds information to the trajectory 100 | traj.f_add_parameter_group('grid_spec') 101 | for param_name, param_grid_spec in optimizee_param_grid.items(): 102 | traj.grid_spec.f_add_parameter(param_name + '.lower_bound', param_grid_spec[0]) 103 | traj.grid_spec.f_add_parameter(param_name + '.upper_bound', param_grid_spec[1]) 104 | traj.f_add_parameter('n_iteration', 1, comment='Grid search does only 1 iteration') 105 | #: The current generation number 106 | self.g = 0 107 | # Expanding the trajectory 108 | grouped_params_dict = {'individual.' + key: value for key, value in self.param_list.items()} 109 | final_params_dict = {'generation': [self.g], 110 | 'ind_idx': range(self.size)} 111 | final_params_dict.update(grouped_params_dict) 112 | traj.f_expand(cartesian_product(final_params_dict, 113 | [('ind_idx',) + tuple(grouped_params_dict.keys()), 'generation'])) 114 | 115 | #: The population (i.e. list of individuals) to be evaluated at the next iteration 116 | self.eval_pop = None 117 | 118 | def post_process(self, traj, fitnesses_results): 119 | """ 120 | In this optimizer, the post_process function merely returns the best individual out of the grid and 121 | does not expand the trajectory.
It also stores any relevant results 122 | """ 123 | logger.info('Finished Simulation') 124 | logger.info('-------------------') 125 | logger.info('') 126 | 127 | run_idx_array = np.array([x[0] for x in fitnesses_results]) 128 | fitness_array = np.array([x[1] for x in fitnesses_results]) 129 | optimizee_fitness_weights = np.reshape(np.array(self.optimizee_fitness_weights), (-1, 1)) 130 | 131 | weighted_fitness_array = np.multiply(fitness_array, optimizee_fitness_weights).ravel() 132 | max_fitness_indiv_index = np.argmax(weighted_fitness_array) 133 | 134 | logger.info('Storing Results') 135 | logger.info('---------------') 136 | 137 | for run_idx, run_fitness, run_weighted_fitness in zip(run_idx_array, fitness_array, weighted_fitness_array): 138 | traj.v_idx = run_idx 139 | traj.f_add_result('$set.$.fitness', np.array(run_fitness)) 140 | traj.f_add_result('$set.$.weighted_fitness', run_weighted_fitness) 141 | 142 | logger.info('Best Individual is:') 143 | logger.info('') 144 | 145 | traj.v_idx = run_idx_array[max_fitness_indiv_index] 146 | individual = traj.individual 147 | self.best_individual = {} 148 | for param_name, _, _ in self.optimizee_individual_dict_spec: 149 | param_value = self.param_list[param_name][max_fitness_indiv_index] 150 | logger.info(' %s: %s', param_name, param_value) 151 | self.best_individual[param_name] = param_value 152 | 153 | self.best_fitness = fitness_array[max_fitness_indiv_index] 154 | logger.info(' with fitness: %s', fitness_array[max_fitness_indiv_index]) 155 | logger.info(' with weighted fitness: %s', weighted_fitness_array[max_fitness_indiv_index]) 156 | 157 | self.g += 1 158 | traj.v_idx = -1 159 | 160 | def _expand_trajectory(self, traj): 161 | """ 162 | Add as many explored runs as individuals that need to be evaluated. Furthermore, add the individuals as explored 163 | parameters. 164 | 165 | :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the 166 | individual that we want to simulate. The individual is accessible using `traj.individual` and parameter e.g. 167 | param1 is accessible using `traj.param1` 168 | 169 | :return: 170 | """ 171 | 172 | grouped_params_dict = get_grouped_dict(self.eval_pop) 173 | grouped_params_dict = {'individual.' + key: val for key, val in grouped_params_dict.items()} 174 | 175 | final_params_dict = {'generation': [self.g], 176 | 'ind_idx': range(len(self.eval_pop))} 177 | final_params_dict.update(grouped_params_dict) 178 | 179 | # We need to convert them to lists or write our own custom IndividualParameter ;-) 180 | # Note the second argument to `cartesian_product`: This is for only having the cartesian product 181 | # between ``generation x (ind_idx AND individual)``, so that every individual has just one 182 | # unique index within a generation. 183 | traj.f_expand(cartesian_product(final_params_dict, 184 | [('ind_idx',) + tuple(grouped_params_dict.keys()), 'generation'])) 185 | 186 | def end(self, traj): 187 | """ 188 | Run any code required to clean up, print final individuals, etc.
189 | """ 190 | traj.f_add_result('final_individual', self.best_individual) 191 | traj.f_add_result('final_fitness', self.best_fitness) 192 | traj.f_add_result('n_iteration', self.g) 193 | 194 | logger.info('x -------------------------------- x') 195 | logger.info(' Completed SUCCESSFUL Grid Search ') 196 | logger.info('x -------------------------------- x') 197 | -------------------------------------------------------------------------------- /l2l/optimizers/multievolution/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import MultiGeneticAlgorithmParameters 2 | from .optimizer import MultiGeneticAlgorithmOptimizer 3 | 4 | __all__ = [ 5 | 'MultiGeneticAlgorithmParameters', 6 | 'MultiGeneticAlgorithmOptimizer', 7 | ] 8 | -------------------------------------------------------------------------------- /l2l/optimizers/multievolution/requirements.txt: -------------------------------------------------------------------------------- 1 | deap==1.0.2 2 | -------------------------------------------------------------------------------- /l2l/optimizers/multigradientdescent/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import MultiClassicGDParameters 2 | from .optimizer import MultiStochasticGDParameters 3 | from .optimizer import MultiAdamParameters 4 | from .optimizer import MultiRMSPropParameters 5 | 6 | from .optimizer import MultiGradientDescentOptimizer 7 | 8 | __all__ = [ 9 | 'MultiClassicGDParameters', 10 | 'MultiStochasticGDParameters', 11 | 'MultiAdamParameters', 12 | 'MultiRMSPropParameters', 13 | 'MultiGradientDescentOptimizer', 14 | ] 15 | 16 | -------------------------------------------------------------------------------- /l2l/optimizers/naturalevolutionstrategies/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import NaturalEvolutionStrategiesOptimizer, NaturalEvolutionStrategiesParameters 2 | 3 | __all__ = ['NaturalEvolutionStrategiesOptimizer', 'NaturalEvolutionStrategiesParameters'] 4 | -------------------------------------------------------------------------------- /l2l/optimizers/optimizer.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | from l2l.utils.tools import cartesian_product 4 | 5 | from l2l import get_grouped_dict 6 | 7 | OptimizerParameters = namedtuple('OptimizerParameters', []) 8 | 9 | 10 | class Optimizer: 11 | """ 12 | This is the base class for the Optimizers i.e. the outer loop algorithms. These algorithms generate parameters, 13 | give them to the inner loop to be evaluated, and with the resulting fitness modify the parameters in some way. 14 | 15 | :param ~l2l.utils.trajectory.Trajectory traj: Use this trajectory to store the parameters of the specific runs. 16 | The parameters should be initialized based on the values in `parameters` 17 | 18 | :param optimizee_create_individual: A function which when called returns one instance of parameters (or "individual") 19 | 20 | :param optimizee_fitness_weights: The weights which should be multiplied with the fitness returned from the 21 | :class:`~l2l.optimizees.optimizee.Optimizee` -- one for each element of the fitness (fitness can be 22 | multi-dimensional). If some element is negative, the Optimizer minimizes that element of fitness instead of 23 | maximizing. By default, the `Optimizer` maximizes all fitness dimensions.
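A minimal sketch of the subclassing pattern this base class expects (editor's illustration; the class name and population size are hypothetical, and a real subclass would also set self.g = 0 and an initial self.eval_pop in its own __init__ before the first generation runs):

    from l2l.optimizers.optimizer import Optimizer

    class RandomSearchOptimizer(Optimizer):
        def post_process(self, traj, fitnesses_results):
            # Propose the population to be evaluated in the next cycle ...
            self.eval_pop = [self.optimizee_create_individual() for _ in range(10)]
            # ... then always increment the generation and expand the trajectory.
            self.g += 1
            self._expand_trajectory(traj)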
24 | 25 | :param parameters: A named tuple containing the parameters for the Optimizer class 26 | 27 | """ 28 | 29 | def __init__(self, traj, 30 | optimizee_create_individual, 31 | optimizee_fitness_weights, 32 | optimizee_bounding_func, 33 | parameters): 34 | # Creating Placeholders for individuals and results that are about to be explored 35 | traj.f_add_parameter('generation', 0, comment='Current generation') 36 | traj.f_add_parameter('ind_idx', 0, comment='Index of individual') 37 | 38 | # Initializing basic variables 39 | self.optimizee_create_individual = optimizee_create_individual 40 | self.optimizee_fitness_weights = optimizee_fitness_weights 41 | self.optimizee_bounding_func = optimizee_bounding_func 42 | self.parameters = parameters 43 | 44 | #: The current generation number 45 | self.g = None 46 | #: The population (i.e. list of individuals) to be evaluated at the next iteration 47 | self.eval_pop = None 48 | 49 | def post_process(self, traj, fitnesses_results): 50 | """ 51 | This is the key function of this class. Given a set of :obj:`fitnesses_results`, and the :obj:`traj`, it uses 52 | the fitness to decide on the next set of parameters to be evaluated. Then it fills the :attr:`.Optimizer.eval_pop` with the 53 | list of parameters it wants evaluated at the next simulation cycle, increments :attr:`.Optimizer.g` and calls 54 | :meth:`._expand_trajectory` 55 | 56 | :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the 57 | individual that we want to simulate. The individual is accessible using `traj.individual` and parameter e.g. 58 | param1 is accessible using `traj.param1` 59 | 60 | :param list fitnesses_results: This is a list of fitness results as tuples of run index and fitness. 61 | It is of the form `[(run_idx, fitness), ...]` 62 | 63 | """ 64 | # NOTE: Always remember to keep the following two lines. 65 | # TODO: Set eval_pop to the values of parameters you want to evaluate in the next cycle 66 | # self.eval_pop = ... 67 | self.g += 1 68 | self._expand_trajectory(traj) 69 | 70 | def end(self, traj): 71 | """ 72 | Run any code required to clean up, print final individuals, etc. 73 | 74 | :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the 75 | individual that we want to simulate. The individual is accessible using `traj.individual` and parameter e.g. 76 | param1 is accessible using `traj.param1` 77 | 78 | """ 79 | pass 80 | 81 | def _expand_trajectory(self, traj): 82 | """ 83 | Add as many explored runs as individuals that need to be evaluated. Furthermore, add the individuals as explored 84 | parameters. 85 | 86 | :param ~l2l.utils.trajectory.Trajectory traj: The trajectory that contains the parameters and the 87 | individual that we want to simulate. The individual is accessible using `traj.individual` and parameter e.g. 88 | param1 is accessible using `traj.param1` 89 | 90 | :return: 91 | """ 92 | 93 | grouped_params_dict = get_grouped_dict(self.eval_pop) 94 | grouped_params_dict = {'individual.'
+ key: val for key, val in grouped_params_dict.items()} 95 | 96 | final_params_dict = {'generation': [self.g], 97 | 'ind_idx': range(len(self.eval_pop))} 98 | final_params_dict.update(grouped_params_dict) 99 | 100 | # We need to convert them to lists or write our own custom IndividualParameter ;-) 101 | # Note the second argument to `cartesian_product`: This is for only having the cartesian product 102 | # between ``generation x (ind_idx AND individual)``, so that every individual has just one 103 | # unique index within a generation. 104 | traj.f_expand(cartesian_product(final_params_dict, 105 | [('ind_idx',) + tuple(grouped_params_dict.keys()), 'generation'])) 106 | -------------------------------------------------------------------------------- /l2l/optimizers/paralleltempering/__init__.py: -------------------------------------------------------------------------------- 1 | from .optimizer import ParallelTemperingParameters 2 | from .optimizer import ParallelTemperingOptimizer 3 | 4 | __all__ = [ 5 | 'ParallelTemperingParameters', 6 | 'ParallelTemperingOptimizer', 7 | ] 8 | -------------------------------------------------------------------------------- /l2l/optimizers/simulatedannealing/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meta-optimization/L2L/54d473aa36e5e14b5f2b7c3ed8d5c547b475e33c/l2l/optimizers/simulatedannealing/__init__.py -------------------------------------------------------------------------------- /l2l/paths.py: -------------------------------------------------------------------------------- 1 | import os 2 | from collections import OrderedDict 3 | 4 | import re 5 | 6 | import itertools 7 | 8 | __author__ = 'anand' 9 | 10 | 11 | class Paths: 12 | def __init__(self, root_dir_name, param_dict, suffix="", root_dir_path='./results'): 13 | """ 14 | Manages generating paths for various cases 15 | 16 | :param root_dir_name: Root dir name where all the subdirectories are created 17 | :param param_dict: Dictionary in the form of dict(paramname1=param1val, paramname2=param2val). See :meth:`Paths.output_dir_path` for where this is used. 18 | :param suffix: Suffix used for various output files 19 | :param root_dir_path: The root dir path where the root dir is created 20 | """ 21 | self._root_dir_name = root_dir_name 22 | self._root_dir_path = root_dir_path 23 | if not os.path.exists(root_dir_path): 24 | raise RuntimeError("{} does not exist. Please create it.".format(root_dir_path)) 25 | self._suffix = suffix 26 | self._param_combo = order_dict_alphabetically(param_dict) 27 | 28 | @property 29 | def root_dir_path(self): 30 | """ 31 | Get the full path of the root directory 32 | :return: 33 | """ 34 | return os.path.join(self._root_dir_path, self._root_dir_name) 35 | 36 | @property 37 | def output_dir_path(self): 38 | """ 39 | Get the path of the "output" directory of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val. 40 | The parameter names are sorted in alphabetical order in the leaf directory name.
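For example (editor's sketch; the experiment name and parameters are hypothetical, and './results' must already exist)::

        from l2l.paths import Paths

        paths = Paths('my_experiment', dict(learning_rate=0.01, seed=42), root_dir_path='./results')
        # make_param_string (defined below) sorts the keys alphabetically and replaces '_' with '-'
        print(paths.output_dir_path)  # ./results/my_experiment/learning-rate-0.01-seed-42
        print(paths.results_path)     # .../learning-rate-0.01-seed-42/results (created on first access)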
41 | :return: 42 | """ 43 | return os.path.join(self.root_dir_path, make_param_string(**self._param_combo)) 44 | 45 | # The functions that should actually be used are below 46 | @property 47 | def results_path(self): 48 | """ 49 | Get the path of the results directory of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val/results 50 | :return: 51 | """ 52 | path = os.path.join(self.output_dir_path, "results") 53 | os.makedirs(path, exist_ok=True) 54 | return path 55 | 56 | @property 57 | def simulation_path(self): 58 | """ 59 | Get the path of the simulation directory of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val/simulation 60 | :return: 61 | """ 62 | path = os.path.join(self.output_dir_path, "simulation") 63 | os.makedirs(path, exist_ok=True) 64 | return path 65 | 66 | @property 67 | def data_path(self): 68 | """ 69 | Get the path of the data directory of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val/data 70 | :return: 71 | """ 72 | path = os.path.join(self.output_dir_path, "data") 73 | os.makedirs(path, exist_ok=True) 74 | return path 75 | 76 | @property 77 | def logs_path(self): 78 | """ 79 | Get the path of the logs directory of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val/logs 80 | :return: 81 | """ 82 | path = os.path.join(self.output_dir_path, "logs") 83 | os.makedirs(path, exist_ok=True) 84 | return path 85 | 86 | # General function to generate paths 87 | def get_fpath(self, name, ext, **kwargs): 88 | """ 89 | Get the path of an arbitrary file of the form /root_dir_path/root_dir_name/param1name-param1val-param2name-param2val/results/{name}-{param-paramval*}-{kwarg-kwargval*}.ext 90 | :return: 91 | """ 92 | d = self._param_combo.copy() 93 | d.update(kwargs) 94 | return os.path.join(self.results_path, "{}-{}{}.{}".format(name, make_param_string(**d), self._suffix, ext)) 95 | 96 | 97 | def make_param_string(delimiter='-', **kwargs): 98 | """ 99 | Takes a dictionary and constructs a string of the form key1-val1-key2-val2-... (denoted here as {key-val*}) 100 | The keys are alphabetically sorted 101 | :param str delimiter: Delimiter to use (default is '-') 102 | :param dict kwargs: A python dictionary 103 | :return: 104 | """ 105 | param_string = "" 106 | for key in sorted(kwargs): 107 | param_string += delimiter 108 | param_string += key.replace('_', delimiter) 109 | val = kwargs[key] 110 | if isinstance(val, float): 111 | param_string += "{}{:.2f}".format(delimiter, val) 112 | else: 113 | param_string += "{}{}".format(delimiter, val) 114 | param_string = re.sub("^-", "", param_string) 115 | return param_string 116 | 117 | 118 | def order_dict_alphabetically(d): 119 | """ 120 | Sort a given dictionary alphabetically 121 | :param dict d: 122 | :return: 123 | """ 124 | od = OrderedDict() 125 | for key in sorted(list(d.keys())): 126 | assert key not in od 127 | od[key] = d[key] 128 | return od 129 | 130 | 131 | def dict_product(dicts): 132 | return (dict(zip(dicts, x)) for x in itertools.product(*dicts.values())) 133 | 134 | 135 | class PathsMap: 136 | def __init__(self, param_lists, args_name, n_networks, suffix, root_dir_path='./results'): 137 | """ 138 | This class manages groups of paths for larger simulations of different parameter combinations since each 139 | :class:`~l2l.paths.Paths` above only manages one parameter combination.
140 | :param param_lists: 141 | :param args_name: 142 | :param n_networks: 143 | :param suffix: 144 | """ 145 | self._root_dir_name = args_name 146 | self._root_dir_path = root_dir_path 147 | self._suffix = suffix 148 | 149 | param_lists.update(dict(network_num=range(n_networks))) 150 | self.param_lists = param_lists 151 | 152 | list_dict = dict_product(param_lists) 153 | self.paths_map = {} 154 | for param_combo in list_dict: 155 | key = tuple(order_dict_alphabetically(param_combo).items()) 156 | assert key not in self.paths_map 157 | self.paths_map[key] = Paths(args_name, param_combo, suffix) 158 | 159 | @property 160 | def paths_list(self): 161 | return list(self.paths_map.values()) 162 | 163 | def get(self, **kwargs): 164 | param_combo = kwargs 165 | key = tuple(order_dict_alphabetically(param_combo).items()) 166 | return self.paths_map[key] 167 | 168 | def filter(self, **kwargs): 169 | filtered_list = [] 170 | for key, paths in self.paths_map.items(): 171 | params_combo = OrderedDict(key) 172 | for param_name, param_value in kwargs.items(): 173 | if params_combo[param_name] != param_value: 174 | break 175 | else: 176 | filtered_list.append(paths) 177 | 178 | return filtered_list 179 | 180 | # Aggregate results paths 181 | @property 182 | def root_dir_path(self): 183 | return os.path.join(self._root_dir_path, self._root_dir_name) 184 | 185 | @property 186 | def agg_results_path(self): 187 | path = os.path.join(self.root_dir_path, "results") 188 | os.makedirs(path, exist_ok=True) 189 | return path 190 | 191 | def get_agg_fpath(self, name, param_combo, ext, **kwargs): 192 | d = param_combo.copy() 193 | d.update(kwargs) 194 | return os.path.join(self.agg_results_path, "{}-{}{}.{}" 195 | .format(name, make_param_string(**d), self._suffix, ext)) 196 | -------------------------------------------------------------------------------- /l2l/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meta-optimization/L2L/54d473aa36e5e14b5f2b7c3ed8d5c547b475e33c/l2l/tests/__init__.py -------------------------------------------------------------------------------- /l2l/tests/test_all.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import shutil 4 | 5 | 6 | from l2l.tests import test_ce_optimizer 7 | from l2l.tests import test_ga_optimizer 8 | from l2l.tests import test_sa_optimizer 9 | from l2l.tests import test_gd_optimizer 10 | from l2l.tests import test_gs_optimizer 11 | from l2l.tests import test_innerloop 12 | from l2l.tests import test_outerloop 13 | from l2l.tests import test_setup 14 | from l2l.tests import test_checkpoint 15 | from l2l.tests import test_runner 16 | 17 | 18 | def test_suite(): 19 | 20 | suite = unittest.TestSuite() 21 | suite.addTest(test_setup.suite()) 22 | suite.addTest(test_outerloop.suite()) 23 | suite.addTest(test_innerloop.suite()) 24 | suite.addTest(test_ce_optimizer.suite()) 25 | suite.addTest(test_sa_optimizer.suite()) 26 | suite.addTest(test_gd_optimizer.suite()) 27 | suite.addTest(test_ga_optimizer.suite()) 28 | suite.addTest(test_gs_optimizer.suite()) 29 | suite.addTest(test_checkpoint.suite()) 30 | suite.addTest(test_runner.suite()) 31 | 32 | return suite 33 | 34 | 35 | if __name__ == "__main__": 36 | 37 | runner = unittest.TextTestRunner(verbosity=2) 38 | home_path = os.environ.get("HOME") 39 | root_dir_path = os.path.join(home_path, 'results') 40 | runner.run(test_suite()) 41 | if os.path.exists(root_dir_path):
42 | print(f'removing {root_dir_path}') 43 | shutil.rmtree(root_dir_path) 44 | -------------------------------------------------------------------------------- /l2l/tests/test_ce_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | from l2l.tests.test_optimizer import OptimizerTestCase 5 | from l2l.optimizers.crossentropy.distribution import NoisyGaussian 6 | from l2l.optimizers.crossentropy import CrossEntropyOptimizer, CrossEntropyParameters 7 | 8 | 9 | class CEOptimizerTestCase(OptimizerTestCase): 10 | 11 | def test_setup(self): 12 | 13 | optimizer_parameters = CrossEntropyParameters(pop_size=2, rho=0.9, smoothing=0.0, temp_decay=0, n_iteration=1, 14 | distribution=NoisyGaussian( 15 | noise_magnitude=1., noise_decay=0.99), 16 | stop_criterion=np.inf, seed=1) 17 | optimizer = CrossEntropyOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 18 | optimizee_fitness_weights=(-0.1,), 19 | parameters=optimizer_parameters, 20 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 21 | 22 | self.assertIsNotNone(optimizer.parameters) 23 | self.assertIsNotNone(self.experiment_functionGenerator) 24 | 25 | try: 26 | 27 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 28 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 29 | optimizer=optimizer, 30 | optimizer_parameters=optimizer_parameters) 31 | except Exception as e: 32 | self.fail(type(e).__name__) 33 | best = self.experiment_functionGenerator.optimizer.best_individual['coords'] 34 | self.assertEqual(best[0], -4.998856251826551) 35 | self.assertEqual(best[1], -1.9766742736816023) 36 | self.experiment_functionGenerator.end_experiment(optimizer) 37 | 38 | #test with active wait optimizee 39 | optimizer_parameters = CrossEntropyParameters(pop_size=3, rho=0.9, smoothing=0.0, temp_decay=0, n_iteration=1, 40 | distribution=NoisyGaussian( 41 | noise_magnitude=1., noise_decay=0.99), 42 | stop_criterion=np.inf, seed=1) 43 | optimizer = lambda: CrossEntropyOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 44 | optimizee_fitness_weights=(-0.1,), 45 | parameters=optimizer_parameters, 46 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 47 | self.assertRaises(Exception, optimizer) 48 | 49 | def suite(): 50 | suite = unittest.TestLoader().loadTestsFromTestCase(CEOptimizerTestCase) 51 | return suite 52 | 53 | 54 | def run(): 55 | runner = unittest.TextTestRunner(verbosity=2) 56 | runner.run(suite()) 57 | 58 | 59 | if __name__ == "__main__": 60 | run() 61 | -------------------------------------------------------------------------------- /l2l/tests/test_es_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | from l2l.tests.test_optimizer import OptimizerTestCase 5 | from l2l.optimizers.evolutionstrategies import EvolutionStrategiesParameters, EvolutionStrategiesOptimizer 6 | 7 | 8 | class ESOptimizerTestCase(OptimizerTestCase): 9 | 10 | def test_setup(self): 11 | 12 | optimizer_parameters = EvolutionStrategiesParameters( 13 | learning_rate=0.1, 14 | noise_std=1.0, 15 | mirrored_sampling_enabled=True, 16 | fitness_shaping_enabled=True, 17 | pop_size=1, 18 | n_iteration=1, 19 | stop_criterion=np.inf, 20 | seed=1) 21 | 22 | optimizer =
EvolutionStrategiesOptimizer( 23 | self.trajectory_functionGenerator, 24 | optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 25 | optimizee_fitness_weights=(-1.,), 26 | parameters=optimizer_parameters, 27 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 28 | 29 | self.assertIsNotNone(optimizer.parameters) 30 | self.assertIsNotNone(self.experiment_functionGenerator) 31 | 32 | try: 33 | 34 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 35 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 36 | optimizer=optimizer, 37 | optimizer_parameters=optimizer_parameters) 38 | except Exception as e: 39 | self.fail(type(e).__name__) 40 | best = self.experiment_functionGenerator.optimizer.best_individual['coords'] 41 | self.assertEqual(best[0], 0.7945654106889819) 42 | self.assertEqual(best[1], 1.5914885207715055) 43 | self.experiment_functionGenerator.end_experiment(optimizer) 44 | 45 | #active wait optimizee 46 | optimizer = EvolutionStrategiesOptimizer( 47 | self.trajectory_activeWait, 48 | optimizee_create_individual=self.optimizee_activeWait.create_individual, 49 | optimizee_fitness_weights=(-1.,), 50 | parameters=optimizer_parameters, 51 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 52 | 53 | try: 54 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 55 | optimizee_parameters=self.optimizee_activeWait_parameters, 56 | optimizer=optimizer, 57 | optimizer_parameters=optimizer_parameters) 58 | except Exception as e: 59 | self.fail(type(e).__name__) 60 | #best = self.experiment_activeWait.optimizer.best_individual['difficulty'] 61 | #self.assertEqual(best, 10001.624345363663) 62 | results = self.experiment_activeWait.optimizer.eval_pop_arr 63 | expected_results = [[10001.624345363663],[9998.375654636337],[10000.0]] 64 | self.assertListEqual(results.tolist(),expected_results) 65 | self.experiment_activeWait.end_experiment(optimizer) 66 | 67 | def suite(): 68 | suite = unittest.TestLoader().loadTestsFromTestCase(ESOptimizerTestCase) 69 | return suite 70 | 71 | 72 | def run(): 73 | runner = unittest.TextTestRunner(verbosity=2) 74 | runner.run(suite()) 75 | 76 | 77 | if __name__ == "__main__": 78 | run() 79 | -------------------------------------------------------------------------------- /l2l/tests/test_face_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | from l2l.tests.test_optimizer import OptimizerTestCase 5 | from l2l.optimizers.crossentropy.distribution import Gaussian 6 | from l2l.optimizers.face import FACEOptimizer, FACEParameters 7 | 8 | 9 | class FACEOptimizerTestCase(OptimizerTestCase): 10 | 11 | def test_setup(self): 12 | 13 | optimizer_parameters = FACEParameters(min_pop_size=2, max_pop_size=3, n_elite=1, smoothing=0.2, temp_decay=0, 14 | n_iteration=1, 15 | distribution=Gaussian(), n_expand=5, stop_criterion=np.inf, seed=1) 16 | optimizer = FACEOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 17 | optimizee_fitness_weights=(-0.1,), 18 | parameters=optimizer_parameters, 19 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 20 | self.assertIsNotNone(optimizer.parameters) 21 | self.assertIsNotNone(self.experiment_functionGenerator) 22 | 23 | try: 24 | 25 |
self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 26 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 27 | optimizer=optimizer, 28 | optimizer_parameters=optimizer_parameters) 29 | except Exception as e: 30 | self.fail(type(e).__name__) 31 | best = self.experiment_functionGenerator.optimizer.best_individual['coords'] 32 | #self.assertEqual(best[0], -4.998856251826551) 33 | #self.assertEqual(best[1], -1.9766742736816023) 34 | self.experiment_functionGenerator.end_experiment(optimizer) 35 | 36 | #active wait optimizee 37 | optimizer_parameters = FACEParameters(min_pop_size=2, max_pop_size=3, n_elite=1, smoothing=0.2, temp_decay=0, 38 | n_iteration=1, 39 | distribution=Gaussian(), n_expand=5, stop_criterion=np.inf, seed=1) 40 | optimizer = lambda: FACEOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 41 | optimizee_fitness_weights=(-0.1,), 42 | parameters=optimizer_parameters, 43 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 44 | self.assertRaises(Exception, optimizer) 45 | 46 | def suite(): 47 | suite = unittest.TestLoader().loadTestsFromTestCase(FACEOptimizerTestCase) 48 | return suite 49 | 50 | 51 | def run(): 52 | runner = unittest.TextTestRunner(verbosity=2) 53 | runner.run(suite()) 54 | 55 | 56 | if __name__ == "__main__": 57 | run() -------------------------------------------------------------------------------- /l2l/tests/test_ga_optimizer.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from l2l.tests.test_optimizer import OptimizerTestCase 5 | from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters 6 | from l2l.utils.experiment import Experiment 7 | 8 | 9 | class GAOptimizerTestCase(OptimizerTestCase): 10 | 11 | def test_setup(self): 12 | 13 | #test with function generator optimizee 14 | optimizer_parameters = GeneticAlgorithmParameters(seed=0, pop_size=1, cx_prob=0.5, 15 | mut_prob=0.3, n_iteration=1, ind_prob=0.02, 16 | tourn_size=1, mate_par=0.5, 17 | mut_par=1 18 | ) 19 | 20 | optimizer = GeneticAlgorithmOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 21 | optimizee_fitness_weights=(-0.1,), 22 | parameters=optimizer_parameters) 23 | 24 | self.assertIsNotNone(optimizer.parameters) 25 | self.assertIsNotNone(self.experiment_functionGenerator) 26 | 27 | try: 28 | 29 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 30 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 31 | optimizer=optimizer, 32 | optimizer_parameters=optimizer_parameters) 33 | except Exception as e: 34 | self.fail(type(e).__name__) 35 | best = self.experiment_functionGenerator.optimizer.best_individual['coords'] 36 | self.assertEqual(best[0], -4.998856251826551) 37 | self.assertEqual(best[1], -1.9766742736816023) 38 | self.experiment_functionGenerator.end_experiment(optimizer) 39 | 40 | #test with active wait optimizee 41 | optimizer = GeneticAlgorithmOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 42 | optimizee_fitness_weights=(-0.1,), 43 | parameters=optimizer_parameters) 44 | try: 45 | 46 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 47 | optimizee_parameters=self.optimizee_activeWait_parameters, 48 | optimizer=optimizer, 49 |
optimizer_parameters=optimizer_parameters) 50 | except Exception as e: 51 | self.fail(type(e).__name__) 52 | best = self.experiment_activeWait.optimizer.best_individual['difficulty'] 53 | self.assertEqual(best, 10000) 54 | self.experiment_activeWait.end_experiment(optimizer) 55 | 56 | def suite(): 57 | suite = unittest.TestLoader().loadTestsFromTestCase(GAOptimizerTestCase) 58 | return suite 59 | 60 | 61 | def run(): 62 | runner = unittest.TextTestRunner(verbosity=2) 63 | runner.run(suite()) 64 | 65 | 66 | if __name__ == "__main__": 67 | run() 68 | -------------------------------------------------------------------------------- /l2l/tests/test_gd_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer 5 | from l2l.optimizers.gradientdescent.optimizer import RMSPropParameters 6 | from l2l.tests.test_optimizer import OptimizerTestCase 7 | from l2l.utils.experiment import Experiment 8 | 9 | from l2l import list_to_dict 10 | 11 | 12 | class GDOptimizerTestCase(OptimizerTestCase): 13 | 14 | def test_gd(self): 15 | optimizer_parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, 16 | n_random_steps=1, momentum_decay=0.5, 17 | n_iteration=1, stop_criterion=np.inf, seed=99) 18 | 19 | #test with function generator optimizee 20 | optimizer = GradientDescentOptimizer(self.trajectory_functionGenerator, 21 | optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 22 | optimizee_fitness_weights=(0.1,), 23 | parameters=optimizer_parameters, 24 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 25 | self.assertIsNotNone(optimizer.parameters) 26 | self.assertIsNotNone(self.experiment_functionGenerator) 27 | 28 | 29 | try: 30 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 31 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 32 | optimizer=optimizer, 33 | optimizer_parameters=optimizer_parameters) 34 | except Exception as e: 35 | self.fail(type(e).__name__) 36 | 37 | best = list_to_dict(self.experiment_functionGenerator.optimizer.current_individual.tolist(), 38 | self.experiment_functionGenerator.optimizer.optimizee_individual_dict_spec)['coords'] 39 | self.assertEqual(best[0],-4.998856251826551) 40 | self.assertEqual(best[1],-1.9766742736816023) 41 | self.experiment_functionGenerator.end_experiment(optimizer) 42 | 43 | #test with active wait optimizee 44 | optimizer = GradientDescentOptimizer(self.trajectory_activeWait, 45 | optimizee_create_individual=self.optimizee_activeWait.create_individual, 46 | optimizee_fitness_weights=(0.1,), 47 | parameters=optimizer_parameters, 48 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 49 | try: 50 | 51 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 52 | optimizee_parameters=self.optimizee_activeWait_parameters, 53 | optimizer=optimizer, 54 | optimizer_parameters=optimizer_parameters) 55 | except Exception as e: 56 | self.fail(type(e).__name__) 57 | 58 | best = list_to_dict(self.experiment_activeWait.optimizer.current_individual.tolist(), 59 | self.experiment_activeWait.optimizer.optimizee_individual_dict_spec)['difficulty'] 60 | self.assertEqual(best, 10000) 61 | self.experiment_activeWait.end_experiment(optimizer) 62 | 63 | 64 | def suite(): 65 | suite = unittest.TestLoader().loadTestsFromTestCase(GDOptimizerTestCase) 66
| return suite 67 | 68 | 69 | def run(): 70 | runner = unittest.TextTestRunner(verbosity=2) 71 | runner.run(suite()) 72 | 73 | 74 | if __name__ == "__main__": 75 | run() 76 | -------------------------------------------------------------------------------- /l2l/tests/test_gs_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from l2l.tests.test_optimizer import OptimizerTestCase 4 | 5 | from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters 6 | 7 | from l2l import list_to_dict 8 | from l2l.utils.experiment import Experiment 9 | 10 | 11 | class GSOptimizerTestCase(OptimizerTestCase): 12 | 13 | def test_gs(self): 14 | #test with function generator optimizee 15 | n_grid_divs_per_axis = 2 16 | optimizer_parameters = GridSearchParameters(param_grid={ 17 | 'coords': (self.optimizee_functionGenerator.bound[0], self.optimizee_functionGenerator.bound[1], n_grid_divs_per_axis) 18 | }) 19 | optimizer = GridSearchOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 20 | optimizee_fitness_weights=(-0.1,), 21 | parameters=optimizer_parameters) 22 | self.assertIsNotNone(optimizer.parameters) 23 | self.assertIsNotNone(self.experiment_functionGenerator) 24 | 25 | try: 26 | 27 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 28 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 29 | optimizer=optimizer, 30 | optimizer_parameters=optimizer_parameters) 31 | except Exception as e: 32 | self.fail(type(e).__name__) 33 | print(self.experiment_functionGenerator.optimizer) 34 | best = self.experiment_functionGenerator.optimizer.best_individual['coords'] 35 | self.assertEqual(best[0], 0) 36 | self.assertEqual(best[1], 0) 37 | self.experiment_functionGenerator.end_experiment(optimizer) 38 | 39 | #test with active wait optimizee 40 | n_grid_divs_per_axis = 2 41 | optimizer_parameters = GridSearchParameters(param_grid={ 42 | 'difficulty': (self.optimizee_activeWait.bound[0],self.optimizee_activeWait.bound[1], n_grid_divs_per_axis) 43 | }) 44 | optimizer = GridSearchOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 45 | optimizee_fitness_weights=(-0.1,), 46 | parameters=optimizer_parameters) 47 | try: 48 | 49 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 50 | optimizee_parameters=self.optimizee_activeWait_parameters, 51 | optimizer=optimizer, 52 | optimizer_parameters=optimizer_parameters) 53 | except Exception as e: 54 | self.fail(type(e).__name__) 55 | best = self.experiment_activeWait.optimizer.best_individual['difficulty'] 56 | self.assertEqual(best, 10000) 57 | self.experiment_activeWait.end_experiment(optimizer) 58 | 59 | 60 | def suite(): 61 | suite = unittest.TestLoader().loadTestsFromTestCase(GSOptimizerTestCase) 62 | return suite 63 | 64 | 65 | def run(): 66 | runner = unittest.TextTestRunner(verbosity=2) 67 | runner.run(suite()) 68 | 69 | 70 | if __name__ == "__main__": 71 | run() 72 | -------------------------------------------------------------------------------- /l2l/tests/test_innerloop.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | 5 | class InnerLoopTestCase(unittest.TestCase): 6 | 7 | def setUp(self): 8 | return 9 | 10 | def suite(): 11 | suite = unittest.TestLoader().loadTestsFromTestCase(InnerLoopTestCase) 12 |
return suite 13 | 14 | 15 | def run(): 16 | runner = unittest.TextTestRunner(verbosity=2) 17 | runner.run(suite()) -------------------------------------------------------------------------------- /l2l/tests/test_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | 4 | from l2l.optimizees.active_wait.optimizee_aw import AWOptimizee, AWOptimizeeParameters 5 | from l2l.utils.experiment import Experiment 6 | 7 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 8 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 9 | from collections import namedtuple 10 | 11 | 12 | class OptimizerTestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | # Test function 16 | function_id = 14 17 | bench_functs = BenchmarkedFunctions() 18 | (benchmark_name, benchmark_function), benchmark_parameters = \ 19 | bench_functs.get_function_by_index(function_id, noise=True) 20 | home_path = os.environ.get("HOME") 21 | root_dir_path = os.path.join(home_path, 'results') 22 | 23 | #set up functionGenerator optimizee 24 | self.experiment_functionGenerator = Experiment(root_dir_path=root_dir_path) 25 | self.trajectory_functionGenerator, all_runner_params = self.experiment_functionGenerator.prepare_experiment(name='L2L', 26 | log_stdout=True, 27 | overwrite=True, 28 | stop_run=False) 29 | self.optimizee_functionGenerator_parameters = namedtuple('OptimizeeParameters', []) 30 | self.optimizee_functionGenerator = FunctionGeneratorOptimizee( 31 | self.trajectory_functionGenerator, benchmark_function, seed=1) 32 | 33 | #set up activeWait optimizee 34 | self.experiment_activeWait = Experiment(root_dir_path=root_dir_path) 35 | self.trajectory_activeWait, all_runner_params = self.experiment_activeWait.prepare_experiment(name='L2L', 36 | log_stdout=True, 37 | overwrite=True, 38 | stop_run=False) 39 | self.optimizee_activeWait_parameters = AWOptimizeeParameters(difficulty=10000.0) 40 | self.optimizee_activeWait = AWOptimizee(self.trajectory_activeWait, self.optimizee_activeWait_parameters) 41 | 42 | self.experiment_stop_error = Experiment(root_dir_path=root_dir_path) 43 | self.trajectory_stop_error, all_runner_params = self.experiment_stop_error.prepare_experiment(name='L2L', 44 | log_stdout=True, 45 | overwrite=True, 46 | stop_run=True) -------------------------------------------------------------------------------- /l2l/tests/test_outerloop.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | 4 | 5 | class OuterLoopTestCase(unittest.TestCase): 6 | 7 | def setUp(self): 8 | return 9 | 10 | def suite(): 11 | suite = unittest.TestLoader().loadTestsFromTestCase(OuterLoopTestCase) 12 | return suite 13 | 14 | 15 | def run(): 16 | runner = unittest.TextTestRunner(verbosity=2) 17 | runner.run(suite()) -------------------------------------------------------------------------------- /l2l/tests/test_pt_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from l2l.tests.test_optimizer import OptimizerTestCase 4 | import numpy as np 5 | from l2l.optimizers.paralleltempering.optimizer import AvailableCoolingSchedules 6 | from l2l.optimizers.paralleltempering.optimizer import ParallelTemperingParameters, ParallelTemperingOptimizer 7 | 8 | 9 | class PTOptimizerTestCase(OptimizerTestCase): 10 | 11 | def test_pt(self): 12 | cooling_schedules =
[AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE, 13 | AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE, 14 | AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE, 15 | AvailableCoolingSchedules.LINEAR_ADDAPTIVE, 16 | AvailableCoolingSchedules.LINEAR_ADDAPTIVE] 17 | 18 | temperature_bounds = np.array([ 19 | [0.8, 0], 20 | [0.7, 0], 21 | [0.6, 0], 22 | [1, 0.1], 23 | [0.9, 0.2]]) 24 | 25 | decay_parameters = np.full(2, 0.99) 26 | assert (((temperature_bounds.all() <= 1) and (temperature_bounds.all() >= 0)) and (temperature_bounds[:, 0].all( 27 | ) > temperature_bounds[:, 1].all())), "Warning: Temperature bounds are not within specifications." 28 | assert ((decay_parameters.all() <= 1) and (decay_parameters.all() >= 0)), ( 29 | "Warning: Decay parameter not within specifications.") 30 | 31 | optimizer_parameters = ParallelTemperingParameters(n_parallel_runs=2, noisy_step=.03, n_iteration=1, 32 | stop_criterion=np.inf, 33 | seed=np.random.randint(1e5), cooling_schedules=cooling_schedules, 34 | temperature_bounds=temperature_bounds, 35 | decay_parameters=decay_parameters) 36 | optimizer = ParallelTemperingOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 37 | optimizee_fitness_weights=(-1,), 38 | parameters=optimizer_parameters, 39 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 40 | 41 | self.assertIsNotNone(optimizer.parameters) 42 | self.assertIsNotNone(self.experiment_functionGenerator) 43 | 44 | try: 45 | 46 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 47 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 48 | optimizer=optimizer, 49 | optimizer_parameters=optimizer_parameters) 50 | except Exception as e: 51 | self.fail(type(e).__name__) 52 | 53 | #activeWait optimizee 54 | optimizer = ParallelTemperingOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 55 | optimizee_fitness_weights=(-1,), 56 | parameters=optimizer_parameters, 57 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 58 | 59 | try: 60 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 61 | optimizee_parameters=self.optimizee_activeWait_parameters, 62 | optimizer=optimizer, 63 | optimizer_parameters=optimizer_parameters) 64 | except Exception as e: 65 | self.fail(type(e).__name__) 66 | 67 | def suite(): 68 | suite = unittest.TestLoader().loadTestsFromTestCase(PTOptimizerTestCase) 69 | return suite 70 | 71 | 72 | def run(): 73 | runner = unittest.TextTestRunner(verbosity=2) 74 | runner.run(suite()) 75 | 76 | 77 | if __name__ == "__main__": 78 | run() 79 | -------------------------------------------------------------------------------- /l2l/tests/test_runner.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | 4 | import numpy as np 5 | from l2l.tests.test_optimizer import OptimizerTestCase 6 | from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer 7 | from l2l.optimizers.gradientdescent.optimizer import RMSPropParameters 8 | from l2l.optimizees.test_cases.optimizee_testcase import TestcaseOptimizee, TestcaseOptimizeeParameters 9 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 10 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 11 | from l2l.utils.experiment import Experiment 12 | 13 | class
RunnerTestCase(OptimizerTestCase): 14 | 15 | def test_setup(self): 16 | #TODO test restarting individuals 17 | # use gradient descent for testing 18 | optimizee_parameters = TestcaseOptimizeeParameters(exit_code=129) 19 | optimizer_parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, 20 | n_random_steps=1, momentum_decay=0.5, 21 | n_iteration=1, stop_criterion=np.inf, seed=99) 22 | #test with function generator optimizee 23 | optimizee = TestcaseOptimizee(self.trajectory_functionGenerator, optimizee_parameters) 24 | optimizer = GradientDescentOptimizer(self.trajectory_functionGenerator, 25 | optimizee_create_individual=optimizee.create_individual, 26 | optimizee_fitness_weights=(0.1,), 27 | parameters=optimizer_parameters, 28 | optimizee_bounding_func=optimizee.bounding_func) 29 | try: 30 | self.experiment_functionGenerator.run_experiment(optimizee=optimizee, 31 | optimizee_parameters=optimizee_parameters, 32 | optimizer=optimizer, 33 | optimizer_parameters=optimizer_parameters) 34 | except Exception as e: 35 | self.fail(f"Error in Runner Test. Message: {e}") 36 | self.experiment_functionGenerator.end_experiment(optimizer) 37 | 38 | #test if execution stops when stop_run = True 39 | optimizee = TestcaseOptimizee(self.trajectory_stop_error, optimizee_parameters) 40 | optimizer = GradientDescentOptimizer(self.trajectory_stop_error, 41 | optimizee_create_individual=optimizee.create_individual, 42 | optimizee_fitness_weights=(0.1,), 43 | parameters=optimizer_parameters, 44 | optimizee_bounding_func=optimizee.bounding_func) 45 | self.assertRaises(SystemExit, lambda: self.experiment_stop_error.run_experiment(optimizee=optimizee, 46 | optimizee_parameters=optimizee_parameters, 47 | optimizer=optimizer, 48 | optimizer_parameters=optimizer_parameters)) 49 | 50 | #test if runner can handle spaces in path name 51 | home_path = os.environ.get("HOME") 52 | self.root_dir_path = os.path.join(home_path, ' results') 53 | runner_params = {} 54 | self.experiment = Experiment(root_dir_path=self.root_dir_path) 55 | self.trajectory, _ = self.experiment.prepare_experiment( 56 | name='test_trajectory', 57 | log_stdout=True, 58 | add_time=True, 59 | automatic_storing=True, 60 | runner_params=runner_params, 61 | overwrite = True) 62 | 63 | ## Benchmark function 64 | function_id = 4 65 | bench_functs = BenchmarkedFunctions() 66 | (benchmark_name, benchmark_function), benchmark_parameters = \ 67 | bench_functs.get_function_by_index(function_id, noise=True) 68 | 69 | optimizee_seed = 100 70 | random_state = np.random.RandomState(seed=optimizee_seed) 71 | 72 | ## Innerloop simulator 73 | optimizee = FunctionGeneratorOptimizee(self.trajectory, benchmark_function, 74 | seed=optimizee_seed) 75 | 76 | optimizer_parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01, 77 | n_random_steps=1, momentum_decay=0.5, 78 | n_iteration=1, stop_criterion=np.inf, seed=99) 79 | 80 | #test with function generator optimizee 81 | optimizer = GradientDescentOptimizer(self.trajectory, 82 | optimizee_create_individual=optimizee.create_individual, 83 | optimizee_fitness_weights=(0.1,), 84 | parameters=optimizer_parameters, 85 | optimizee_bounding_func=optimizee.bounding_func) 86 | 87 | self.experiment.run_experiment(optimizee=optimizee, 88 | optimizer=optimizer, 89 | optimizer_parameters=optimizer_parameters) 90 | 91 | def suite(): 92 | suite = unittest.TestLoader().loadTestsFromTestCase(RunnerTestCase) 93 | return suite 94 | 95 | 96 | def run(): 97 | runner =
unittest.TextTestRunner(verbosity=2) 98 | runner.run(suite()) 99 | 100 | 101 | if __name__ == "__main__": 102 | run() 103 | -------------------------------------------------------------------------------- /l2l/tests/test_sa_optimizer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from l2l.tests.test_optimizer import OptimizerTestCase 4 | import numpy as np 5 | from l2l.optimizers.simulatedannealing.optimizer import SimulatedAnnealingParameters, SimulatedAnnealingOptimizer, AvailableCoolingSchedules 6 | 7 | 8 | class SAOptimizerTestCase(OptimizerTestCase): 9 | 10 | def test_sa(self): 11 | optimizer_parameters = SimulatedAnnealingParameters(n_parallel_runs=1, noisy_step=.03, temp_decay=.99, n_iteration=1, 12 | stop_criterion=np.inf, seed=np.random.randint(1e5), 13 | cooling_schedule=AvailableCoolingSchedules.QUADRATIC_ADDAPTIVE) 14 | 15 | optimizer = SimulatedAnnealingOptimizer(self.trajectory_functionGenerator, optimizee_create_individual=self.optimizee_functionGenerator.create_individual, 16 | optimizee_fitness_weights=(-1,), 17 | parameters=optimizer_parameters, 18 | optimizee_bounding_func=self.optimizee_functionGenerator.bounding_func) 19 | self.assertIsNotNone(optimizer.parameters) 20 | self.assertIsNotNone(self.experiment_functionGenerator) 21 | 22 | try: 23 | 24 | self.experiment_functionGenerator.run_experiment(optimizee=self.optimizee_functionGenerator, 25 | optimizee_parameters=self.optimizee_functionGenerator_parameters, 26 | optimizer=optimizer, 27 | optimizer_parameters=optimizer_parameters) 28 | except Exception as e: 29 | self.fail(type(e).__name__) 30 | 31 | #activeWait Optimizee 32 | optimizer = SimulatedAnnealingOptimizer(self.trajectory_activeWait, optimizee_create_individual=self.optimizee_activeWait.create_individual, 33 | optimizee_fitness_weights=(-1,), 34 | parameters=optimizer_parameters, 35 | optimizee_bounding_func=self.optimizee_activeWait.bounding_func) 36 | 37 | try: 38 | self.experiment_activeWait.run_experiment(optimizee=self.optimizee_activeWait, 39 | optimizee_parameters=self.optimizee_activeWait_parameters, 40 | optimizer=optimizer, 41 | optimizer_parameters=optimizer_parameters) 42 | except Exception as e: 43 | self.fail(type(e).__name__) 44 | 45 | def suite(): 46 | suite = unittest.TestLoader().loadTestsFromTestCase(SAOptimizerTestCase) 47 | return suite 48 | 49 | 50 | def run(): 51 | runner = unittest.TextTestRunner(verbosity=2) 52 | runner.run(suite()) 53 | 54 | 55 | if __name__ == "__main__": 56 | run() 57 | -------------------------------------------------------------------------------- /l2l/tests/test_setup.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from l2l.utils.runner import prepare_optimizee 4 | from l2l.paths import Paths 5 | from l2l.optimizees.functions.benchmarked_functions import BenchmarkedFunctions 6 | from l2l.optimizees.functions.optimizee import FunctionGeneratorOptimizee 7 | from l2l.utils.experiment import Experiment 8 | 9 | import os 10 | 11 | 12 | class SetupTestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | home_path = os.environ.get("HOME") 16 | self.root_dir_path = os.path.join(home_path, 'results') 17 | self.experiment = Experiment(root_dir_path=self.root_dir_path) 18 | runner_params = {} 19 | try: 20 | self.trajectory, _ = self.experiment.prepare_experiment( 21 | name='test_trajectory', 22 | log_stdout=True, 23 | add_time=True, 24 | automatic_storing=True, 25 | runner_params=runner_params, 26 |
27 | except FileNotFoundError as fe:
28 | self.fail(
29 | "{} \n L2L is not well configured. Missing path file.".format(
30 | fe))
31 | self.paths = self.experiment.paths
32 | 
33 | def test_paths(self):
34 | self.assertIsNotNone(self.paths)
35 | self.assertIsNotNone(Paths.simulation_path)
36 | self.assertTrue(os.path.exists(self.root_dir_path))
37 | 
38 | def test_environment_trajectory_setup(self):
39 | self.assertIsNotNone(self.trajectory.individual)
40 | 
41 | def test_trajectory_params_setup(self):
42 | self.trajectory.f_add_parameter_group("Test_params", "Contains Test parameters")
43 | self.trajectory.f_add_parameter_to_group("Test_params", "param1", "value1")
44 | self.assertEqual("value1", self.trajectory.Test_params.params["param1"])
45 | 
46 | def test_runner_setup(self):
47 | self.experiment = Experiment(root_dir_path=self.root_dir_path)
48 | self.trajectory, _ = self.experiment.prepare_experiment(
49 | name='test_trajectory',
50 | trajectory='test_trajectory',
51 | filename=".",
52 | file_title='{} data'.format('test_trajectory'),
53 | comment='{} data'.format('test_trajectory'),
54 | add_time=True,
55 | automatic_storing=True,
56 | log_stdout=False,
57 | overwrite=True
58 | )
59 | 
60 | ## Benchmark function
61 | function_id = 14
62 | bench_functs = BenchmarkedFunctions()
63 | (benchmark_name, benchmark_function), benchmark_parameters = \
64 | bench_functs.get_function_by_index(function_id, noise=True)
65 | 
66 | optimizee_seed = 1
67 | optimizee = FunctionGeneratorOptimizee(self.trajectory, benchmark_function,
68 | seed=optimizee_seed)
69 | 
70 | prepare_optimizee(optimizee, self.paths.root_dir_path)
71 | 
72 | fname = os.path.join(self.paths.root_dir_path, "optimizee.bin")
73 | 
74 | try:
75 | f = open(fname, "rb")  # the serialized optimizee is binary, so open in binary mode
76 | f.close()
77 | except Exception:
78 | self.fail("expected serialized optimizee at {}".format(fname))
79 | 
80 | 
81 | def suite():
82 | suite = unittest.TestLoader().loadTestsFromTestCase(SetupTestCase)
83 | return suite
84 | 
85 | 
86 | def run():
87 | runner = unittest.TextTestRunner(verbosity=2)
88 | runner.run(suite())
89 | 
90 | 
91 | if __name__ == "__main__":
92 | run()
93 | 
--------------------------------------------------------------------------------
/l2l/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meta-optimization/L2L/54d473aa36e5e14b5f2b7c3ed8d5c547b475e33c/l2l/utils/__init__.py
--------------------------------------------------------------------------------
/l2l/utils/environment.py:
--------------------------------------------------------------------------------
1 | from l2l.utils.trajectory import Trajectory
2 | import logging
3 | from l2l.utils.runner import Runner
4 | 
5 | logger = logging.getLogger("utils.environment")
6 | 
7 | 
8 | class Environment:
9 | """
10 | The Environment class takes the place of the pypet Environment and provides the required functionality
11 | to execute the inner loop.
12 | Based on the pypet environment concept: https://github.com/SmokinCaterpillar/pypet
13 | """
14 | 
15 | def __init__(self, *args, **keyword_args):
16 | """
17 | Initializes an Environment
18 | :param args: arguments passed to the environment initialization
19 | :param keyword_args: arguments by keyword. Relevant keywords are trajectory and filename.
20 | The trajectory object holds individual parameters and history per generation of the exploration process.
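A construction sketch (values are illustrative, not defaults; note that when 'trajectory'
is passed, the 'debug', 'stop_run' and 'timeout' keywords are read as well)::

    env = Environment(trajectory='my-trajectory', filename='/tmp/results',
                      multiprocessing=True, debug=False, stop_run=False, timeout=True)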
21 | """
22 | if 'trajectory' in keyword_args:
23 | self.trajectory = Trajectory(name=keyword_args['trajectory'], debug=keyword_args['debug'],
24 | stop_run=keyword_args['stop_run'], timeout=keyword_args['timeout'])
25 | if 'checkpoint' in keyword_args:
26 | self.trajectory = keyword_args["checkpoint"]
27 | self.trajectory.is_loaded = True
28 | if 'filename' in keyword_args:
29 | self.filename = keyword_args['filename']
30 | self.postprocessing = None
31 | self.multiprocessing = True
32 | if 'multiprocessing' in keyword_args:
33 | self.multiprocessing = keyword_args['multiprocessing']
34 | self.run_id = 0
35 | self.enable_logging()
36 | 
37 | def run(self):
38 | """
39 | Runs all generations of the optimizees using the runner.
40 | """
41 | result = {}
42 | logger.info(f"Environment run starting Runner for n iterations: {self.trajectory.par['n_iteration']}")
43 | runner = Runner(self.trajectory, self.trajectory.par['n_iteration']+self.trajectory.individual.generation)
44 | for it in range(self.trajectory.individual.generation, self.trajectory.par['n_iteration']+self.trajectory.individual.generation):
45 | if self.multiprocessing:
46 | # Multiprocessing is done through the runner
47 | result[it] = []
48 | logger.info(f"Iteration: {it+1}/{self.trajectory.par['n_iteration']}")
49 | # execute run
50 | try:
51 | result[it] = runner.run(self.trajectory, it)
52 | except Exception as e:
53 | if self.logging:
54 | logger.exception("Error launching run: " + str(e))
55 | raise e
56 | 
57 | # Add results to the trajectory
58 | self.trajectory.results.f_add_result_to_group("all_results", it, result[it])
59 | self.trajectory.current_results = result[it]
60 | # Update trajectory file
61 | runner.dump_traj(self.trajectory)
62 | # Perform the postprocessing step in order to generate the new parameter set
63 | self.postprocessing(self.trajectory, result[it])
64 | runner.close_workers()
65 | 
66 | def add_postprocessing(self, func):
67 | """
68 | Function to add a postprocessing step
69 | :param func: the function which performs the postprocessing. Postprocessing is the step where the results
70 | are assessed in order to produce a new set of parameters for the next generation.
71 | """
72 | self.postprocessing = func
73 | 
74 | def enable_logging(self):
75 | """
76 | Function to enable logging
77 | TODO think about removing this.
78 | """
79 | self.logging = True
80 | 
81 | def disable_logging(self):
82 | """
83 | Function to disable logging
84 | """
85 | self.logging = False
86 | 
--------------------------------------------------------------------------------
/l2l/utils/experiment.py:
--------------------------------------------------------------------------------
1 | import logging.config
2 | import os
3 | import pickle
4 | import shutil
5 | 
6 | from l2l.utils.environment import Environment
7 | 
8 | from l2l.logging_tools import create_shared_logger_data, configure_loggers
9 | from l2l.paths import Paths
10 | import l2l.utils.runner as runner
11 | 
12 | 
13 | class Experiment(object):
14 | def __init__(self, root_dir_path):
15 | """
16 | Prepares and starts the l2l simulation.
17 | 
18 | For an example see `L2L/bin/l2l-template.py`
19 | 
20 | :param root_dir_path: str, Path to the results folder. Accepts relative
21 | paths. Will check if the folder exists and create if not.
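A minimal usage sketch (the results folder name is illustrative), following the
same pattern as l2l/tests/test_setup.py::

    experiment = Experiment(root_dir_path='results')
    traj, runner_params = experiment.prepare_experiment(name='L2L-run',
                                                        runner_params={})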
22 | """
23 | self.root_dir_path = os.path.abspath(root_dir_path)
24 | self.logger = logging.getLogger('utils.experiment')
25 | self.paths = None
26 | self.env = None
27 | self.traj = None
28 | self.optimizee = None
29 | self.optimizer = None
30 | 
31 | 
32 | def prepare_experiment(self, **kwargs):
33 | """
34 | Prepare the experiment by creating the environment and the trajectory.
35 | :param kwargs: optional dictionary, contains
36 | - name: str, name of the run, Default: L2L-run
37 | - trajectory_name: str, name of the trajectory, Default: trajectory
38 | - checkpoint: object, trajectory object
39 | - log_stdout: bool, if stdout should be sent to logs, Default: False
40 | - runner_params: dict, user-specified parameters for the runner.
41 | See notes section for default runner parameters
42 | - multiprocessing, bool, enable multiprocessing, Default: False
43 | - debug, bool, enable verbose mode to get more detailed logs for debugging,
44 | Default: False
45 | - stop_run, bool, when an error occurs the execution is stopped, Default: False
46 | - timeout, bool, stops execution after 2 hours if it is not finished by then,
47 | Default: True
48 | - overwrite, bool, specifies whether existing files should be overwritten,
49 | Default: False
50 | :return traj, trajectory object
51 | :return all_runner_params, dict, a dictionary with all parameters for the runner
52 | given by the user and default ones
53 | 
54 | :notes
55 | Default runner parameters are:
56 | - srun: ""
57 | - exec: 'python3 "<simulation_path>/run_optimizee.py"'
58 | - max_workers: 32
59 | - work_path: self.paths.root_dir_path,
60 | - paths_obj: self.paths
61 | """
62 | name = kwargs.get('name', 'L2L-run')
63 | if not os.path.isdir(self.root_dir_path):
64 | os.mkdir(os.path.abspath(self.root_dir_path))
65 | print('Created a folder at {}'.format(self.root_dir_path))
66 | 
67 | if 'checkpoint' in kwargs:
68 | self.traj = kwargs['checkpoint']
69 | trajectory_name = self.traj._name
70 | else:
71 | trajectory_name = kwargs.get('trajectory_name', 'trajectory')
72 | 
73 | self.paths = Paths(name, {},
74 | root_dir_path=self.root_dir_path,
75 | suffix="-" + trajectory_name)
76 | 
77 | overwrite = kwargs.get('overwrite', False)
78 | if os.path.isdir(self.paths.output_dir_path):
79 | if overwrite:
80 | ready_path = 'simulation/ready_files'
81 | if os.path.isdir(os.path.join(self.paths.output_dir_path, ready_path)):
82 | shutil.rmtree(os.path.join(self.paths.output_dir_path, ready_path))
83 | else:
84 | raise Exception("There are already existing output files in this directory. Please change the path specification.")
85 | 
86 | print("All output logs can be found in directory ",
87 | self.paths.logs_path)
88 | 
89 | # Create an environment that handles running our simulation
90 | # This initializes an environment
91 | if self.traj:
92 | self.env = Environment(
93 | checkpoint=self.traj,
94 | filename=self.paths.output_dir_path,
95 | file_title='{} data'.format(name),
96 | comment='{} data'.format(name),
97 | add_time=True,
98 | automatic_storing=True,
99 | log_stdout=kwargs.get('log_stdout', False),  # Sends stdout to logs
100 | multiprocessing=kwargs.get('multiprocessing', True),
101 | debug=kwargs.get('debug', False),
102 | stop_run=kwargs.get('stop_run', False),
103 | timeout=kwargs.get('timeout', True)
104 | )
105 | else:
106 | self.env = Environment(
107 | trajectory=trajectory_name,
108 | filename=self.paths.output_dir_path,
109 | file_title='{} data'.format(name),
110 | comment='{} data'.format(name),
111 | add_time=True,
112 | automatic_storing=True,
113 | log_stdout=kwargs.get('log_stdout', False),  # Sends stdout to logs
114 | multiprocessing=kwargs.get('multiprocessing', True),
115 | debug=kwargs.get('debug', False),
116 | stop_run=kwargs.get('stop_run', False),
117 | timeout=kwargs.get('timeout', True)
118 | )
119 | # Get the trajectory from the environment
120 | self.traj = self.env.trajectory
121 | 
122 | create_shared_logger_data(
123 | logger_names=['optimizers', 'utils'],
124 | log_levels=['INFO', 'INFO'],
125 | log_to_consoles=[True, True],
126 | sim_name=name,
127 | log_directory=self.paths.logs_path)
128 | configure_loggers()
129 | 
130 | 
131 | default_runner_params = {
132 | "srun": "",
133 | "exec": 'python3 "' + os.path.join(self.paths.simulation_path, "run_optimizee.py") + '"',
134 | "max_workers": 32,
135 | "work_path": self.paths.root_dir_path,
136 | "paths_obj": self.paths,
137 | }
138 | 
139 | # Will contain all runner parameters
140 | all_runner_params = {}
141 | self.traj.f_add_parameter_group("runner_params",
142 | "Contains runner parameters")
143 | 
144 | 
145 | 
146 | # Go through the parameter dictionary and add to the trajectory
147 | if kwargs.get('runner_params'):
148 | for k, v in kwargs['runner_params'].items():
149 | if k == "exec":
150 | val = v + ' "' + os.path.join(self.paths.simulation_path,
151 | "run_optimizee.py") + '"'
152 | self.traj.f_add_parameter_to_group("runner_params", k, val)
153 | all_runner_params[k] = val
154 | else:
155 | self.traj.f_add_parameter_to_group("runner_params", k, v)
156 | all_runner_params[k] = v
157 | 
158 | # Default parameters are added if they are not already set by the user
159 | for k, v in default_runner_params.items():
160 | if kwargs.get('runner_params'):
161 | if k not in kwargs.get('runner_params').keys():
162 | self.traj.f_add_parameter_to_group("runner_params", k, v)
163 | all_runner_params[k] = v
164 | if k == "max_workers":
165 | self.logger.info(f"No parameter 'max_workers' given to runner. Using default value {v}.")
166 | 
167 | else:
168 | self.traj.f_add_parameter_to_group("runner_params", k, v)
169 | all_runner_params[k] = v
170 | 
171 | 
172 | 
173 | 
174 | print('Runner parameters used: {}'.format(all_runner_params))
175 | return self.traj, all_runner_params
176 | 
177 | 
178 | 
179 | def run_experiment(self, optimizer, optimizee,
180 | optimizer_parameters=None, optimizee_parameters=None):
181 | """
182 | Runs the simulation with all parameter combinations.
183 | 
184 | Optimizee and optimizer objects are required, as well as their parameters
185 | as namedtuples.
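A sketched call, mirroring the test suite (construction of the optimizee/optimizer
objects and their parameter namedtuples is assumed to have happened beforehand)::

    experiment.run_experiment(optimizee=optimizee,
                              optimizee_parameters=optimizee_parameters,
                              optimizer=optimizer,
                              optimizer_parameters=optimizer_parameters)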
186 | 
187 | :param optimizee: optimizee object
188 | :param optimizee_parameters: Namedtuple, optional, parameters of the optimizee
189 | :param optimizer: optimizer object
190 | :param optimizer_parameters: Namedtuple, optional, parameters of the optimizer
191 | """
192 | ind = optimizee.create_individual()
193 | for key in ind:
194 | if isinstance(ind[key], int):
195 | raise ValueError('Parameter of type integer is not allowed')
196 | self.optimizee = optimizee
197 | self.optimizer = optimizer
198 | 
199 | self.logger.info("Optimizee parameters: %s", optimizee_parameters)
200 | self.logger.info("Optimizer parameters: %s", optimizer_parameters)
201 | runner.prepare_optimizee(optimizee, self.paths.simulation_path)
202 | # Add post processing
203 | self.env.add_postprocessing(optimizer.post_process)
204 | # Run the simulation
205 | self.env.run()
206 | 
207 | def end_experiment(self, optimizer):
208 | """
209 | Ends the experiment and disables the logging
210 | 
211 | :param optimizer: optimizer object
212 | :return traj, trajectory object
213 | :return path, Path object
214 | """
215 | # Outer-loop optimizer end
216 | optimizer.end(self.traj)
217 | # Finally disable logging and close all log-files
218 | self.env.disable_logging()
219 | return self.traj, self.paths
220 | 
221 | def load_trajectory(self, traj_path):
222 | """
223 | Loads a trajectory from a given file
224 | :param traj_path: path to the trajectory file
225 | :return traj: trajectory object
226 | """
227 | # use a context manager so the file is closed even if unpickling fails
228 | with open(traj_path, "rb") as traj_file:
229 | loaded_traj = pickle.load(traj_file)
230 | 
231 | return loaded_traj
232 | 
233 | 
--------------------------------------------------------------------------------
/l2l/utils/groups.py:
--------------------------------------------------------------------------------
1 | from l2l import sdictm
2 | import logging
3 | 
4 | logger = logging.getLogger("utils.groups")
5 | 
6 | 
7 | class ParameterGroup:
8 | """
9 | This class is a dictionary which can be used to store parameters. It is used to match the already existing
10 | pypet interface for the trajectory
11 | """
12 | 
13 | def __init__(self):
14 | self.params = {}
15 | 
16 | def f_add_parameter(self, key, val, comment=""):
17 | """
18 | Adds parameter with name key and value val. The comment is ignored for the moment but kept for
19 | compatibility with the pypet groups
20 | :param key: Name of the parameter
21 | :param val: Value of the parameter
22 | :param comment: Ignored for the moment
23 | """
24 | self.params[key] = val
25 | 
26 | def __str__(self):
27 | return str(self.params)
28 | 
29 | def __repr__(self):
30 | return self.params.__repr__()
31 | 
32 | def __getstate__(self):
33 | return self.__dict__
34 | 
35 | def __setstate__(self, d):
36 | self.__dict__.update(d)
37 | 
38 | 
39 | class ResultGroup(sdictm):
40 | """
41 | ResultGroup is a class derived from sdictm, which is a dictionary with parameters accessible using . (dot)
42 | Used to keep the interface with pypet trajectory result groups
43 | """
44 | 
45 | def __init__(self):
46 | super(ResultGroup, self).__init__({})
47 | self._data = {}
48 | 
49 | def f_add_result_group(self, name, comment=""):
50 | """
51 | Adds a new results group to this dictionary
52 | :param name: Name of the new result group
53 | :param comment: Ignored for the moment
54 | """
55 | self._data[name] = ResultGroup()
56 | 
57 | def f_add_result(self, key, val, comment=""):
58 | """
59 | Adds a result to a result group. The name of the result group precedes the result name and
60 | they are separated by a . (dot)
61 | In case this result is not to be part of a result group, it is added to the root level of the dictionary.
62 | :param key: the name of the result to add. Preceded by a result group name if it is to be added to an existing
63 | group. Produces an error if the value is to be added to a non-existent result group.
64 | :param val: Value of the result to be added
65 | :exception: Produces an exception if the value is to be added to a non-existent result group.
66 | """
67 | if '.' in str(key):
68 | subkey = key.split('.')
69 | if subkey[0] in self._data.keys():
70 | self._data[subkey[0]].f_add_result(subkey[1], val)
71 | else:
72 | logger.exception("Key not found when adding to result group")
73 | raise Exception("Group name not found when adding value to result group")
74 | else:
75 | self._data[key] = val
76 | 
77 | def f_add_result_to_group(self, group_name, key, val, comment=""):
78 | """
79 | Adds a result to a group.
80 | :param group_name: name of the group
81 | :param key: the name of the result to add.
82 | :param val: value of the result to add
83 | :exception: Produces an exception if the value is to be added to a non-existent result group.
84 | """
85 | if group_name in self._data.keys():
86 | self._data[group_name].f_add_result(key, val)
87 | else:
88 | logger.exception("Key not found when adding to result group")
89 | raise Exception("Group name not found when adding value to result group")
90 | 
91 | def __str__(self):
92 | return str(self.results)
93 | 
94 | def __getstate__(self):
95 | return self.__dict__
96 | 
97 | def __setstate__(self, d):
98 | self.__dict__.update(d)
99 | 
100 | 
101 | class ParameterDict(sdictm):
102 | """
103 | ParameterDict is a class derived from sdictm which takes care of holding parameters in the trajectory.
104 | The interface was kept to match the one from pypet parameters.
105 | """
106 | 
107 | def __init__(self, traj):
108 | super(ParameterDict, self).__init__({})
109 | self.trajectory = traj
110 | 
111 | def __getattr__(self, attr):
112 | """
113 | This function has been overwritten in order to allow a particular access to values in the dictionary.
114 | If attr is ind_idx, it returns the id from the current result with index trajectory.v_idx
115 | :param attr: Contains the attribute name to be accessed
116 | :return: the value of the attribute name indicated by attr
117 | """
118 | if attr == '__getstate__':
119 | raise AttributeError()
120 | if attr == 'ind_idx':
121 | return [i[0] for i in self.trajectory.current_results].index(self.trajectory.v_idx)
122 | if attr in self._INSTANCE_VAR_LIST:
123 | return object.__getattribute__(self, attr)
124 | if '.' in attr:
125 | # This is triggered exclusively in the case where __getattr__ is called from __getitem__
126 | attrs = attr.split('.')
127 | ret = self._data.get(attrs[0])
128 | for at in attrs[1:]:
129 | ret = ret[at]
130 | else:
131 | ret = self._data.get(attr)
132 | if ret is None:
133 | new_val = self.__class__({})
134 | self._data[attr] = new_val
135 | ret = new_val
136 | return ret
137 | 
138 | def __getstate__(self):
139 | return self.__dict__
140 | 
141 | def __setstate__(self, d):
142 | self.__dict__.update(d)
143 | 
--------------------------------------------------------------------------------
/l2l/utils/individual.py:
--------------------------------------------------------------------------------
1 | from l2l.utils.groups import ParameterGroup
2 | 
3 | 
4 | class Individual(ParameterGroup):
5 | """
6 | This class represents individuals in the parameter search. It derives from a ParameterGroup.
7 | The main elements which make an individual are the ID of its generation, its individual ID and the
8 | params specific for its run.
9 | """
10 | 
11 | def __init__(self, generation=0, ind_idx=0, params=()):
12 | """
13 | Initialization of the individual
14 | :param generation: ID of the generation to which this individual belongs
15 | :param ind_idx: global ID of the individual
16 | :param params: individual parameters which are used to execute the optimizee simulate function
17 | """
18 | self.params = {}
19 | for i in params:
20 | self.f_add_parameter(*next(iter(i.items())))  # dict views are not indexable on Python 3
21 | self.generation = generation
22 | self.ind_idx = ind_idx
23 | 
24 | def __getattr__(self, attr):
25 | if attr == 'keys':
26 | return self.params.keys()
27 | ret = self.params.get('individual.' + attr)
28 | return ret
29 | 
30 | def __getitem__(self, key):
31 | return self.__getattr__(key)
32 | 
33 | def __getstate__(self):
34 | return self.__dict__
35 | 
36 | def __setstate__(self, d):
37 | self.__dict__.update(d)
38 | 
--------------------------------------------------------------------------------
/l2l/utils/tools.py:
--------------------------------------------------------------------------------
1 | # ***************************************************************************************
2 | # * Title: pypet/cartesian_product
3 | # * Author: Robert Meyer
4 | # * Date: 2018
5 | # * Code version: 0.4.3
6 | # * Availability: https://github.com/SmokinCaterpillar/pypet
7 | # LICENCE:
8 | #
9 | # Copyright (c) 2013-2018, Robert Meyer
10 | # All rights reserved.
11 | #
12 | # Redistribution and use in source and binary forms, with or without modification,
13 | # are permitted provided that the following conditions are met:
14 | #
15 | # Redistributions of source code must retain the above copyright notice, this
16 | # list of conditions and the following disclaimer.
17 | #
18 | # Redistributions in binary form must reproduce the above copyright notice, this
19 | # list of conditions and the following disclaimer in the documentation and/or
20 | # other materials provided with the distribution.
21 | #
22 | # Neither the name of the author nor the names of other contributors
23 | # may be used to endorse or promote products
24 | # derived from this software without specific prior written permission.
25 | #
26 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
27 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
28 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
29 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
30 | # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
31 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
32 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
33 | # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
34 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 | # *
37 | # ***************************************************************************************/
38 | 
39 | import itertools as itools
40 | 
41 | 
42 | def cartesian_product(parameter_dict, combined_parameters=()):
43 | """ Generates a Cartesian product of the input parameter dictionary.
44 | 
45 | For example:
46 | 
47 | >>> print(cartesian_product({'param1': [1, 2, 3], 'param2': [42.0, 52.5]}))
48 | {'param1': [1, 1, 2, 2, 3, 3], 'param2': [42.0, 52.5, 42.0, 52.5, 42.0, 52.5]}
49 | 
50 | :param parameter_dict:
51 | 
52 | Dictionary containing parameter names as keys and iterables of data to explore.
53 | 
54 | :param combined_parameters:
55 | 
56 | Tuple of tuples. Defines the order of the parameters and parameters that are
57 | linked together.
58 | If an inner tuple contains only a single item, you can spare the
59 | inner tuple brackets.
60 | 
61 | 
62 | For example:
63 | 
64 | >>> print(cartesian_product({'param1': [42.0, 52.5], 'param2': ['a', 'b'], 'param3': [1, 2, 3]}, ('param3', ('param1', 'param2'))))
65 | {'param3': [1, 1, 2, 2, 3, 3], 'param1': [42.0, 52.5, 42.0, 52.5, 42.0, 52.5], 'param2': ['a', 'b', 'a', 'b', 'a', 'b']}
66 | 
67 | :returns: Dictionary with cartesian product lists.
68 | 
69 | """
70 | if not combined_parameters:
71 | combined_parameters = list(parameter_dict)
72 | else:
73 | combined_parameters = list(combined_parameters)
74 | 
75 | for idx, item in enumerate(combined_parameters):
76 | if isinstance(item, str):
77 | combined_parameters[idx] = (item,)
78 | 
79 | iterator_list = []
80 | for item_tuple in combined_parameters:
81 | inner_iterator_list = [parameter_dict[key] for key in item_tuple]
82 | zipped_iterator = zip(*inner_iterator_list)
83 | iterator_list.append(zipped_iterator)
84 | 
85 | result_dict = {}
86 | for key in parameter_dict:
87 | result_dict[key] = []
88 | 
89 | cartesian_iterator = itools.product(*iterator_list)
90 | 
91 | for cartesian_tuple in cartesian_iterator:
92 | for idx, item_tuple in enumerate(combined_parameters):
93 | for inneridx, key in enumerate(item_tuple):
94 | result_dict[key].append(cartesian_tuple[idx][inneridx])
95 | 
96 | return result_dict
97 | 
--------------------------------------------------------------------------------
/l2l/utils/trajectory.py:
--------------------------------------------------------------------------------
1 | import time
2 | from l2l.utils.groups import ParameterGroup, ResultGroup, ParameterDict
3 | from l2l.utils.individual import Individual
4 | import logging
5 | 
6 | logger = logging.getLogger("utils.trajectory")
7 | 
8 | 
9 | class Trajectory:
10 | """
11 | The trajectory is a class which holds the history of the parameter space exploration, defines the current
12 | parameters to be explored and holds the results from each execution.
13 | Based on the pypet trajectory concept: https://github.com/SmokinCaterpillar/pypet
14 | """
15 | 
16 | def __init__(self, **keyword_args):
17 | """
18 | Initializes the trajectory. Some parameters are kept to match the interface with the pypet trajectory.
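A usage sketch (names and values are illustrative; this mirrors how
Experiment.prepare_experiment fills the runner parameters)::

    traj = Trajectory(name='trajectory')
    traj.f_add_parameter_group("runner_params", "Contains runner parameters")
    traj.f_add_parameter_to_group("runner_params", "max_workers", 32)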
19 | TODO: remove all irrelevant attributes and simplify the class
20 | """
21 | if 'name' in keyword_args:
22 | self._name = keyword_args['name']
23 | self._timestamp = time.time()
24 | self._parameters = ParameterDict(self)  # Contains all parameters
25 | self._results = {}  # Contains all results
26 | self.individual = Individual()
27 | self.results = ResultGroup()
28 | self.results.f_add_result_group('all_results', "Contains all the results")
29 | self.current_results = {}
30 | self._parameters.parameter_group = {}
31 | self._parameters.parameter = {}
32 | self.individuals = {}
33 | self.v_idx = 0
34 | if 'debug' in keyword_args:
35 | self.debug = keyword_args['debug']
36 | else:
37 | self.debug = False
38 | if 'stop_run' in keyword_args:
39 | self.stop_run = keyword_args['stop_run']
40 | else:
41 | self.stop_run = True
42 | if 'timeout' in keyword_args:
43 | self.timeout = keyword_args['timeout']
44 | else:
45 | self.timeout = False
46 | self.is_loaded = False
47 | self.hall_of_fame = None
48 | self.retry = 0  # needed for testing the restart worker
49 | 
50 | def f_add_parameter_group(self, name, comment=""):
51 | """
52 | Adds a new parameter group
53 | :param name: name of the new parameter group
54 | :param comment: ignored for the moment. Kept to match pypet interface.
55 | """
56 | self._parameters[name] = ParameterGroup()
57 | logger.info("Added new parameter group: " + name)
58 | 
59 | def f_add_parameter_to_group(self, group_name, key, val):
60 | """
61 | Adds a parameter to an already existing group.
62 | 
63 | :param group_name: Name of the group where the parameter should be added
64 | :param key: Name of the parameter to be added
65 | :param val: Value of the parameter
66 | 
67 | Throws an exception if the group does not exist
68 | """
69 | if group_name in self._parameters.keys():
70 | self._parameters[group_name].f_add_parameter(key, val)
71 | else:
72 | logger.exception("Group name not found when adding parameter")
73 | raise Exception("Group name not found when adding parameter to group")
74 | 
75 | def f_add_result(self, key, val, comment=""):
76 | """
77 | Adds a result to the trajectory
78 | :param key: it identifies either a generation params result group or another result
79 | :param val: The value to be added to the results
80 | TODO: verify where the generation_params call is performed
81 | """
82 | if key == 'generation_params':
83 | self.results[key] = ResultGroup()
84 | else:
85 | self._results[key] = val
86 | 
87 | def f_add_parameter(self, key, val, comment=""):
88 | """
89 | Adds a parameter to the trajectory
90 | :param key: Name of the parameter
91 | :param val: Value of the parameter
92 | :param comment: ignored for the moment
93 | """
94 | self._parameters[key] = val
95 | 
96 | def f_add_derived_parameter(self, key, val, comment=""):
97 | """
98 | Adds a derived parameter to the trajectory. Matches the previous pypet interface.
99 | :param key: Name of the parameter
100 | :param val: Value of the parameter
101 | :param comment: ignored for the moment
102 | """
103 | self.f_add_parameter(key, val, comment)
104 | 
105 | def f_expand(self, build_dict, fail_safe=True):
106 | """
107 | The expand function takes care of adding a new generation and individuals to the trajectory
108 | This is a critical function to allow the addition of a new generation, called by the optimizer
109 | from the postprocessing function
110 | :param build_dict: The dictionary containing the new generation id and its individuals
111 | :param fail_safe: Currently ignored
112 | """
113 | params = {}
114 | gen = []
115 | ind_idx = []
116 | for key in build_dict.keys():
117 | if key == 'generation':
118 | gen = build_dict['generation']
119 | elif key == 'ind_idx':
120 | ind_idx = build_dict['ind_idx']
121 | else:
122 | params[key] = build_dict[key]
123 | 
124 | generation = gen[0]
125 | self.individuals[generation] = []
126 | 
127 | for i in ind_idx:
128 | ind = Individual(generation, i, [])
129 | for j in params:
130 | ind.f_add_parameter(j, params[j][i])
131 | self.individuals[generation].append(ind)
132 | logger.info("Expanded trajectory for generation: " + str(generation))
133 | 
134 | def __str__(self):
135 | return str(self._parameters)
136 | 
137 | def __getattr__(self, attr):
138 | """
139 | Handle attribute access like a sdict
140 | :param attr: The attribute to be accessed
141 | :return: the value of the attribute
142 | """
143 | if '.' in attr:
144 | # This is triggered exclusively in the case where __getattr__ is called from __getitem__
145 | attrs = attr.split('.')
146 | ret = self._parameters.get(attrs[0])
147 | for at in attrs[1:]:
148 | ret = ret[at]
149 | elif attr == 'par' or attr == 'parameters':
150 | ret = self._parameters
151 | else:
152 | ret = self._parameters.get(attr, default_value=None)
153 | return ret
154 | 
155 | def __getitem__(self, key):
156 | return self.__getattr__(key)
157 | 
158 | def __getstate__(self):
159 | 
160 | return self.__dict__
161 | 
162 | def __setstate__(self, d):
163 | self.__dict__.update(d)
164 | 
--------------------------------------------------------------------------------
/l2l/version.py:
--------------------------------------------------------------------------------
1 | MAJOR_VERSION = "1.0"
2 | FULL_VERSION = "1.0.0-beta"
3 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | pyyaml
4 | sphinx
5 | sphinxjp.themes.basicstrap
6 | sphinx-rtd-theme
7 | jinja2
8 | gitpython
9 | scikit-learn
10 | flake8
11 | deap
12 | 
--------------------------------------------------------------------------------
/run-style-check.sh:
--------------------------------------------------------------------------------
1 | mkdir -p style-reports
2 | now=$(date +"%m-%d-%Y-%T")
3 | flake8 --ignore=E501,W293,W291,E265,E231,E127,E262,E266 --max-line-length=120 --exclude style-reports/ --tee --output-file style-reports/report-${now}.txt
4 | # echo $?
5 | # E501 line too long (89 > 79 characters)
6 | # W293 blank line contains whitespace
7 | # W291 trailing whitespace
8 | # E265 block comment should start with '# '
9 | # E231 missing whitespace after ':'
10 | # E127 continuation line over-indented for visual indent
11 | # E262 inline comment should start with '# '
12 | # E266 too many leading '#' for block comment
13 | exit $?
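# Usage sketch (assumes flake8 is installed, e.g. from requirements.txt):
#   ./run-style-check.sh
# Each run writes a timestamped report to style-reports/report-<date>.txt
# and exits with flake8's status code.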
14 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | from setuptools import find_packages
3 | 
4 | from l2l.version import FULL_VERSION
5 | 
6 | """
7 | This file installs the l2l package.
8 | Note that it does not perform any installation of the documentation. For this, follow the specified procedure in the
9 | README. For updating the version, update MAJOR_VERSION and FULL_VERSION in l2l/version.py
10 | """
11 | 
12 | 
13 | def get_requirements(filename):
14 | """
15 | Helper function to read the list of requirements from a file
16 | """
17 | dependency_links = []
18 | with open(filename) as requirements_file:
19 | requirements = requirements_file.read().strip('\n').splitlines()
20 | return requirements, dependency_links
21 | 
22 | 
23 | requirements, dependency_links = get_requirements('requirements.txt')
24 | setup(
25 | name="L2L",
26 | version=FULL_VERSION,
27 | packages=find_packages("."),
28 | author="Anand Subramoney, Arjun Rao",
29 | author_email="anand@igi.tugraz.at, arjun@igi.tugraz.at",
30 | description="This module provides the infrastructure to create optimizers and "
31 | "optimizees in order to implement learning-to-learn",
32 | setup_requires=['Cython', 'numpy'],
33 | install_requires=requirements,
34 | provides=['l2l'],
35 | dependency_links=dependency_links,
36 | )
37 | 
38 | 
--------------------------------------------------------------------------------
/test-requirements.txt:
--------------------------------------------------------------------------------
1 | # Packages for runtime
2 | numpy
3 | scipy
4 | pyyaml
5 | sphinx
6 | sphinxjp.themes.basicstrap
7 | jinja2
8 | gitpython
9 | scikit-learn
10 | flake8
11 | deap
12 | # For testing
13 | nose2[coverage_plugin]>=0.6.5
--------------------------------------------------------------------------------
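Note: each module under l2l/tests/ defines a run() entry point behind an
if __name__ == "__main__" guard, so a single suite can be exercised directly once
the package is importable (a sketch; any of the test modules listed in the tree works):

    python -m l2l.tests.test_setup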