├── .gitignore ├── LICENSE ├── README.md ├── a2c_ppo_acktr ├── __init__.py ├── algo │ ├── __init__.py │ ├── a2c_acktr.py │ ├── gail.py │ ├── kfac.py │ └── ppo.py ├── arguments.py ├── distributions.py ├── envs.py ├── model.py ├── storage.py └── utils.py ├── enjoy.py ├── evaluation.py ├── gail_experts ├── README.md └── convert_to_pytorch.py ├── generate_tmux_yaml.py ├── imgs ├── a2c_beamrider.png ├── a2c_breakout.png ├── a2c_qbert.png ├── a2c_seaquest.png ├── acktr_beamrider.png ├── acktr_breakout.png ├── acktr_qbert.png ├── acktr_seaquest.png ├── ppo_halfcheetah.png ├── ppo_hopper.png ├── ppo_reacher.png └── ppo_walker.png ├── logs ├── halfcheetah │ ├── halfcheetah-0 │ │ └── 0.monitor.csv │ ├── halfcheetah-1 │ │ └── 0.monitor.csv │ ├── halfcheetah-2 │ │ └── 0.monitor.csv │ ├── halfcheetah-3 │ │ └── 0.monitor.csv │ ├── halfcheetah-4 │ │ └── 0.monitor.csv │ ├── halfcheetah-5 │ │ └── 0.monitor.csv │ ├── halfcheetah-6 │ │ └── 0.monitor.csv │ ├── halfcheetah-7 │ │ └── 0.monitor.csv │ ├── halfcheetah-8 │ │ └── 0.monitor.csv │ └── halfcheetah-9 │ │ └── 0.monitor.csv ├── hopper │ ├── hopper-0 │ │ └── 0.monitor.csv │ ├── hopper-1 │ │ └── 0.monitor.csv │ ├── hopper-2 │ │ └── 0.monitor.csv │ ├── hopper-3 │ │ └── 0.monitor.csv │ ├── hopper-4 │ │ └── 0.monitor.csv │ ├── hopper-5 │ │ └── 0.monitor.csv │ ├── hopper-6 │ │ └── 0.monitor.csv │ ├── hopper-7 │ │ └── 0.monitor.csv │ ├── hopper-8 │ │ └── 0.monitor.csv │ └── hopper-9 │ │ └── 0.monitor.csv ├── reacher │ ├── reacher-0 │ │ └── 0.monitor.csv │ ├── reacher-1 │ │ └── 0.monitor.csv │ ├── reacher-2 │ │ └── 0.monitor.csv │ ├── reacher-3 │ │ └── 0.monitor.csv │ ├── reacher-4 │ │ └── 0.monitor.csv │ ├── reacher-5 │ │ └── 0.monitor.csv │ ├── reacher-6 │ │ └── 0.monitor.csv │ ├── reacher-7 │ │ └── 0.monitor.csv │ ├── reacher-8 │ │ └── 0.monitor.csv │ └── reacher-9 │ │ └── 0.monitor.csv └── walker2d │ ├── walker2d-0 │ └── 0.monitor.csv │ ├── walker2d-1 │ └── 0.monitor.csv │ ├── walker2d-2 │ └── 0.monitor.csv │ ├── walker2d-3 │ └── 
0.monitor.csv │ ├── walker2d-4 │ └── 0.monitor.csv │ ├── walker2d-5 │ └── 0.monitor.csv │ ├── walker2d-6 │ └── 0.monitor.csv │ ├── walker2d-7 │ └── 0.monitor.csv │ ├── walker2d-8 │ └── 0.monitor.csv │ └── walker2d-9 │ └── 0.monitor.csv ├── main.py ├── requirements.txt ├── run_all.yaml ├── setup.py ├── time_limit_logs ├── halfcheetah │ ├── halfcheetah-0 │ │ └── 0.monitor.csv │ ├── halfcheetah-1 │ │ └── 0.monitor.csv │ ├── halfcheetah-2 │ │ └── 0.monitor.csv │ ├── halfcheetah-3 │ │ └── 0.monitor.csv │ ├── halfcheetah-4 │ │ └── 0.monitor.csv │ ├── halfcheetah-5 │ │ └── 0.monitor.csv │ ├── halfcheetah-6 │ │ └── 0.monitor.csv │ ├── halfcheetah-7 │ │ └── 0.monitor.csv │ ├── halfcheetah-8 │ │ └── 0.monitor.csv │ ├── halfcheetah-9 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-0 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-1 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-2 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-3 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-4 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-5 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-6 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-7 │ │ └── 0.monitor.csv │ ├── unfixhalfcheetah-8 │ │ └── 0.monitor.csv │ └── unfixhalfcheetah-9 │ │ └── 0.monitor.csv ├── hopper │ ├── hopper-0 │ │ └── 0.monitor.csv │ ├── hopper-1 │ │ └── 0.monitor.csv │ ├── hopper-2 │ │ └── 0.monitor.csv │ ├── hopper-3 │ │ └── 0.monitor.csv │ ├── hopper-4 │ │ └── 0.monitor.csv │ ├── hopper-5 │ │ └── 0.monitor.csv │ ├── hopper-6 │ │ └── 0.monitor.csv │ ├── hopper-7 │ │ └── 0.monitor.csv │ ├── hopper-8 │ │ └── 0.monitor.csv │ ├── hopper-9 │ │ └── 0.monitor.csv │ ├── unfixhopper-0 │ │ └── 0.monitor.csv │ ├── unfixhopper-1 │ │ └── 0.monitor.csv │ ├── unfixhopper-2 │ │ └── 0.monitor.csv │ ├── unfixhopper-3 │ │ └── 0.monitor.csv │ ├── unfixhopper-4 │ │ └── 0.monitor.csv │ ├── unfixhopper-5 │ │ └── 0.monitor.csv │ ├── unfixhopper-6 │ │ └── 0.monitor.csv │ ├── unfixhopper-7 │ │ └── 0.monitor.csv │ ├── unfixhopper-8 │ │ └── 0.monitor.csv │ └── 
unfixhopper-9 │ │ └── 0.monitor.csv ├── reacher │ ├── reacher-0 │ │ └── 0.monitor.csv │ ├── reacher-1 │ │ └── 0.monitor.csv │ ├── reacher-2 │ │ └── 0.monitor.csv │ ├── reacher-3 │ │ └── 0.monitor.csv │ ├── reacher-4 │ │ └── 0.monitor.csv │ ├── reacher-5 │ │ └── 0.monitor.csv │ ├── reacher-6 │ │ └── 0.monitor.csv │ ├── reacher-7 │ │ └── 0.monitor.csv │ ├── reacher-8 │ │ └── 0.monitor.csv │ ├── reacher-9 │ │ └── 0.monitor.csv │ ├── unfixreacher-0 │ │ └── 0.monitor.csv │ ├── unfixreacher-1 │ │ └── 0.monitor.csv │ ├── unfixreacher-2 │ │ └── 0.monitor.csv │ ├── unfixreacher-3 │ │ └── 0.monitor.csv │ ├── unfixreacher-4 │ │ └── 0.monitor.csv │ ├── unfixreacher-5 │ │ └── 0.monitor.csv │ ├── unfixreacher-6 │ │ └── 0.monitor.csv │ ├── unfixreacher-7 │ │ └── 0.monitor.csv │ ├── unfixreacher-8 │ │ └── 0.monitor.csv │ └── unfixreacher-9 │ │ └── 0.monitor.csv └── walker2d │ ├── unfixwalker2d-0 │ └── 0.monitor.csv │ ├── unfixwalker2d-1 │ └── 0.monitor.csv │ ├── unfixwalker2d-2 │ └── 0.monitor.csv │ ├── unfixwalker2d-3 │ └── 0.monitor.csv │ ├── unfixwalker2d-4 │ └── 0.monitor.csv │ ├── unfixwalker2d-5 │ └── 0.monitor.csv │ ├── unfixwalker2d-6 │ └── 0.monitor.csv │ ├── unfixwalker2d-7 │ └── 0.monitor.csv │ ├── unfixwalker2d-8 │ └── 0.monitor.csv │ ├── unfixwalker2d-9 │ └── 0.monitor.csv │ ├── walker2d-0 │ └── 0.monitor.csv │ ├── walker2d-1 │ └── 0.monitor.csv │ ├── walker2d-2 │ └── 0.monitor.csv │ ├── walker2d-3 │ └── 0.monitor.csv │ ├── walker2d-4 │ └── 0.monitor.csv │ ├── walker2d-5 │ └── 0.monitor.csv │ ├── walker2d-6 │ └── 0.monitor.csv │ ├── walker2d-7 │ └── 0.monitor.csv │ ├── walker2d-8 │ └── 0.monitor.csv │ └── walker2d-9 │ └── 0.monitor.csv └── visualize.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | 
downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | trained_models/ 104 | .fuse_hidden* 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Ilya Kostrikov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pytorch-a2c-ppo-acktr 2 | 3 | ## Update (April 12th, 2021) 4 | 5 | PPO is great, but [Soft Actor Critic](https://arxiv.org/abs/1812.05905) can be better for many continuous control tasks. Please check out [my new RL](http://github.com/ikostrikov/jax-rl) repository in jax. 6 | 7 | ## Please use hyper parameters from this readme. With other hyper parameters things might not work (it's RL after all)! 
8 | 9 | This is a PyTorch implementation of 10 | * Advantage Actor Critic (A2C), a synchronous deterministic version of [A3C](https://arxiv.org/pdf/1602.01783v1.pdf) 11 | * Proximal Policy Optimization [PPO](https://arxiv.org/pdf/1707.06347.pdf) 12 | * Scalable trust-region method for deep reinforcement learning using Kronecker-factored approximation [ACKTR](https://arxiv.org/abs/1708.05144) 13 | * Generative Adversarial Imitation Learning [GAIL](https://arxiv.org/abs/1606.03476) 14 | 15 | Also see the OpenAI posts: [A2C/ACKTR](https://blog.openai.com/baselines-acktr-a2c/) and [PPO](https://blog.openai.com/openai-baselines-ppo/) for more information. 16 | 17 | This implementation is inspired by the OpenAI baselines for [A2C](https://github.com/openai/baselines/tree/master/baselines/a2c), [ACKTR](https://github.com/openai/baselines/tree/master/baselines/acktr) and [PPO](https://github.com/openai/baselines/tree/master/baselines/ppo1). It uses the same hyper parameters and the model since they were well tuned for Atari games. 18 | 19 | Please use this bibtex if you want to cite this repository in your publications: 20 | 21 | @misc{pytorchrl, 22 | author = {Kostrikov, Ilya}, 23 | title = {PyTorch Implementations of Reinforcement Learning Algorithms}, 24 | year = {2018}, 25 | publisher = {GitHub}, 26 | journal = {GitHub repository}, 27 | howpublished = {\url{https://github.com/ikostrikov/pytorch-a2c-ppo-acktr-gail}}, 28 | } 29 | 30 | ## Supported (and tested) environments (via [OpenAI Gym](https://gym.openai.com)) 31 | * [Atari Learning Environment](https://github.com/mgbellemare/Arcade-Learning-Environment) 32 | * [MuJoCo](http://mujoco.org) 33 | * [PyBullet](http://pybullet.org) (including Racecar, Minitaur and Kuka) 34 | * [DeepMind Control Suite](https://github.com/deepmind/dm_control) (via [dm_control2gym](https://github.com/martinseilair/dm_control2gym)) 35 | 36 | I highly recommend PyBullet as a free open source alternative to MuJoCo for continuous control tasks. 
37 | 38 | All environments are operated using exactly the same Gym interface. See their documentations for a comprehensive list. 39 | 40 | To use the DeepMind Control Suite environments, set the flag `--env-name dm..`, where `domain_name` and `task_name` are the name of a domain (e.g. `hopper`) and a task within that domain (e.g. `stand`) from the DeepMind Control Suite. Refer to their repo and their [tech report](https://arxiv.org/abs/1801.00690) for a full list of available domains and tasks. Other than setting the task, the API for interacting with the environment is exactly the same as for all the Gym environments thanks to [dm_control2gym](https://github.com/martinseilair/dm_control2gym). 41 | 42 | ## Requirements 43 | 44 | * Python 3 (it might work with Python 2, but I didn't test it) 45 | * [PyTorch](http://pytorch.org/) 46 | * [Stable baselines3](https://github.com/DLR-RM/stable-baselines3) 47 | 48 | In order to install requirements, follow: 49 | 50 | ```bash 51 | # PyTorch 52 | conda install pytorch torchvision -c soumith 53 | 54 | # Other requirements 55 | pip install -r requirements.txt 56 | 57 | # Gym Atari 58 | conda install -c conda-forge gym-atari 59 | ``` 60 | 61 | ## Contributions 62 | 63 | Contributions are very welcome. If you know how to make this code better, please open an issue. If you want to submit a pull request, please open an issue first. Also see a todo list below. 64 | 65 | Also I'm searching for volunteers to run all experiments on Atari and MuJoCo (with multiple random seeds). 66 | 67 | ## Disclaimer 68 | 69 | It's extremely difficult to reproduce results for Reinforcement Learning methods. See ["Deep Reinforcement Learning that Matters"](https://arxiv.org/abs/1709.06560) for more information. I tried to reproduce OpenAI results as closely as possible. However, majors differences in performance can be caused even by minor differences in TensorFlow and PyTorch libraries. 70 | 71 | ### TODO 72 | * Improve this README file. 
Rearrange images. 73 | * Improve performance of KFAC, see kfac.py for more information 74 | * Run evaluation for all games and algorithms 75 | 76 | ## Visualization 77 | 78 | In order to visualize the results use ```visualize.ipynb```. 79 | 80 | 81 | ## Training 82 | 83 | ### Atari 84 | #### A2C 85 | 86 | ```bash 87 | python main.py --env-name "PongNoFrameskip-v4" 88 | ``` 89 | 90 | #### PPO 91 | 92 | ```bash 93 | python main.py --env-name "PongNoFrameskip-v4" --algo ppo --use-gae --lr 2.5e-4 --clip-param 0.1 --value-loss-coef 0.5 --num-processes 8 --num-steps 128 --num-mini-batch 4 --log-interval 1 --use-linear-lr-decay --entropy-coef 0.01 94 | ``` 95 | 96 | #### ACKTR 97 | 98 | ```bash 99 | python main.py --env-name "PongNoFrameskip-v4" --algo acktr --num-processes 32 --num-steps 20 100 | ``` 101 | 102 | ### MuJoCo 103 | 104 | Please always try to use ```--use-proper-time-limits``` flag. It properly handles partial trajectories (see https://github.com/sfujim/TD3/blob/master/main.py#L123). 105 | 106 | #### A2C 107 | 108 | ```bash 109 | python main.py --env-name "Reacher-v2" --num-env-steps 1000000 110 | ``` 111 | 112 | #### PPO 113 | 114 | ```bash 115 | python main.py --env-name "Reacher-v2" --algo ppo --use-gae --log-interval 1 --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --gae-lambda 0.95 --num-env-steps 1000000 --use-linear-lr-decay --use-proper-time-limits 116 | ``` 117 | 118 | #### ACKTR 119 | 120 | ACKTR requires some modifications to be made specifically for MuJoCo. But at the moment, I want to keep this code as unified as possible. Thus, I'm going for better ways to integrate it into the codebase. 
121 | 122 | ## Enjoy 123 | 124 | ### Atari 125 | 126 | ```bash 127 | python enjoy.py --load-dir trained_models/a2c --env-name "PongNoFrameskip-v4" 128 | ``` 129 | 130 | ### MuJoCo 131 | 132 | ```bash 133 | python enjoy.py --load-dir trained_models/ppo --env-name "Reacher-v2" 134 | ``` 135 | 136 | ## Results 137 | 138 | ### A2C 139 | 140 | ![BreakoutNoFrameskip-v4](imgs/a2c_breakout.png) 141 | 142 | ![SeaquestNoFrameskip-v4](imgs/a2c_seaquest.png) 143 | 144 | ![QbertNoFrameskip-v4](imgs/a2c_qbert.png) 145 | 146 | ![beamriderNoFrameskip-v4](imgs/a2c_beamrider.png) 147 | 148 | ### PPO 149 | 150 | 151 | ![BreakoutNoFrameskip-v4](imgs/ppo_halfcheetah.png) 152 | 153 | ![SeaquestNoFrameskip-v4](imgs/ppo_hopper.png) 154 | 155 | ![QbertNoFrameskip-v4](imgs/ppo_reacher.png) 156 | 157 | ![beamriderNoFrameskip-v4](imgs/ppo_walker.png) 158 | 159 | 160 | ### ACKTR 161 | 162 | ![BreakoutNoFrameskip-v4](imgs/acktr_breakout.png) 163 | 164 | ![SeaquestNoFrameskip-v4](imgs/acktr_seaquest.png) 165 | 166 | ![QbertNoFrameskip-v4](imgs/acktr_qbert.png) 167 | 168 | ![beamriderNoFrameskip-v4](imgs/acktr_beamrider.png) 169 | -------------------------------------------------------------------------------- /a2c_ppo_acktr/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/a2c_ppo_acktr/__init__.py -------------------------------------------------------------------------------- /a2c_ppo_acktr/algo/__init__.py: -------------------------------------------------------------------------------- 1 | from .a2c_acktr import A2C_ACKTR 2 | from .ppo import PPO -------------------------------------------------------------------------------- /a2c_ppo_acktr/algo/a2c_acktr.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.optim as optim 4 | 5 | from 
class A2C_ACKTR():
    """Advantage Actor-Critic (A2C), optionally driven by the ACKTR
    (Kronecker-factored trust region) optimizer.

    Args:
        actor_critic: policy network exposing ``evaluate_actions`` and
            ``recurrent_hidden_state_size``.
        value_loss_coef: weight of the value-function loss term.
        entropy_coef: weight of the entropy bonus.
        lr, eps, alpha: RMSprop hyper-parameters (unused when ``acktr=True``).
        max_grad_norm: gradient-clipping threshold (A2C path only).
        acktr: when True, use the KFAC optimizer instead of RMSprop.
    """

    def __init__(self,
                 actor_critic,
                 value_loss_coef,
                 entropy_coef,
                 lr=None,
                 eps=None,
                 alpha=None,
                 max_grad_norm=None,
                 acktr=False):

        self.actor_critic = actor_critic
        self.acktr = acktr

        self.value_loss_coef = value_loss_coef
        self.entropy_coef = entropy_coef

        self.max_grad_norm = max_grad_norm

        if acktr:
            self.optimizer = KFACOptimizer(actor_critic)
        else:
            self.optimizer = optim.RMSprop(
                actor_critic.parameters(), lr, eps=eps, alpha=alpha)

    def update(self, rollouts):
        """Perform one A2C/ACKTR gradient update over a rollout batch.

        Returns:
            (value_loss, action_loss, dist_entropy) as Python floats.
        """
        obs_shape = rollouts.obs.size()[2:]
        action_shape = rollouts.actions.size()[-1]
        num_steps, num_processes, _ = rollouts.rewards.size()

        # Single forward pass over all (step, process) pairs at once.
        values, action_log_probs, dist_entropy, _ = self.actor_critic.evaluate_actions(
            rollouts.obs[:-1].view(-1, *obs_shape),
            rollouts.recurrent_hidden_states[0].view(
                -1, self.actor_critic.recurrent_hidden_state_size),
            rollouts.masks[:-1].view(-1, 1),
            rollouts.actions.view(-1, action_shape))

        values = values.view(num_steps, num_processes, 1)
        action_log_probs = action_log_probs.view(num_steps, num_processes, 1)

        advantages = rollouts.returns[:-1] - values
        value_loss = advantages.pow(2).mean()

        # Policy gradient uses detached advantages so only the value head
        # trains on the TD error.
        action_loss = -(advantages.detach() * action_log_probs).mean()

        if self.acktr and self.optimizer.steps % self.optimizer.Ts == 0:
            # Compute fisher, see Martens 2014
            self.actor_critic.zero_grad()
            pg_fisher_loss = -action_log_probs.mean()

            # randn_like matches device AND dtype of `values`, so this works
            # on CPU, CUDA, or any other backend (the old code only special-
            # cased .cuda()).
            value_noise = torch.randn_like(values)

            sample_values = values + value_noise
            vf_fisher_loss = -(values - sample_values.detach()).pow(2).mean()

            fisher_loss = pg_fisher_loss + vf_fisher_loss
            self.optimizer.acc_stats = True
            fisher_loss.backward(retain_graph=True)
            self.optimizer.acc_stats = False

        self.optimizer.zero_grad()
        (value_loss * self.value_loss_coef + action_loss -
         dist_entropy * self.entropy_coef).backward()

        if not self.acktr:
            nn.utils.clip_grad_norm_(self.actor_critic.parameters(),
                                     self.max_grad_norm)

        self.optimizer.step()

        return value_loss.item(), action_loss.item(), dist_entropy.item()
torch.ones(disc.size()).to(disc.device) 46 | grad = autograd.grad( 47 | outputs=disc, 48 | inputs=mixup_data, 49 | grad_outputs=ones, 50 | create_graph=True, 51 | retain_graph=True, 52 | only_inputs=True)[0] 53 | 54 | grad_pen = lambda_ * (grad.norm(2, dim=1) - 1).pow(2).mean() 55 | return grad_pen 56 | 57 | def update(self, expert_loader, rollouts, obsfilt=None): 58 | self.train() 59 | 60 | policy_data_generator = rollouts.feed_forward_generator( 61 | None, mini_batch_size=expert_loader.batch_size) 62 | 63 | loss = 0 64 | n = 0 65 | for expert_batch, policy_batch in zip(expert_loader, 66 | policy_data_generator): 67 | policy_state, policy_action = policy_batch[0], policy_batch[2] 68 | policy_d = self.trunk( 69 | torch.cat([policy_state, policy_action], dim=1)) 70 | 71 | expert_state, expert_action = expert_batch 72 | expert_state = obsfilt(expert_state.numpy(), update=False) 73 | expert_state = torch.FloatTensor(expert_state).to(self.device) 74 | expert_action = expert_action.to(self.device) 75 | expert_d = self.trunk( 76 | torch.cat([expert_state, expert_action], dim=1)) 77 | 78 | expert_loss = F.binary_cross_entropy_with_logits( 79 | expert_d, 80 | torch.ones(expert_d.size()).to(self.device)) 81 | policy_loss = F.binary_cross_entropy_with_logits( 82 | policy_d, 83 | torch.zeros(policy_d.size()).to(self.device)) 84 | 85 | gail_loss = expert_loss + policy_loss 86 | grad_pen = self.compute_grad_pen(expert_state, expert_action, 87 | policy_state, policy_action) 88 | 89 | loss += (gail_loss + grad_pen).item() 90 | n += 1 91 | 92 | self.optimizer.zero_grad() 93 | (gail_loss + grad_pen).backward() 94 | self.optimizer.step() 95 | return loss / n 96 | 97 | def predict_reward(self, state, action, gamma, masks, update_rms=True): 98 | with torch.no_grad(): 99 | self.eval() 100 | d = self.trunk(torch.cat([state, action], dim=1)) 101 | s = torch.sigmoid(d) 102 | reward = s.log() - (1 - s).log() 103 | if self.returns is None: 104 | self.returns = reward.clone() 105 | 106 | if 
class ExpertDataset(torch.utils.data.Dataset):
    """Random subsample of expert demonstration trajectories for GAIL.

    Loads a dict of tensors (keys 'states', 'actions', 'lengths') saved
    with ``torch.save``, keeps ``num_trajectories`` randomly chosen
    trajectories, and thins each one by ``subsample_frequency`` with a
    random per-trajectory phase.
    """

    def __init__(self, file_name, num_trajectories=4, subsample_frequency=20):
        all_trajectories = torch.load(file_name)

        # Choose which trajectories to keep, uniformly at random.
        chosen = torch.randperm(all_trajectories['states'].size(0))[:num_trajectories]

        # Random phase for each trajectory's subsampling grid.
        # See https://github.com/pytorch/pytorch/issues/14886 —
        # .long() works around a bug in torch v0.4.1.
        start_idx = torch.randint(
            0, subsample_frequency, size=(num_trajectories, )).long()

        self.trajectories = {}
        for key, value in all_trajectories.items():
            selected = value[chosen]
            if key == 'lengths':
                self.trajectories[key] = selected // subsample_frequency
            else:
                self.trajectories[key] = torch.stack([
                    selected[t, start_idx[t]::subsample_frequency]
                    for t in range(num_trajectories)
                ])

        self.i2traj_idx = {}
        self.i2i = {}

        self.length = self.trajectories['lengths'].sum().item()

        # Precompute a flat index -> (trajectory, step) map so that
        # __getitem__ is O(1).
        self.get_idx = []
        traj_idx, step = 0, 0
        for _ in range(self.length):
            while self.trajectories['lengths'][traj_idx].item() <= step:
                step -= self.trajectories['lengths'][traj_idx].item()
                traj_idx += 1
            self.get_idx.append((traj_idx, step))
            step += 1

    def __len__(self):
        return self.length

    def __getitem__(self, i):
        traj_idx, step = self.get_idx[i]
        state = self.trajectories['states'][traj_idx][step]
        action = self.trajectories['actions'][traj_idx][step]
        return state, action
4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | import torch.optim as optim 7 | 8 | from a2c_ppo_acktr.utils import AddBias 9 | 10 | # TODO: In order to make this code faster: 11 | # 1) Implement _extract_patches as a single cuda kernel 12 | # 2) Compute QR decomposition in a separate process 13 | # 3) Actually make a general KFAC optimizer so it fits PyTorch 14 | 15 | 16 | def _extract_patches(x, kernel_size, stride, padding): 17 | if padding[0] + padding[1] > 0: 18 | x = F.pad(x, (padding[1], padding[1], padding[0], 19 | padding[0])).data # Actually check dims 20 | x = x.unfold(2, kernel_size[0], stride[0]) 21 | x = x.unfold(3, kernel_size[1], stride[1]) 22 | x = x.transpose_(1, 2).transpose_(2, 3).contiguous() 23 | x = x.view( 24 | x.size(0), x.size(1), x.size(2), 25 | x.size(3) * x.size(4) * x.size(5)) 26 | return x 27 | 28 | 29 | def compute_cov_a(a, classname, layer_info, fast_cnn): 30 | batch_size = a.size(0) 31 | 32 | if classname == 'Conv2d': 33 | if fast_cnn: 34 | a = _extract_patches(a, *layer_info) 35 | a = a.view(a.size(0), -1, a.size(-1)) 36 | a = a.mean(1) 37 | else: 38 | a = _extract_patches(a, *layer_info) 39 | a = a.view(-1, a.size(-1)).div_(a.size(1)).div_(a.size(2)) 40 | elif classname == 'AddBias': 41 | is_cuda = a.is_cuda 42 | a = torch.ones(a.size(0), 1) 43 | if is_cuda: 44 | a = a.cuda() 45 | 46 | return a.t() @ (a / batch_size) 47 | 48 | 49 | def compute_cov_g(g, classname, layer_info, fast_cnn): 50 | batch_size = g.size(0) 51 | 52 | if classname == 'Conv2d': 53 | if fast_cnn: 54 | g = g.view(g.size(0), g.size(1), -1) 55 | g = g.sum(-1) 56 | else: 57 | g = g.transpose(1, 2).transpose(2, 3).contiguous() 58 | g = g.view(-1, g.size(-1)).mul_(g.size(1)).mul_(g.size(2)) 59 | elif classname == 'AddBias': 60 | g = g.view(g.size(0), g.size(1), -1) 61 | g = g.sum(-1) 62 | 63 | g_ = g * batch_size 64 | return g_.t() @ (g_ / g.size(0)) 65 | 66 | 67 | def update_running_stat(aa, m_aa, momentum): 68 | # Do the trick to keep aa 
unchanged and not create any additional tensors 69 | m_aa *= momentum / (1 - momentum) 70 | m_aa += aa 71 | m_aa *= (1 - momentum) 72 | 73 | 74 | class SplitBias(nn.Module): 75 | def __init__(self, module): 76 | super(SplitBias, self).__init__() 77 | self.module = module 78 | self.add_bias = AddBias(module.bias.data) 79 | self.module.bias = None 80 | 81 | def forward(self, input): 82 | x = self.module(input) 83 | x = self.add_bias(x) 84 | return x 85 | 86 | 87 | class KFACOptimizer(optim.Optimizer): 88 | def __init__(self, 89 | model, 90 | lr=0.25, 91 | momentum=0.9, 92 | stat_decay=0.99, 93 | kl_clip=0.001, 94 | damping=1e-2, 95 | weight_decay=0, 96 | fast_cnn=False, 97 | Ts=1, 98 | Tf=10): 99 | defaults = dict() 100 | 101 | def split_bias(module): 102 | for mname, child in module.named_children(): 103 | if hasattr(child, 'bias') and child.bias is not None: 104 | module._modules[mname] = SplitBias(child) 105 | else: 106 | split_bias(child) 107 | 108 | split_bias(model) 109 | 110 | super(KFACOptimizer, self).__init__(model.parameters(), defaults) 111 | 112 | self.known_modules = {'Linear', 'Conv2d', 'AddBias'} 113 | 114 | self.modules = [] 115 | self.grad_outputs = {} 116 | 117 | self.model = model 118 | self._prepare_model() 119 | 120 | self.steps = 0 121 | 122 | self.m_aa, self.m_gg = {}, {} 123 | self.Q_a, self.Q_g = {}, {} 124 | self.d_a, self.d_g = {}, {} 125 | 126 | self.momentum = momentum 127 | self.stat_decay = stat_decay 128 | 129 | self.lr = lr 130 | self.kl_clip = kl_clip 131 | self.damping = damping 132 | self.weight_decay = weight_decay 133 | 134 | self.fast_cnn = fast_cnn 135 | 136 | self.Ts = Ts 137 | self.Tf = Tf 138 | 139 | self.optim = optim.SGD( 140 | model.parameters(), 141 | lr=self.lr * (1 - self.momentum), 142 | momentum=self.momentum) 143 | 144 | def _save_input(self, module, input): 145 | if torch.is_grad_enabled() and self.steps % self.Ts == 0: 146 | classname = module.__class__.__name__ 147 | layer_info = None 148 | if classname == 
'Conv2d': 149 | layer_info = (module.kernel_size, module.stride, 150 | module.padding) 151 | 152 | aa = compute_cov_a(input[0].data, classname, layer_info, 153 | self.fast_cnn) 154 | 155 | # Initialize buffers 156 | if self.steps == 0: 157 | self.m_aa[module] = aa.clone() 158 | 159 | update_running_stat(aa, self.m_aa[module], self.stat_decay) 160 | 161 | def _save_grad_output(self, module, grad_input, grad_output): 162 | # Accumulate statistics for Fisher matrices 163 | if self.acc_stats: 164 | classname = module.__class__.__name__ 165 | layer_info = None 166 | if classname == 'Conv2d': 167 | layer_info = (module.kernel_size, module.stride, 168 | module.padding) 169 | 170 | gg = compute_cov_g(grad_output[0].data, classname, layer_info, 171 | self.fast_cnn) 172 | 173 | # Initialize buffers 174 | if self.steps == 0: 175 | self.m_gg[module] = gg.clone() 176 | 177 | update_running_stat(gg, self.m_gg[module], self.stat_decay) 178 | 179 | def _prepare_model(self): 180 | for module in self.model.modules(): 181 | classname = module.__class__.__name__ 182 | if classname in self.known_modules: 183 | assert not ((classname in ['Linear', 'Conv2d']) and module.bias is not None), \ 184 | "You must have a bias as a separate layer" 185 | 186 | self.modules.append(module) 187 | module.register_forward_pre_hook(self._save_input) 188 | module.register_backward_hook(self._save_grad_output) 189 | 190 | def step(self): 191 | # Add weight decay 192 | if self.weight_decay > 0: 193 | for p in self.model.parameters(): 194 | p.grad.data.add_(self.weight_decay, p.data) 195 | 196 | updates = {} 197 | for i, m in enumerate(self.modules): 198 | assert len(list(m.parameters()) 199 | ) == 1, "Can handle only one parameter at the moment" 200 | classname = m.__class__.__name__ 201 | p = next(m.parameters()) 202 | 203 | la = self.damping + self.weight_decay 204 | 205 | if self.steps % self.Tf == 0: 206 | # My asynchronous implementation exists, I will add it later. 
class PPO():
    """Proximal Policy Optimization with a clipped surrogate objective
    and (optionally) a PPO2-style clipped value loss.

    Args:
        actor_critic: policy network exposing ``evaluate_actions`` and
            ``is_recurrent``.
        clip_param: PPO clipping range epsilon.
        ppo_epoch: number of optimization epochs per rollout batch.
        num_mini_batch: minibatches per epoch.
        value_loss_coef, entropy_coef: loss-term weights.
        lr, eps: Adam hyper-parameters.
        max_grad_norm: gradient-clipping threshold.
        use_clipped_value_loss: clip value targets around old predictions.
    """

    def __init__(self,
                 actor_critic,
                 clip_param,
                 ppo_epoch,
                 num_mini_batch,
                 value_loss_coef,
                 entropy_coef,
                 lr=None,
                 eps=None,
                 max_grad_norm=None,
                 use_clipped_value_loss=True):

        self.actor_critic = actor_critic

        self.clip_param = clip_param
        self.ppo_epoch = ppo_epoch
        self.num_mini_batch = num_mini_batch

        self.value_loss_coef = value_loss_coef
        self.entropy_coef = entropy_coef

        self.max_grad_norm = max_grad_norm
        self.use_clipped_value_loss = use_clipped_value_loss

        self.optimizer = optim.Adam(actor_critic.parameters(), lr=lr, eps=eps)

    def update(self, rollouts):
        """Run ``ppo_epoch`` epochs of minibatch PPO updates on a rollout.

        Returns:
            Per-minibatch averages of (value loss, action loss, entropy).
        """
        # Advantage estimates, normalized over the whole batch.
        advantages = rollouts.returns[:-1] - rollouts.value_preds[:-1]
        advantages = (advantages - advantages.mean()) / (
            advantages.std() + 1e-5)

        value_loss_epoch = 0
        action_loss_epoch = 0
        dist_entropy_epoch = 0

        for _ in range(self.ppo_epoch):
            if self.actor_critic.is_recurrent:
                data_generator = rollouts.recurrent_generator(
                    advantages, self.num_mini_batch)
            else:
                data_generator = rollouts.feed_forward_generator(
                    advantages, self.num_mini_batch)

            for sample in data_generator:
                (obs_batch, recurrent_hidden_states_batch, actions_batch,
                 value_preds_batch, return_batch, masks_batch,
                 old_action_log_probs_batch, adv_targ) = sample

                # One forward pass evaluates the whole minibatch at once.
                values, action_log_probs, dist_entropy, _ = \
                    self.actor_critic.evaluate_actions(
                        obs_batch, recurrent_hidden_states_batch,
                        masks_batch, actions_batch)

                # Clipped surrogate policy objective.
                ratio = torch.exp(action_log_probs -
                                  old_action_log_probs_batch)
                clipped_ratio = torch.clamp(ratio, 1.0 - self.clip_param,
                                            1.0 + self.clip_param)
                action_loss = -torch.min(ratio * adv_targ,
                                         clipped_ratio * adv_targ).mean()

                if self.use_clipped_value_loss:
                    # Clip the value update around the old predictions and
                    # take the pessimistic (larger) of the two losses.
                    value_pred_clipped = value_preds_batch + (
                        values - value_preds_batch).clamp(
                            -self.clip_param, self.clip_param)
                    value_losses = (values - return_batch).pow(2)
                    value_losses_clipped = (
                        value_pred_clipped - return_batch).pow(2)
                    value_loss = 0.5 * torch.max(
                        value_losses, value_losses_clipped).mean()
                else:
                    value_loss = 0.5 * (return_batch - values).pow(2).mean()

                total_loss = (value_loss * self.value_loss_coef + action_loss
                              - dist_entropy * self.entropy_coef)
                self.optimizer.zero_grad()
                total_loss.backward()
                nn.utils.clip_grad_norm_(self.actor_critic.parameters(),
                                         self.max_grad_norm)
                self.optimizer.step()

                value_loss_epoch += value_loss.item()
                action_loss_epoch += action_loss.item()
                dist_entropy_epoch += dist_entropy.item()

        num_updates = self.ppo_epoch * self.num_mini_batch

        return (value_loss_epoch / num_updates,
                action_loss_epoch / num_updates,
                dist_entropy_epoch / num_updates)
action='store_true', 46 | default=False, 47 | help='use generalized advantage estimation') 48 | parser.add_argument( 49 | '--gae-lambda', 50 | type=float, 51 | default=0.95, 52 | help='gae lambda parameter (default: 0.95)') 53 | parser.add_argument( 54 | '--entropy-coef', 55 | type=float, 56 | default=0.01, 57 | help='entropy term coefficient (default: 0.01)') 58 | parser.add_argument( 59 | '--value-loss-coef', 60 | type=float, 61 | default=0.5, 62 | help='value loss coefficient (default: 0.5)') 63 | parser.add_argument( 64 | '--max-grad-norm', 65 | type=float, 66 | default=0.5, 67 | help='max norm of gradients (default: 0.5)') 68 | parser.add_argument( 69 | '--seed', type=int, default=1, help='random seed (default: 1)') 70 | parser.add_argument( 71 | '--cuda-deterministic', 72 | action='store_true', 73 | default=False, 74 | help="sets flags for determinism when using CUDA (potentially slow!)") 75 | parser.add_argument( 76 | '--num-processes', 77 | type=int, 78 | default=16, 79 | help='how many training CPU processes to use (default: 16)') 80 | parser.add_argument( 81 | '--num-steps', 82 | type=int, 83 | default=5, 84 | help='number of forward steps in A2C (default: 5)') 85 | parser.add_argument( 86 | '--ppo-epoch', 87 | type=int, 88 | default=4, 89 | help='number of ppo epochs (default: 4)') 90 | parser.add_argument( 91 | '--num-mini-batch', 92 | type=int, 93 | default=32, 94 | help='number of batches for ppo (default: 32)') 95 | parser.add_argument( 96 | '--clip-param', 97 | type=float, 98 | default=0.2, 99 | help='ppo clip parameter (default: 0.2)') 100 | parser.add_argument( 101 | '--log-interval', 102 | type=int, 103 | default=10, 104 | help='log interval, one log per n updates (default: 10)') 105 | parser.add_argument( 106 | '--save-interval', 107 | type=int, 108 | default=100, 109 | help='save interval, one save per n updates (default: 100)') 110 | parser.add_argument( 111 | '--eval-interval', 112 | type=int, 113 | default=None, 114 | help='eval interval, 
one eval per n updates (default: None)') 115 | parser.add_argument( 116 | '--num-env-steps', 117 | type=int, 118 | default=10e6, 119 | help='number of environment steps to train (default: 10e6)') 120 | parser.add_argument( 121 | '--env-name', 122 | default='PongNoFrameskip-v4', 123 | help='environment to train on (default: PongNoFrameskip-v4)') 124 | parser.add_argument( 125 | '--log-dir', 126 | default='/tmp/gym/', 127 | help='directory to save agent logs (default: /tmp/gym)') 128 | parser.add_argument( 129 | '--save-dir', 130 | default='./trained_models/', 131 | help='directory to save agent logs (default: ./trained_models/)') 132 | parser.add_argument( 133 | '--no-cuda', 134 | action='store_true', 135 | default=False, 136 | help='disables CUDA training') 137 | parser.add_argument( 138 | '--use-proper-time-limits', 139 | action='store_true', 140 | default=False, 141 | help='compute returns taking into account time limits') 142 | parser.add_argument( 143 | '--recurrent-policy', 144 | action='store_true', 145 | default=False, 146 | help='use a recurrent policy') 147 | parser.add_argument( 148 | '--use-linear-lr-decay', 149 | action='store_true', 150 | default=False, 151 | help='use a linear schedule on the learning rate') 152 | args = parser.parse_args() 153 | 154 | args.cuda = not args.no_cuda and torch.cuda.is_available() 155 | 156 | assert args.algo in ['a2c', 'ppo', 'acktr'] 157 | if args.recurrent_policy: 158 | assert args.algo in ['a2c', 'ppo'], \ 159 | 'Recurrent policy is not implemented for ACKTR' 160 | 161 | return args 162 | -------------------------------------------------------------------------------- /a2c_ppo_acktr/distributions.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | import torch 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | 7 | from a2c_ppo_acktr.utils import AddBias, init 8 | 9 | """ 10 | Modify standard PyTorch distributions so they are compatible with this 
code. 11 | """ 12 | 13 | # 14 | # Standardize distribution interfaces 15 | # 16 | 17 | # Categorical 18 | class FixedCategorical(torch.distributions.Categorical): 19 | def sample(self): 20 | return super().sample().unsqueeze(-1) 21 | 22 | def log_probs(self, actions): 23 | return ( 24 | super() 25 | .log_prob(actions.squeeze(-1)) 26 | .view(actions.size(0), -1) 27 | .sum(-1) 28 | .unsqueeze(-1) 29 | ) 30 | 31 | def mode(self): 32 | return self.probs.argmax(dim=-1, keepdim=True) 33 | 34 | 35 | # Normal 36 | class FixedNormal(torch.distributions.Normal): 37 | def log_probs(self, actions): 38 | return super().log_prob(actions).sum(-1, keepdim=True) 39 | 40 | def entropy(self): 41 | return super().entropy().sum(-1) 42 | 43 | def mode(self): 44 | return self.mean 45 | 46 | 47 | # Bernoulli 48 | class FixedBernoulli(torch.distributions.Bernoulli): 49 | def log_probs(self, actions): 50 | return super.log_prob(actions).view(actions.size(0), -1).sum(-1).unsqueeze(-1) 51 | 52 | def entropy(self): 53 | return super().entropy().sum(-1) 54 | 55 | def mode(self): 56 | return torch.gt(self.probs, 0.5).float() 57 | 58 | 59 | class Categorical(nn.Module): 60 | def __init__(self, num_inputs, num_outputs): 61 | super(Categorical, self).__init__() 62 | 63 | init_ = lambda m: init( 64 | m, 65 | nn.init.orthogonal_, 66 | lambda x: nn.init.constant_(x, 0), 67 | gain=0.01) 68 | 69 | self.linear = init_(nn.Linear(num_inputs, num_outputs)) 70 | 71 | def forward(self, x): 72 | x = self.linear(x) 73 | return FixedCategorical(logits=x) 74 | 75 | 76 | class DiagGaussian(nn.Module): 77 | def __init__(self, num_inputs, num_outputs): 78 | super(DiagGaussian, self).__init__() 79 | 80 | init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init. 
81 | constant_(x, 0)) 82 | 83 | self.fc_mean = init_(nn.Linear(num_inputs, num_outputs)) 84 | self.logstd = AddBias(torch.zeros(num_outputs)) 85 | 86 | def forward(self, x): 87 | action_mean = self.fc_mean(x) 88 | 89 | # An ugly hack for my KFAC implementation. 90 | zeros = torch.zeros(action_mean.size()) 91 | if x.is_cuda: 92 | zeros = zeros.cuda() 93 | 94 | action_logstd = self.logstd(zeros) 95 | return FixedNormal(action_mean, action_logstd.exp()) 96 | 97 | 98 | class Bernoulli(nn.Module): 99 | def __init__(self, num_inputs, num_outputs): 100 | super(Bernoulli, self).__init__() 101 | 102 | init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init. 103 | constant_(x, 0)) 104 | 105 | self.linear = init_(nn.Linear(num_inputs, num_outputs)) 106 | 107 | def forward(self, x): 108 | x = self.linear(x) 109 | return FixedBernoulli(logits=x) 110 | -------------------------------------------------------------------------------- /a2c_ppo_acktr/envs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import gym 4 | import numpy as np 5 | import torch 6 | from gym.spaces.box import Box 7 | from gym.wrappers.clip_action import ClipAction 8 | from stable_baselines3.common.atari_wrappers import (ClipRewardEnv, 9 | EpisodicLifeEnv, 10 | FireResetEnv, 11 | MaxAndSkipEnv, 12 | NoopResetEnv, WarpFrame) 13 | from stable_baselines3.common.monitor import Monitor 14 | from stable_baselines3.common.vec_env import (DummyVecEnv, SubprocVecEnv, 15 | VecEnvWrapper) 16 | from stable_baselines3.common.vec_env.vec_normalize import \ 17 | VecNormalize as VecNormalize_ 18 | 19 | try: 20 | import dmc2gym 21 | except ImportError: 22 | pass 23 | 24 | try: 25 | import roboschool 26 | except ImportError: 27 | pass 28 | 29 | try: 30 | import pybullet_envs 31 | except ImportError: 32 | pass 33 | 34 | 35 | def make_env(env_id, seed, rank, log_dir, allow_early_resets): 36 | def _thunk(): 37 | if env_id.startswith("dm"): 38 | _, domain, task = 
env_id.split('.') 39 | env = dmc2gym.make(domain_name=domain, task_name=task) 40 | env = ClipAction(env) 41 | else: 42 | env = gym.make(env_id) 43 | 44 | is_atari = hasattr(gym.envs, 'atari') and isinstance( 45 | env.unwrapped, gym.envs.atari.atari_env.AtariEnv) 46 | if is_atari: 47 | env = NoopResetEnv(env, noop_max=30) 48 | env = MaxAndSkipEnv(env, skip=4) 49 | 50 | env.seed(seed + rank) 51 | 52 | if str(env.__class__.__name__).find('TimeLimit') >= 0: 53 | env = TimeLimitMask(env) 54 | 55 | if log_dir is not None: 56 | env = Monitor(env, 57 | os.path.join(log_dir, str(rank)), 58 | allow_early_resets=allow_early_resets) 59 | 60 | if is_atari: 61 | if len(env.observation_space.shape) == 3: 62 | env = EpisodicLifeEnv(env) 63 | if "FIRE" in env.unwrapped.get_action_meanings(): 64 | env = FireResetEnv(env) 65 | env = WarpFrame(env, width=84, height=84) 66 | env = ClipRewardEnv(env) 67 | elif len(env.observation_space.shape) == 3: 68 | raise NotImplementedError( 69 | "CNN models work only for atari,\n" 70 | "please use a custom wrapper for a custom pixel input env.\n" 71 | "See wrap_deepmind for an example.") 72 | 73 | # If the input has shape (W,H,3), wrap for PyTorch convolutions 74 | obs_shape = env.observation_space.shape 75 | if len(obs_shape) == 3 and obs_shape[2] in [1, 3]: 76 | env = TransposeImage(env, op=[2, 0, 1]) 77 | 78 | return env 79 | 80 | return _thunk 81 | 82 | 83 | def make_vec_envs(env_name, 84 | seed, 85 | num_processes, 86 | gamma, 87 | log_dir, 88 | device, 89 | allow_early_resets, 90 | num_frame_stack=None): 91 | envs = [ 92 | make_env(env_name, seed, i, log_dir, allow_early_resets) 93 | for i in range(num_processes) 94 | ] 95 | 96 | if len(envs) > 1: 97 | envs = SubprocVecEnv(envs) 98 | else: 99 | envs = DummyVecEnv(envs) 100 | 101 | if len(envs.observation_space.shape) == 1: 102 | if gamma is None: 103 | envs = VecNormalize(envs, norm_reward=False) 104 | else: 105 | envs = VecNormalize(envs, gamma=gamma) 106 | 107 | envs = VecPyTorch(envs, 
device) 108 | 109 | if num_frame_stack is not None: 110 | envs = VecPyTorchFrameStack(envs, num_frame_stack, device) 111 | elif len(envs.observation_space.shape) == 3: 112 | envs = VecPyTorchFrameStack(envs, 4, device) 113 | 114 | return envs 115 | 116 | 117 | # Checks whether done was caused my timit limits or not 118 | class TimeLimitMask(gym.Wrapper): 119 | def step(self, action): 120 | obs, rew, done, info = self.env.step(action) 121 | if done and self.env._max_episode_steps == self.env._elapsed_steps: 122 | info['bad_transition'] = True 123 | 124 | return obs, rew, done, info 125 | 126 | def reset(self, **kwargs): 127 | return self.env.reset(**kwargs) 128 | 129 | 130 | # Can be used to test recurrent policies for Reacher-v2 131 | class MaskGoal(gym.ObservationWrapper): 132 | def observation(self, observation): 133 | if self.env._elapsed_steps > 0: 134 | observation[-2:] = 0 135 | return observation 136 | 137 | 138 | class TransposeObs(gym.ObservationWrapper): 139 | def __init__(self, env=None): 140 | """ 141 | Transpose observation space (base class) 142 | """ 143 | super(TransposeObs, self).__init__(env) 144 | 145 | 146 | class TransposeImage(TransposeObs): 147 | def __init__(self, env=None, op=[2, 0, 1]): 148 | """ 149 | Transpose observation space for images 150 | """ 151 | super(TransposeImage, self).__init__(env) 152 | assert len(op) == 3, "Error: Operation, " + str(op) + ", must be dim3" 153 | self.op = op 154 | obs_shape = self.observation_space.shape 155 | self.observation_space = Box( 156 | self.observation_space.low[0, 0, 0], 157 | self.observation_space.high[0, 0, 0], [ 158 | obs_shape[self.op[0]], obs_shape[self.op[1]], 159 | obs_shape[self.op[2]] 160 | ], 161 | dtype=self.observation_space.dtype) 162 | 163 | def observation(self, ob): 164 | return ob.transpose(self.op[0], self.op[1], self.op[2]) 165 | 166 | 167 | class VecPyTorch(VecEnvWrapper): 168 | def __init__(self, venv, device): 169 | """Return only every `skip`-th frame""" 170 | 
super(VecPyTorch, self).__init__(venv) 171 | self.device = device 172 | # TODO: Fix data types 173 | 174 | def reset(self): 175 | obs = self.venv.reset() 176 | obs = torch.from_numpy(obs).float().to(self.device) 177 | return obs 178 | 179 | def step_async(self, actions): 180 | if isinstance(actions, torch.LongTensor): 181 | # Squeeze the dimension for discrete actions 182 | actions = actions.squeeze(1) 183 | actions = actions.cpu().numpy() 184 | self.venv.step_async(actions) 185 | 186 | def step_wait(self): 187 | obs, reward, done, info = self.venv.step_wait() 188 | obs = torch.from_numpy(obs).float().to(self.device) 189 | reward = torch.from_numpy(reward).unsqueeze(dim=1).float() 190 | return obs, reward, done, info 191 | 192 | 193 | class VecNormalize(VecNormalize_): 194 | def __init__(self, *args, **kwargs): 195 | super(VecNormalize, self).__init__(*args, **kwargs) 196 | self.training = True 197 | 198 | def _obfilt(self, obs, update=True): 199 | if self.obs_rms: 200 | if self.training and update: 201 | self.obs_rms.update(obs) 202 | obs = np.clip((obs - self.obs_rms.mean) / 203 | np.sqrt(self.obs_rms.var + self.epsilon), 204 | -self.clip_obs, self.clip_obs) 205 | return obs 206 | else: 207 | return obs 208 | 209 | def train(self): 210 | self.training = True 211 | 212 | def eval(self): 213 | self.training = False 214 | 215 | 216 | # Derived from 217 | # https://github.com/openai/baselines/blob/master/baselines/common/vec_env/vec_frame_stack.py 218 | class VecPyTorchFrameStack(VecEnvWrapper): 219 | def __init__(self, venv, nstack, device=None): 220 | self.venv = venv 221 | self.nstack = nstack 222 | 223 | wos = venv.observation_space # wrapped ob space 224 | self.shape_dim0 = wos.shape[0] 225 | 226 | low = np.repeat(wos.low, self.nstack, axis=0) 227 | high = np.repeat(wos.high, self.nstack, axis=0) 228 | 229 | if device is None: 230 | device = torch.device('cpu') 231 | self.stacked_obs = torch.zeros((venv.num_envs, ) + 232 | low.shape).to(device) 233 | 234 | 
observation_space = gym.spaces.Box(low=low, 235 | high=high, 236 | dtype=venv.observation_space.dtype) 237 | VecEnvWrapper.__init__(self, venv, observation_space=observation_space) 238 | 239 | def step_wait(self): 240 | obs, rews, news, infos = self.venv.step_wait() 241 | self.stacked_obs[:, :-self.shape_dim0] = \ 242 | self.stacked_obs[:, self.shape_dim0:].clone() 243 | for (i, new) in enumerate(news): 244 | if new: 245 | self.stacked_obs[i] = 0 246 | self.stacked_obs[:, -self.shape_dim0:] = obs 247 | return self.stacked_obs, rews, news, infos 248 | 249 | def reset(self): 250 | obs = self.venv.reset() 251 | if torch.backends.cudnn.deterministic: 252 | self.stacked_obs = torch.zeros(self.stacked_obs.shape) 253 | else: 254 | self.stacked_obs.zero_() 255 | self.stacked_obs[:, -self.shape_dim0:] = obs 256 | return self.stacked_obs 257 | 258 | def close(self): 259 | self.venv.close() 260 | -------------------------------------------------------------------------------- /a2c_ppo_acktr/model.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | import torch.nn as nn 4 | import torch.nn.functional as F 5 | 6 | from a2c_ppo_acktr.distributions import Bernoulli, Categorical, DiagGaussian 7 | from a2c_ppo_acktr.utils import init 8 | 9 | 10 | class Flatten(nn.Module): 11 | def forward(self, x): 12 | return x.view(x.size(0), -1) 13 | 14 | 15 | class Policy(nn.Module): 16 | def __init__(self, obs_shape, action_space, base=None, base_kwargs=None): 17 | super(Policy, self).__init__() 18 | if base_kwargs is None: 19 | base_kwargs = {} 20 | if base is None: 21 | if len(obs_shape) == 3: 22 | base = CNNBase 23 | elif len(obs_shape) == 1: 24 | base = MLPBase 25 | else: 26 | raise NotImplementedError 27 | 28 | self.base = base(obs_shape[0], **base_kwargs) 29 | 30 | if action_space.__class__.__name__ == "Discrete": 31 | num_outputs = action_space.n 32 | self.dist = Categorical(self.base.output_size, num_outputs) 
33 | elif action_space.__class__.__name__ == "Box": 34 | num_outputs = action_space.shape[0] 35 | self.dist = DiagGaussian(self.base.output_size, num_outputs) 36 | elif action_space.__class__.__name__ == "MultiBinary": 37 | num_outputs = action_space.shape[0] 38 | self.dist = Bernoulli(self.base.output_size, num_outputs) 39 | else: 40 | raise NotImplementedError 41 | 42 | @property 43 | def is_recurrent(self): 44 | return self.base.is_recurrent 45 | 46 | @property 47 | def recurrent_hidden_state_size(self): 48 | """Size of rnn_hx.""" 49 | return self.base.recurrent_hidden_state_size 50 | 51 | def forward(self, inputs, rnn_hxs, masks): 52 | raise NotImplementedError 53 | 54 | def act(self, inputs, rnn_hxs, masks, deterministic=False): 55 | value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks) 56 | dist = self.dist(actor_features) 57 | 58 | if deterministic: 59 | action = dist.mode() 60 | else: 61 | action = dist.sample() 62 | 63 | action_log_probs = dist.log_probs(action) 64 | dist_entropy = dist.entropy().mean() 65 | 66 | return value, action, action_log_probs, rnn_hxs 67 | 68 | def get_value(self, inputs, rnn_hxs, masks): 69 | value, _, _ = self.base(inputs, rnn_hxs, masks) 70 | return value 71 | 72 | def evaluate_actions(self, inputs, rnn_hxs, masks, action): 73 | value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks) 74 | dist = self.dist(actor_features) 75 | 76 | action_log_probs = dist.log_probs(action) 77 | dist_entropy = dist.entropy().mean() 78 | 79 | return value, action_log_probs, dist_entropy, rnn_hxs 80 | 81 | 82 | class NNBase(nn.Module): 83 | def __init__(self, recurrent, recurrent_input_size, hidden_size): 84 | super(NNBase, self).__init__() 85 | 86 | self._hidden_size = hidden_size 87 | self._recurrent = recurrent 88 | 89 | if recurrent: 90 | self.gru = nn.GRU(recurrent_input_size, hidden_size) 91 | for name, param in self.gru.named_parameters(): 92 | if 'bias' in name: 93 | nn.init.constant_(param, 0) 94 | elif 'weight' in 
name: 95 | nn.init.orthogonal_(param) 96 | 97 | @property 98 | def is_recurrent(self): 99 | return self._recurrent 100 | 101 | @property 102 | def recurrent_hidden_state_size(self): 103 | if self._recurrent: 104 | return self._hidden_size 105 | return 1 106 | 107 | @property 108 | def output_size(self): 109 | return self._hidden_size 110 | 111 | def _forward_gru(self, x, hxs, masks): 112 | if x.size(0) == hxs.size(0): 113 | x, hxs = self.gru(x.unsqueeze(0), (hxs * masks).unsqueeze(0)) 114 | x = x.squeeze(0) 115 | hxs = hxs.squeeze(0) 116 | else: 117 | # x is a (T, N, -1) tensor that has been flatten to (T * N, -1) 118 | N = hxs.size(0) 119 | T = int(x.size(0) / N) 120 | 121 | # unflatten 122 | x = x.view(T, N, x.size(1)) 123 | 124 | # Same deal with masks 125 | masks = masks.view(T, N) 126 | 127 | # Let's figure out which steps in the sequence have a zero for any agent 128 | # We will always assume t=0 has a zero in it as that makes the logic cleaner 129 | has_zeros = ((masks[1:] == 0.0) \ 130 | .any(dim=-1) 131 | .nonzero() 132 | .squeeze() 133 | .cpu()) 134 | 135 | # +1 to correct the masks[1:] 136 | if has_zeros.dim() == 0: 137 | # Deal with scalar 138 | has_zeros = [has_zeros.item() + 1] 139 | else: 140 | has_zeros = (has_zeros + 1).numpy().tolist() 141 | 142 | # add t=0 and t=T to the list 143 | has_zeros = [0] + has_zeros + [T] 144 | 145 | hxs = hxs.unsqueeze(0) 146 | outputs = [] 147 | for i in range(len(has_zeros) - 1): 148 | # We can now process steps that don't have any zeros in masks together! 
149 | # This is much faster 150 | start_idx = has_zeros[i] 151 | end_idx = has_zeros[i + 1] 152 | 153 | rnn_scores, hxs = self.gru( 154 | x[start_idx:end_idx], 155 | hxs * masks[start_idx].view(1, -1, 1)) 156 | 157 | outputs.append(rnn_scores) 158 | 159 | # assert len(outputs) == T 160 | # x is a (T, N, -1) tensor 161 | x = torch.cat(outputs, dim=0) 162 | # flatten 163 | x = x.view(T * N, -1) 164 | hxs = hxs.squeeze(0) 165 | 166 | return x, hxs 167 | 168 | 169 | class CNNBase(NNBase): 170 | def __init__(self, num_inputs, recurrent=False, hidden_size=512): 171 | super(CNNBase, self).__init__(recurrent, hidden_size, hidden_size) 172 | 173 | init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init. 174 | constant_(x, 0), nn.init.calculate_gain('relu')) 175 | 176 | self.main = nn.Sequential( 177 | init_(nn.Conv2d(num_inputs, 32, 8, stride=4)), nn.ReLU(), 178 | init_(nn.Conv2d(32, 64, 4, stride=2)), nn.ReLU(), 179 | init_(nn.Conv2d(64, 32, 3, stride=1)), nn.ReLU(), Flatten(), 180 | init_(nn.Linear(32 * 7 * 7, hidden_size)), nn.ReLU()) 181 | 182 | init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init. 183 | constant_(x, 0)) 184 | 185 | self.critic_linear = init_(nn.Linear(hidden_size, 1)) 186 | 187 | self.train() 188 | 189 | def forward(self, inputs, rnn_hxs, masks): 190 | x = self.main(inputs / 255.0) 191 | 192 | if self.is_recurrent: 193 | x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks) 194 | 195 | return self.critic_linear(x), x, rnn_hxs 196 | 197 | 198 | class MLPBase(NNBase): 199 | def __init__(self, num_inputs, recurrent=False, hidden_size=64): 200 | super(MLPBase, self).__init__(recurrent, num_inputs, hidden_size) 201 | 202 | if recurrent: 203 | num_inputs = hidden_size 204 | 205 | init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init. 
206 | constant_(x, 0), np.sqrt(2)) 207 | 208 | self.actor = nn.Sequential( 209 | init_(nn.Linear(num_inputs, hidden_size)), nn.Tanh(), 210 | init_(nn.Linear(hidden_size, hidden_size)), nn.Tanh()) 211 | 212 | self.critic = nn.Sequential( 213 | init_(nn.Linear(num_inputs, hidden_size)), nn.Tanh(), 214 | init_(nn.Linear(hidden_size, hidden_size)), nn.Tanh()) 215 | 216 | self.critic_linear = init_(nn.Linear(hidden_size, 1)) 217 | 218 | self.train() 219 | 220 | def forward(self, inputs, rnn_hxs, masks): 221 | x = inputs 222 | 223 | if self.is_recurrent: 224 | x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks) 225 | 226 | hidden_critic = self.critic(x) 227 | hidden_actor = self.actor(x) 228 | 229 | return self.critic_linear(hidden_critic), hidden_actor, rnn_hxs 230 | -------------------------------------------------------------------------------- /a2c_ppo_acktr/storage.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.utils.data.sampler import BatchSampler, SubsetRandomSampler 3 | 4 | 5 | def _flatten_helper(T, N, _tensor): 6 | return _tensor.view(T * N, *_tensor.size()[2:]) 7 | 8 | 9 | class RolloutStorage(object): 10 | def __init__(self, num_steps, num_processes, obs_shape, action_space, 11 | recurrent_hidden_state_size): 12 | self.obs = torch.zeros(num_steps + 1, num_processes, *obs_shape) 13 | self.recurrent_hidden_states = torch.zeros( 14 | num_steps + 1, num_processes, recurrent_hidden_state_size) 15 | self.rewards = torch.zeros(num_steps, num_processes, 1) 16 | self.value_preds = torch.zeros(num_steps + 1, num_processes, 1) 17 | self.returns = torch.zeros(num_steps + 1, num_processes, 1) 18 | self.action_log_probs = torch.zeros(num_steps, num_processes, 1) 19 | if action_space.__class__.__name__ == 'Discrete': 20 | action_shape = 1 21 | else: 22 | action_shape = action_space.shape[0] 23 | self.actions = torch.zeros(num_steps, num_processes, action_shape) 24 | if action_space.__class__.__name__ == 
'Discrete': 25 | self.actions = self.actions.long() 26 | self.masks = torch.ones(num_steps + 1, num_processes, 1) 27 | 28 | # Masks that indicate whether it's a true terminal state 29 | # or time limit end state 30 | self.bad_masks = torch.ones(num_steps + 1, num_processes, 1) 31 | 32 | self.num_steps = num_steps 33 | self.step = 0 34 | 35 | def to(self, device): 36 | self.obs = self.obs.to(device) 37 | self.recurrent_hidden_states = self.recurrent_hidden_states.to(device) 38 | self.rewards = self.rewards.to(device) 39 | self.value_preds = self.value_preds.to(device) 40 | self.returns = self.returns.to(device) 41 | self.action_log_probs = self.action_log_probs.to(device) 42 | self.actions = self.actions.to(device) 43 | self.masks = self.masks.to(device) 44 | self.bad_masks = self.bad_masks.to(device) 45 | 46 | def insert(self, obs, recurrent_hidden_states, actions, action_log_probs, 47 | value_preds, rewards, masks, bad_masks): 48 | self.obs[self.step + 1].copy_(obs) 49 | self.recurrent_hidden_states[self.step + 50 | 1].copy_(recurrent_hidden_states) 51 | self.actions[self.step].copy_(actions) 52 | self.action_log_probs[self.step].copy_(action_log_probs) 53 | self.value_preds[self.step].copy_(value_preds) 54 | self.rewards[self.step].copy_(rewards) 55 | self.masks[self.step + 1].copy_(masks) 56 | self.bad_masks[self.step + 1].copy_(bad_masks) 57 | 58 | self.step = (self.step + 1) % self.num_steps 59 | 60 | def after_update(self): 61 | self.obs[0].copy_(self.obs[-1]) 62 | self.recurrent_hidden_states[0].copy_(self.recurrent_hidden_states[-1]) 63 | self.masks[0].copy_(self.masks[-1]) 64 | self.bad_masks[0].copy_(self.bad_masks[-1]) 65 | 66 | def compute_returns(self, 67 | next_value, 68 | use_gae, 69 | gamma, 70 | gae_lambda, 71 | use_proper_time_limits=True): 72 | if use_proper_time_limits: 73 | if use_gae: 74 | self.value_preds[-1] = next_value 75 | gae = 0 76 | for step in reversed(range(self.rewards.size(0))): 77 | delta = self.rewards[step] + gamma * 
self.value_preds[ 78 | step + 1] * self.masks[step + 79 | 1] - self.value_preds[step] 80 | gae = delta + gamma * gae_lambda * self.masks[step + 81 | 1] * gae 82 | gae = gae * self.bad_masks[step + 1] 83 | self.returns[step] = gae + self.value_preds[step] 84 | else: 85 | self.returns[-1] = next_value 86 | for step in reversed(range(self.rewards.size(0))): 87 | self.returns[step] = (self.returns[step + 1] * \ 88 | gamma * self.masks[step + 1] + self.rewards[step]) * self.bad_masks[step + 1] \ 89 | + (1 - self.bad_masks[step + 1]) * self.value_preds[step] 90 | else: 91 | if use_gae: 92 | self.value_preds[-1] = next_value 93 | gae = 0 94 | for step in reversed(range(self.rewards.size(0))): 95 | delta = self.rewards[step] + gamma * self.value_preds[ 96 | step + 1] * self.masks[step + 97 | 1] - self.value_preds[step] 98 | gae = delta + gamma * gae_lambda * self.masks[step + 99 | 1] * gae 100 | self.returns[step] = gae + self.value_preds[step] 101 | else: 102 | self.returns[-1] = next_value 103 | for step in reversed(range(self.rewards.size(0))): 104 | self.returns[step] = self.returns[step + 1] * \ 105 | gamma * self.masks[step + 1] + self.rewards[step] 106 | 107 | def feed_forward_generator(self, 108 | advantages, 109 | num_mini_batch=None, 110 | mini_batch_size=None): 111 | num_steps, num_processes = self.rewards.size()[0:2] 112 | batch_size = num_processes * num_steps 113 | 114 | if mini_batch_size is None: 115 | assert batch_size >= num_mini_batch, ( 116 | "PPO requires the number of processes ({}) " 117 | "* number of steps ({}) = {} " 118 | "to be greater than or equal to the number of PPO mini batches ({})." 
def recurrent_generator(self, advantages, num_mini_batch):
    """Yield PPO minibatches for a recurrent policy.

    Whole per-environment trajectories must stay intact so that hidden
    states can be propagated through time, hence the split is over
    processes rather than over individual timesteps.  Each yielded tuple
    is (obs, recurrent_hidden_states, actions, value_preds, returns,
    masks, old_action_log_probs, adv_targ) with every (T, N, ...) tensor
    flattened to (T * N, ...); the hidden states are only the initial
    ones, as a single (N, -1) tensor.
    """
    num_processes = self.rewards.size(1)
    assert num_processes >= num_mini_batch, (
        "PPO requires the number of processes ({}) "
        "to be greater than or equal to the number of "
        "PPO mini batches ({}).".format(num_processes, num_mini_batch))
    num_envs_per_batch = num_processes // num_mini_batch
    perm = torch.randperm(num_processes)
    T, N = self.num_steps, num_envs_per_batch
    for start_ind in range(0, num_processes, num_envs_per_batch):
        env_inds = [perm[start_ind + offset]
                    for offset in range(num_envs_per_batch)]

        # Gather the selected environments' trajectories and stack them
        # along a new batch dimension: each result is (T, N, ...).
        obs_batch = torch.stack([self.obs[:-1, i] for i in env_inds], 1)
        actions_batch = torch.stack(
            [self.actions[:, i] for i in env_inds], 1)
        value_preds_batch = torch.stack(
            [self.value_preds[:-1, i] for i in env_inds], 1)
        return_batch = torch.stack(
            [self.returns[:-1, i] for i in env_inds], 1)
        masks_batch = torch.stack(
            [self.masks[:-1, i] for i in env_inds], 1)
        old_action_log_probs_batch = torch.stack(
            [self.action_log_probs[:, i] for i in env_inds], 1)
        adv_targ = torch.stack([advantages[:, i] for i in env_inds], 1)

        # Only the hidden states at t=0 are needed: (N, -1).
        recurrent_hidden_states_batch = torch.stack(
            [self.recurrent_hidden_states[0:1, i] for i in env_inds],
            1).view(N, -1)

        # Collapse time and environment dims: (T, N, ...) -> (T * N, ...).
        obs_batch = _flatten_helper(T, N, obs_batch)
        actions_batch = _flatten_helper(T, N, actions_batch)
        value_preds_batch = _flatten_helper(T, N, value_preds_batch)
        return_batch = _flatten_helper(T, N, return_batch)
        masks_batch = _flatten_helper(T, N, masks_batch)
        old_action_log_probs_batch = _flatten_helper(
            T, N, old_action_log_probs_batch)
        adv_targ = _flatten_helper(T, N, adv_targ)

        yield obs_batch, recurrent_hidden_states_batch, actions_batch, \
            value_preds_batch, return_batch, masks_batch, \
            old_action_log_probs_batch, adv_targ
# Necessary for my KFAC implementation.
class AddBias(nn.Module):
    """Adds a learned bias to its input.

    Kept as a standalone module (rather than folding the bias into a
    Linear/Conv layer) so the KFAC optimizer can recognize and treat it
    as its own layer.  The bias is stored with an extra trailing
    dimension and reshaped to broadcast over either 2-D (batch, feat)
    or 4-D (batch, chan, H, W) inputs.
    """

    def __init__(self, bias):
        super(AddBias, self).__init__()
        # Stored as (num_features, 1); the KFAC code accesses `_bias`.
        self._bias = nn.Parameter(bias.unsqueeze(1))

    def forward(self, x):
        # Broadcast shape depends on whether the input is flat or conv-like.
        shape = (1, -1) if x.dim() == 2 else (1, -1, 1, 1)
        return x + self._bias.t().view(*shape)
default='./trained_models/', 29 | help='directory to save agent logs (default: ./trained_models/)') 30 | parser.add_argument( 31 | '--non-det', 32 | action='store_true', 33 | default=False, 34 | help='whether to use a non-deterministic policy') 35 | args = parser.parse_args() 36 | 37 | args.det = not args.non_det 38 | 39 | env = make_vec_envs( 40 | args.env_name, 41 | args.seed + 1000, 42 | 1, 43 | None, 44 | None, 45 | device='cpu', 46 | allow_early_resets=False) 47 | 48 | # Get a render function 49 | render_func = get_render_func(env) 50 | 51 | # We need to use the same statistics for normalization as used in training 52 | actor_critic, obs_rms = \ 53 | torch.load(os.path.join(args.load_dir, args.env_name + ".pt"), 54 | map_location='cpu') 55 | 56 | vec_norm = get_vec_normalize(env) 57 | if vec_norm is not None: 58 | vec_norm.eval() 59 | vec_norm.obs_rms = obs_rms 60 | 61 | recurrent_hidden_states = torch.zeros(1, 62 | actor_critic.recurrent_hidden_state_size) 63 | masks = torch.zeros(1, 1) 64 | 65 | obs = env.reset() 66 | 67 | if render_func is not None: 68 | render_func('human') 69 | 70 | if args.env_name.find('Bullet') > -1: 71 | import pybullet as p 72 | 73 | torsoId = -1 74 | for i in range(p.getNumBodies()): 75 | if (p.getBodyInfo(i)[0].decode() == "torso"): 76 | torsoId = i 77 | 78 | while True: 79 | with torch.no_grad(): 80 | value, action, _, recurrent_hidden_states = actor_critic.act( 81 | obs, recurrent_hidden_states, masks, deterministic=args.det) 82 | 83 | # Obser reward and next obs 84 | obs, reward, done, _ = env.step(action) 85 | 86 | masks.fill_(0.0 if done else 1.0) 87 | 88 | if args.env_name.find('Bullet') > -1: 89 | if torsoId > -1: 90 | distance = 5 91 | yaw = 0 92 | humanPos, humanOrn = p.getBasePositionAndOrientation(torsoId) 93 | p.resetDebugVisualizerCamera(distance, yaw, -20, humanPos) 94 | 95 | if render_func is not None: 96 | render_func('human') 97 | -------------------------------------------------------------------------------- 
def evaluate(actor_critic, obs_rms, env_name, seed, num_processes, eval_log_dir,
             device):
    """Evaluate a trained policy on freshly created environments.

    Runs `actor_critic` deterministically until at least 10 episodes have
    finished, then prints the mean episode reward.  `obs_rms` carries the
    observation-normalization statistics from training so that evaluation
    sees identically normalized inputs.
    """
    envs = make_vec_envs(env_name, seed + num_processes, num_processes,
                         None, eval_log_dir, device, True)

    # Reuse (and freeze) the training-time normalization statistics.
    vec_norm = utils.get_vec_normalize(envs)
    if vec_norm is not None:
        vec_norm.eval()
        vec_norm.obs_rms = obs_rms

    episode_rewards = []

    obs = envs.reset()
    recurrent_hidden_states = torch.zeros(
        num_processes, actor_critic.recurrent_hidden_state_size, device=device)
    masks = torch.zeros(num_processes, 1, device=device)

    while len(episode_rewards) < 10:
        with torch.no_grad():
            _, action, _, recurrent_hidden_states = actor_critic.act(
                obs,
                recurrent_hidden_states,
                masks,
                deterministic=True)

        # Step every evaluation environment with the chosen actions.
        obs, _, done, infos = envs.step(action)

        # A mask of 0 resets the recurrent state of finished episodes.
        masks = torch.tensor(
            [[0.0] if done_ else [1.0] for done_ in done],
            dtype=torch.float32,
            device=device)

        # The Monitor wrapper reports a finished episode via info['episode'].
        episode_rewards.extend(info['episode']['r']
                               for info in infos if 'episode' in info.keys())

    envs.close()

    print(" Evaluation using {} episodes: mean reward {:.5f}\n".format(
        len(episode_rewards), np.mean(episode_rewards)))
def main():
    """Convert GAIL expert trajectories from .h5 to a torch .pt archive.

    Reads the rllab-style datasets (B=batch, T=time, Do/Da=obs/action
    dims) from the input h5 file and saves them as a dict of torch
    tensors keyed by 'states', 'actions', 'rewards' and 'lengths'.
    """
    parser = argparse.ArgumentParser(
        'Converts expert trajectories from h5 to pt format.')
    parser.add_argument(
        '--h5-file',
        default='trajs_halfcheetah.h5',
        help='input h5 file',
        type=str)
    parser.add_argument(
        '--pt-file',
        default=None,
        help='output pt file, by default replaces file extension with pt',
        type=str)
    args = parser.parse_args()

    # Default output: same path with the extension swapped for .pt.
    if args.pt_file is None:
        args.pt_file = os.path.splitext(args.h5_file)[0] + '.pt'

    with h5py.File(args.h5_file, 'r') as f:
        # [...] materializes the full dataset as a numpy array.
        states = f['obs_B_T_Do'][...]
        actions = f['a_B_T_Da'][...]
        rewards = f['r_B_T'][...]
        lens = f['len_B'][...]

    data = {
        'states': torch.from_numpy(states).float(),
        'actions': torch.from_numpy(actions).float(),
        'rewards': torch.from_numpy(rewards).float(),
        'lengths': torch.from_numpy(lens).long()
    }

    torch.save(data, args.pt_file)
31 | 32 | config["windows"].append({ 33 | "window_name": "seed-{}".format(i), 34 | "panes": panes_list 35 | }) 36 | 37 | yaml.dump(config, open("run_all.yaml", "w"), default_flow_style=False) 38 | -------------------------------------------------------------------------------- /imgs/a2c_beamrider.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/a2c_beamrider.png -------------------------------------------------------------------------------- /imgs/a2c_breakout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/a2c_breakout.png -------------------------------------------------------------------------------- /imgs/a2c_qbert.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/a2c_qbert.png -------------------------------------------------------------------------------- /imgs/a2c_seaquest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/a2c_seaquest.png -------------------------------------------------------------------------------- /imgs/acktr_beamrider.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/acktr_beamrider.png -------------------------------------------------------------------------------- /imgs/acktr_breakout.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/acktr_breakout.png -------------------------------------------------------------------------------- /imgs/acktr_qbert.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/acktr_qbert.png -------------------------------------------------------------------------------- /imgs/acktr_seaquest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/acktr_seaquest.png -------------------------------------------------------------------------------- /imgs/ppo_halfcheetah.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/ppo_halfcheetah.png -------------------------------------------------------------------------------- /imgs/ppo_hopper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/ppo_hopper.png -------------------------------------------------------------------------------- /imgs/ppo_reacher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/ppo_reacher.png -------------------------------------------------------------------------------- /imgs/ppo_walker.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ikostrikov/pytorch-a2c-ppo-acktr-gail/41332b78dfb50321c29bade65f9d244387f68a60/imgs/ppo_walker.png -------------------------------------------------------------------------------- /logs/halfcheetah/halfcheetah-7/0.monitor.csv: -------------------------------------------------------------------------------- 1 | # {"t_start": 1551811846.9572158, "env_id": "HalfCheetah-v2"} 2 | r,l,t 3 | -863.678411,1000,1.08738 4 | -700.912535,1000,2.040133 5 | -969.126867,1000,4.758011 6 | -552.43874,1000,5.948351 7 | -730.717627,1000,8.793514 8 | -769.474304,1000,10.137241 9 | -693.057495,1000,12.923291 10 | -663.00241,1000,14.193786 11 | -724.741087,1000,17.177559 12 | -765.651702,1000,18.51482 13 | -788.053209,1000,21.534084 14 | -612.6923,1000,22.6542 15 | -687.018875,1000,25.29461 16 | -676.171726,1000,26.449709 17 | -595.622135,1000,29.242858 18 | -886.38184,1000,30.337499 19 | -729.935636,1000,33.236759 20 | -530.625043,1000,34.394579 21 | -785.76687,1000,37.14142 22 | -579.548493,1000,38.304897 23 | -641.416548,1000,40.689108 24 | -675.852107,1000,41.750999 25 | -445.705416,1000,44.549795 26 | -649.223917,1000,45.890566 27 | -587.316297,1000,48.785383 28 | -497.395543,1000,49.936654 29 | -625.57526,1000,52.82549 30 | -525.146529,1000,54.087484 31 | -587.289534,1000,56.838509 32 | -534.053414,1000,58.08737 33 | -556.664688,1000,60.965091 34 | -687.191608,1000,62.333764 35 | -466.605503,1000,65.334083 36 | -475.127929,1000,66.661115 37 | -449.05563,1000,69.298652 38 | -439.08815,1000,70.660476 39 | -573.819446,1000,73.5622 40 | -719.271604,1000,74.740827 41 | -393.111584,1000,77.56245 42 | -515.815792,1000,78.917562 43 | -516.092144,1000,81.905451 44 | -446.892282,1000,83.127434 45 | -433.912699,1000,84.233692 46 | -416.965347,1000,87.049605 47 | -478.335585,1000,88.380022 48 | -417.797803,1000,91.085893 49 | -411.374284,1000,92.273135 50 | -397.648581,1000,95.118473 51 | -471.521684,1000,96.425281 52 | -343.370347,1000,99.439658 53 | 
-472.417891,1000,100.733803 54 | -488.188626,1000,103.387653 55 | -419.785281,1000,104.451354 56 | -359.242046,1000,106.847066 57 | -363.04821,1000,107.806928 58 | -346.432621,1000,110.350147 59 | -287.386423,1000,111.31595 60 | -278.884236,1000,113.934361 61 | -318.081039,1000,115.291175 62 | -305.813841,1000,118.183098 63 | -347.971585,1000,119.483785 64 | -305.032189,1000,122.142476 65 | -276.883759,1000,123.215961 66 | -304.210525,1000,126.069956 67 | -213.364623,1000,127.383857 68 | -270.060357,1000,130.130261 69 | -271.229215,1000,131.391195 70 | -445.779883,1000,134.249552 71 | -281.592911,1000,135.374669 72 | -240.800311,1000,138.017601 73 | -201.902833,1000,139.079134 74 | -178.93407,1000,141.964736 75 | -212.8177,1000,143.318792 76 | -265.994415,1000,145.61517 77 | -207.353975,1000,146.67987 78 | -160.424926,1000,149.462198 79 | -218.872222,1000,150.383206 80 | -175.70963,1000,152.882155 81 | -237.366639,1000,154.260747 82 | -509.497789,1000,157.206753 83 | -149.198462,1000,158.591578 84 | -152.89544,1000,161.225028 85 | -105.582932,1000,162.333332 86 | -93.572349,1000,164.832746 87 | -41.953841,1000,165.98933 88 | -107.877954,1000,167.063022 89 | -48.825319,1000,169.883608 90 | -179.648629,1000,170.99356 91 | -262.596481,1000,173.696197 92 | -170.38018,1000,175.058832 93 | -84.967908,1000,177.833131 94 | -46.505014,1000,179.208659 95 | -45.041227,1000,181.799242 96 | -36.823446,1000,183.108907 97 | -78.390896,1000,186.058104 98 | 25.759969,1000,187.38747 99 | -68.045135,1000,190.341491 100 | -92.517781,1000,191.580708 101 | -11.586877,1000,194.159098 102 | -15.628996,1000,195.193175 103 | 78.924097,1000,197.970778 104 | 38.041806,1000,199.139827 105 | 9.258714,1000,201.976247 106 | -19.771098,1000,203.049267 107 | 65.18983,1000,205.854596 108 | 39.30153,1000,207.142627 109 | 48.429272,1000,209.952925 110 | 25.636547,1000,211.384608 111 | -112.414477,1000,214.41339 112 | 79.323766,1000,215.413231 113 | 90.64365,1000,218.216068 114 | 
104.205979,1000,219.467458 115 | 30.869505,1000,222.289158 116 | 26.871137,1000,223.570335 117 | 28.997685,1000,226.355434 118 | 144.075991,1000,227.690851 119 | 110.169738,1000,230.543786 120 | 158.284473,1000,231.880428 121 | 57.32555,1000,234.862424 122 | 83.529362,1000,235.932408 123 | 154.006695,1000,238.818993 124 | 13.544955,1000,240.02497 125 | 4.132307,1000,243.00284 126 | 29.387148,1000,244.180231 127 | 1.643078,1000,246.813055 128 | 17.313806,1000,248.144384 129 | 96.068193,1000,250.98646 130 | 121.628739,1000,252.14121 131 | 193.753465,1000,253.226228 132 | 120.421644,1000,256.131198 133 | 136.450449,1000,257.476294 134 | 197.931288,1000,260.322755 135 | 201.128422,1000,261.364504 136 | 152.28939,1000,263.828241 137 | 149.119033,1000,265.042797 138 | 37.375684,1000,268.003998 139 | 254.376542,1000,269.3532 140 | 200.16743,1000,272.103872 141 | 181.212777,1000,273.184102 142 | 278.582796,1000,275.947399 143 | 159.837164,1000,277.010601 144 | 297.629598,1000,279.679187 145 | 213.046113,1000,280.900891 146 | 242.554006,1000,283.77537 147 | 273.130289,1000,284.826903 148 | 264.464776,1000,287.139618 149 | 144.053985,1000,288.361377 150 | 282.869716,1000,291.28897 151 | 231.957692,1000,292.177878 152 | 253.943507,1000,294.774782 153 | 301.037305,1000,296.104779 154 | 177.475055,1000,299.011884 155 | 436.892596,1000,300.298731 156 | 376.501404,1000,303.124465 157 | 393.895761,1000,304.501409 158 | 240.130583,1000,307.143052 159 | 287.430625,1000,308.184081 160 | 426.446937,1000,311.209501 161 | 393.508264,1000,312.570682 162 | 441.745101,1000,315.473795 163 | 338.721416,1000,316.733881 164 | 460.183483,1000,319.769768 165 | 364.55902,1000,321.113196 166 | 409.919463,1000,323.633161 167 | 422.389678,1000,324.827241 168 | 476.890491,1000,327.751391 169 | 305.28911,1000,328.822421 170 | 455.178858,1000,331.811074 171 | 406.860177,1000,333.154346 172 | 333.364932,1000,335.434919 173 | 544.277653,1000,336.574157 174 | 363.193182,1000,337.858777 175 | 
566.368266,1000,340.668572 176 | 346.2844,1000,341.867337 177 | 644.917482,1000,344.610306 178 | 435.32767,1000,345.867956 179 | 591.2417,1000,348.677907 180 | 427.433332,1000,350.0691 181 | 499.369476,1000,353.347212 182 | 564.111715,1000,354.695138 183 | 590.118536,1000,357.322072 184 | 629.726071,1000,358.551198 185 | 535.137796,1000,361.458192 186 | 565.05163,1000,362.774727 187 | 657.114124,1000,365.360475 188 | 618.021685,1000,366.492927 189 | 673.622907,1000,369.314366 190 | 555.100456,1000,370.477668 191 | 596.450489,1000,373.375065 192 | 568.622167,1000,374.394741 193 | 577.082751,1000,377.07256 194 | 620.438944,1000,378.410126 195 | 545.540159,1000,381.046779 196 | 619.429359,1000,382.151363 197 | 635.905772,1000,384.876043 198 | 642.608196,1000,385.92587 199 | 650.53397,1000,388.68424 200 | 737.567636,1000,389.836377 201 | 625.67782,1000,392.663081 202 | 723.701677,1000,393.829151 203 | 680.087863,1000,396.471397 204 | 722.625803,1000,397.79718 205 | 728.056341,1000,400.720177 206 | 713.582635,1000,401.996468 207 | 736.146324,1000,404.876943 208 | 770.669121,1000,405.941613 209 | 778.78327,1000,408.482891 210 | 791.208424,1000,409.735549 211 | 714.961451,1000,412.363456 212 | 829.048596,1000,413.676677 213 | 709.833762,1000,416.583212 214 | 804.888067,1000,417.806781 215 | 707.001248,1000,420.248766 216 | 748.023706,1000,421.181992 217 | 646.281634,1000,422.229025 218 | -326.229249,1000,424.793824 219 | 800.173626,1000,425.909744 220 | 761.655784,1000,428.812234 221 | 806.201436,1000,430.138498 222 | 788.479533,1000,432.947156 223 | 786.468483,1000,434.095591 224 | 744.252423,1000,436.635842 225 | 860.055632,1000,437.780808 226 | 653.916494,1000,440.632996 227 | 798.595315,1000,441.853017 228 | 835.963506,1000,444.708429 229 | 740.878747,1000,445.820698 230 | 846.374553,1000,448.662409 231 | 702.345049,1000,449.914441 232 | 886.090175,1000,452.689282 233 | 852.679059,1000,453.694713 234 | 844.317842,1000,456.743757 235 | 827.010536,1000,458.072629 236 | 
757.416442,1000,460.785463 237 | 848.074268,1000,462.109685 238 | 897.238233,1000,464.865323 239 | 810.220073,1000,466.187892 240 | 897.233973,1000,468.842211 241 | 912.032175,1000,469.90562 242 | 854.961126,1000,472.692739 243 | 798.817493,1000,473.984461 244 | 791.057105,1000,476.184467 245 | 888.51223,1000,477.219265 246 | 846.354543,1000,479.885671 247 | 902.197174,1000,481.194925 248 | 846.503508,1000,483.866581 249 | 909.250639,1000,485.139402 250 | 857.783128,1000,487.679203 251 | 866.404133,1000,488.661194 252 | 930.568125,1000,490.823167 253 | 978.0112,1000,492.167131 254 | 856.381049,1000,495.081854 255 | 815.918836,1000,496.316173 256 | 974.304439,1000,498.629638 257 | 1009.061713,1000,500.006646 258 | 945.215826,1000,501.337898 259 | 938.975364,1000,504.258851 260 | 942.526165,1000,505.787577 261 | 948.603543,1000,508.585591 262 | 957.178415,1000,509.859269 263 | 990.256911,1000,512.547027 264 | 951.106035,1000,513.623998 265 | 1011.769875,1000,516.514748 266 | 1028.281455,1000,517.656946 267 | 1007.226918,1000,520.303528 268 | 945.587253,1000,521.521387 269 | 1015.434886,1000,524.564537 270 | 988.720412,1000,525.996633 271 | 986.318808,1000,528.975718 272 | 956.26601,1000,530.301992 273 | 1045.136018,1000,532.994356 274 | 1017.636939,1000,534.054188 275 | 916.478209,1000,536.581473 276 | 1035.563347,1000,537.591203 277 | 1029.50269,1000,540.28323 278 | 992.146859,1000,541.592123 279 | 1029.115549,1000,544.427193 280 | 1023.002632,1000,545.770049 281 | 937.784951,1000,548.467394 282 | 1057.877443,1000,549.823902 283 | 1061.951805,1000,552.610459 284 | 1047.082168,1000,553.935739 285 | 970.51217,1000,556.614591 286 | 975.279313,1000,557.741159 287 | -142.471408,1000,560.424792 288 | 971.448827,1000,561.333153 289 | 993.554361,1000,563.999424 290 | 1005.773412,1000,565.036525 291 | 1033.669006,1000,567.600391 292 | 1064.551122,1000,568.895028 293 | 1099.666511,1000,571.796931 294 | 1012.024006,1000,573.037893 295 | 1039.499025,1000,575.777209 296 | 
1079.258396,1000,577.112798 297 | 1004.400061,1000,580.031354 298 | 1015.38982,1000,581.326762 299 | 1019.592548,1000,583.926271 300 | 1047.163691,1000,585.218696 301 | 1110.186082,1000,586.337937 302 | 1059.915556,1000,589.26616 303 | 1064.346845,1000,590.827071 304 | 1012.730567,1000,593.611083 305 | 1032.479226,1000,594.628811 306 | 977.476282,1000,597.378854 307 | 1121.210753,1000,598.671187 308 | 1069.601069,1000,601.423118 309 | 1092.076964,1000,602.533452 310 | 1124.758769,1000,605.454924 311 | 1083.944391,1000,606.804616 312 | 1085.131454,1000,609.462014 313 | 1117.119162,1000,610.528641 314 | 1063.453051,1000,613.154081 315 | 1081.782485,1000,614.169843 316 | 1123.075997,1000,616.939718 317 | 1200.531465,1000,618.286593 318 | 1110.545421,1000,621.205416 319 | 1077.676615,1000,622.348917 320 | 1133.700463,1000,625.164594 321 | 998.77086,1000,626.09032 322 | 1063.993745,1000,628.831974 323 | 1124.023452,1000,630.199973 324 | 1076.980819,1000,633.031432 325 | 1130.80678,1000,633.974797 326 | 1139.408942,1000,636.646372 327 | 1066.688757,1000,637.862053 328 | 1017.170546,1000,640.628767 329 | 1043.462889,1000,642.027981 330 | 1148.333988,1000,644.912514 331 | 1112.914055,1000,646.192457 332 | 1110.952715,1000,648.998098 333 | 1120.204007,1000,650.355528 334 | 1178.946488,1000,653.442617 335 | 1156.422502,1000,654.71551 336 | 1186.729013,1000,657.35852 337 | 1143.99236,1000,658.682812 338 | 1113.231623,1000,661.496953 339 | 1163.565392,1000,662.842408 340 | 1148.72479,1000,665.491306 341 | 1163.712782,1000,666.610242 342 | 1165.300095,1000,669.398365 343 | 1085.903326,1000,670.438319 344 | 1166.620582,1000,671.530045 345 | 1176.894144,1000,674.35022 346 | -186.410725,1000,675.688798 347 | 1164.971006,1000,678.398684 348 | 1120.639465,1000,679.475727 349 | 1214.879422,1000,681.949909 350 | 1200.671996,1000,682.960261 351 | 1175.008235,1000,685.617227 352 | 1170.92363,1000,686.700423 353 | 1245.611792,1000,689.325931 354 | 1217.669479,1000,690.440764 355 | 
1117.576496,1000,693.421565 356 | 1192.750969,1000,694.610299 357 | 1173.643821,1000,697.194105 358 | 1148.949539,1000,698.46141 359 | 1239.039204,1000,701.218389 360 | 1176.538707,1000,702.567571 361 | 1178.619437,1000,705.466251 362 | 1194.930743,1000,706.745513 363 | 1220.772269,1000,709.380901 364 | 1204.596289,1000,710.694466 365 | -207.568441,1000,713.517076 366 | 1142.000085,1000,714.612887 367 | 1212.342083,1000,717.278958 368 | 1208.92251,1000,718.367599 369 | 1187.730942,1000,721.137454 370 | 1175.214491,1000,722.418371 371 | 1244.362796,1000,725.046766 372 | 1192.636176,1000,726.515924 373 | 1153.70075,1000,729.693428 374 | 1263.450961,1000,730.735482 375 | 1205.839376,1000,733.362952 376 | 1157.338756,1000,734.60808 377 | 1226.714435,1000,737.446803 378 | 1136.55907,1000,738.440816 379 | 1197.004828,1000,741.029809 380 | 1251.669282,1000,742.321429 381 | 1169.591332,1000,745.273369 382 | 1175.315186,1000,746.264677 383 | 1287.578899,1000,748.93114 384 | 1192.807579,1000,750.189072 385 | 1196.868257,1000,753.450752 386 | 1178.218017,1000,754.759298 387 | 1169.844137,1000,756.10981 388 | 1253.768418,1000,758.881466 389 | 1226.71507,1000,759.949672 390 | 1144.563523,1000,762.757038 391 | 1233.096438,1000,764.070319 392 | 1242.334307,1000,766.548992 393 | 1267.147603,1000,767.498803 394 | 1241.246591,1000,770.110006 395 | 1216.568078,1000,771.087512 396 | 1232.50534,1000,773.690158 397 | 1192.38252,1000,775.04751 398 | 1192.529773,1000,777.984974 399 | 1197.772407,1000,779.206509 400 | 1231.831413,1000,781.697871 401 | 1257.484894,1000,782.783396 402 | 1237.614977,1000,785.734502 403 | 1219.328607,1000,787.081221 404 | 1248.092246,1000,789.782531 405 | 1183.327412,1000,790.996055 406 | 1213.355323,1000,793.91777 407 | 1252.190665,1000,795.051542 408 | 1215.816225,1000,797.793562 409 | 1183.267447,1000,798.989968 410 | 1254.214224,1000,801.854732 411 | 1255.200705,1000,803.082424 412 | 1244.051325,1000,805.866077 413 | 1204.472981,1000,806.809448 414 | 
1238.15827,1000,809.685193 415 | 1249.947126,1000,810.844172 416 | 1294.381167,1000,813.857665 417 | 1179.464694,1000,815.414248 418 | 1228.97724,1000,818.468212 419 | 1294.34755,1000,819.761278 420 | 1286.667062,1000,823.010887 421 | 896.173339,1000,824.377486 422 | 1279.767675,1000,826.772813 423 | 1307.564922,1000,827.968613 424 | 1263.683785,1000,830.539475 425 | 1320.224623,1000,831.883434 426 | 1273.131525,1000,834.541928 427 | 1279.344728,1000,835.68702 428 | 1272.088808,1000,838.70668 429 | 1243.611551,1000,840.040226 430 | 1267.509213,1000,841.325225 431 | 1272.204952,1000,844.189966 432 | 1276.367181,1000,845.721147 433 | 1116.783047,1000,848.648245 434 | 1145.940173,1000,849.708395 435 | 1259.521667,1000,852.22837 436 | 1308.927682,1000,853.296201 437 | 1318.6338,1000,855.788748 438 | 1270.032007,1000,857.131658 439 | 1259.442147,1000,860.08781 440 | 1303.031773,1000,861.34407 441 | 1281.499694,1000,863.954454 442 | 1289.230461,1000,865.321059 443 | 1272.781291,1000,867.961024 444 | 1254.240441,1000,869.010779 445 | 1316.460956,1000,871.683224 446 | 1291.097385,1000,872.940252 447 | 1272.629264,1000,875.699198 448 | 1143.943649,1000,877.076202 449 | 1310.932602,1000,879.881687 450 | 1265.146693,1000,880.901056 451 | 1309.721111,1000,883.768743 452 | 1284.524916,1000,885.053283 453 | 1329.838279,1000,887.902546 454 | 1262.113414,1000,888.98311 455 | 1398.040717,1000,891.756555 456 | 1325.668765,1000,893.107771 457 | 1275.663404,1000,895.996209 458 | 1265.162983,1000,897.140467 459 | 1273.370194,1000,900.180047 460 | 1233.133075,1000,901.540732 461 | 1287.027972,1000,904.558984 462 | 1231.197854,1000,905.911244 463 | 1321.679047,1000,908.510955 464 | 1312.242993,1000,909.832317 465 | 1095.247801,1000,912.695892 466 | 1312.263844,1000,914.188861 467 | 1348.482471,1000,917.080477 468 | 1265.151301,1000,918.196066 469 | 1287.300666,1000,920.482696 470 | 1338.093469,1000,921.640459 471 | 1310.86654,1000,924.228429 472 | 1292.961074,1000,925.374282 473 | 
1035.165659,1000,926.725541 474 | 1347.021435,1000,929.725354 475 | 1313.175546,1000,931.27525 476 | 1263.687967,1000,933.840042 477 | 1381.746211,1000,934.879169 478 | 1312.651382,1000,937.696879 479 | 1286.133817,1000,939.029546 480 | 1284.966166,1000,941.898241 481 | 1370.971683,1000,943.2458 482 | 1343.660517,1000,946.221667 483 | 1346.176585,1000,947.574882 484 | 1246.956164,1000,950.582112 485 | 1383.379107,1000,952.141104 486 | 1240.421172,1000,955.217387 487 | 1356.962852,1000,956.289171 488 | 1317.625628,1000,959.022342 489 | 1341.960947,1000,960.161044 490 | 1323.008449,1000,963.090997 491 | 1386.916702,1000,964.368864 492 | 1295.731075,1000,967.185292 493 | 1317.811562,1000,968.279423 494 | 1283.662978,1000,970.718357 495 | 1313.911165,1000,971.842746 496 | 1352.211889,1000,974.643857 497 | 1314.706247,1000,975.669657 498 | 1298.998591,1000,978.428276 499 | 1352.522979,1000,979.304437 500 | 1342.603284,1000,981.811546 501 | 1332.250209,1000,983.135908 502 | 1318.008594,1000,986.044235 503 | 1347.017439,1000,987.294649 504 | 1327.291098,1000,990.138474 505 | 1354.111627,1000,991.333571 506 | 1295.323806,1000,994.265888 507 | 1339.274039,1000,995.609289 508 | 1353.948781,1000,998.286374 509 | 1362.923164,1000,999.629091 510 | 1295.949408,1000,1002.291848 511 | 1188.784143,1000,1003.500566 512 | 1330.074504,1000,1006.551373 513 | 1319.866172,1000,1007.789447 514 | 1304.639444,1000,1009.023222 515 | 1340.267038,1000,1011.833386 516 | 1310.675715,1000,1013.195146 517 | 1299.302238,1000,1015.895333 518 | 1323.726075,1000,1016.965457 519 | 1314.382042,1000,1019.531999 520 | 1371.329012,1000,1020.530098 521 | 1281.474563,1000,1023.184052 522 | 1333.966797,1000,1024.512407 523 | 1324.735686,1000,1027.459625 524 | 1365.634017,1000,1028.513625 525 | 1341.637454,1000,1031.169665 526 | 1365.822889,1000,1032.429499 527 | 1371.658313,1000,1035.353064 528 | 1357.341079,1000,1036.685304 529 | 1341.308791,1000,1039.483365 530 | 1363.396745,1000,1040.829921 531 | 
1407.095375,1000,1043.737981 532 | 1398.190672,1000,1045.076542 533 | 1402.678234,1000,1047.70838 534 | 1350.98966,1000,1048.819323 535 | 1365.979656,1000,1051.694335 536 | 1362.416962,1000,1052.682757 537 | 1350.894417,1000,1055.463221 538 | 1341.916723,1000,1056.762297 539 | 1372.098927,1000,1059.506602 540 | 1365.434686,1000,1060.863653 541 | 1348.960628,1000,1063.791352 542 | 1379.500423,1000,1064.979077 543 | 1339.440079,1000,1067.605634 544 | 1333.689177,1000,1068.940476 545 | 1348.813995,1000,1071.876464 546 | 1343.560758,1000,1073.223495 547 | 1369.521791,1000,1076.114655 548 | 1420.565512,1000,1077.451258 549 | 1416.474373,1000,1080.541597 550 | 1391.491284,1000,1081.89042 551 | 1351.649415,1000,1084.696571 552 | 1402.089061,1000,1086.038021 553 | -278.202361,1000,1088.782444 554 | 1426.68084,1000,1090.150698 555 | 1372.205722,1000,1092.94422 556 | 1327.188899,1000,1094.279226 557 | 1395.880822,1000,1095.548559 558 | 1422.468152,1000,1098.32144 559 | 1364.991235,1000,1099.660251 560 | 1200.511078,1000,1102.278947 561 | 1386.837438,1000,1103.33136 562 | 1354.526895,1000,1106.167405 563 | 1362.57653,1000,1107.467357 564 | 1380.906583,1000,1110.495329 565 | 1408.438886,1000,1111.770042 566 | 1394.794806,1000,1114.488801 567 | 1287.196731,1000,1115.733701 568 | 1347.569146,1000,1118.574657 569 | 1364.378019,1000,1119.621599 570 | 1405.535963,1000,1122.219803 571 | 1421.087249,1000,1123.500865 572 | 1401.029897,1000,1126.133368 573 | 1416.473314,1000,1126.966455 574 | 1393.034864,1000,1129.440628 575 | 1446.691821,1000,1130.408718 576 | 1408.347304,1000,1133.134454 577 | 1418.562057,1000,1134.455636 578 | 1472.815472,1000,1137.232914 579 | 1391.000184,1000,1138.232742 580 | 1426.507186,1000,1140.758514 581 | 1443.706328,1000,1142.018054 582 | 1438.620115,1000,1144.825871 583 | 1367.275086,1000,1146.004451 584 | 1378.876339,1000,1149.009869 585 | 1406.582108,1000,1150.331825 586 | 1417.574158,1000,1152.711985 587 | 1330.561724,1000,1154.058242 588 | 
1411.958918,1000,1156.802913 589 | 1391.902151,1000,1157.817776 590 | 1405.196735,1000,1160.451621 591 | 1431.961022,1000,1161.494309 592 | 1456.972825,1000,1164.12898 593 | 1391.319598,1000,1165.48333 594 | 1418.277885,1000,1168.220456 595 | 1423.994355,1000,1169.048201 596 | 1427.856045,1000,1171.751928 597 | 1468.749439,1000,1173.320147 598 | 1408.843478,1000,1176.32063 599 | 1479.257095,1000,1177.576693 600 | 1375.04754,1000,1178.915881 601 | 1471.986198,1000,1181.809966 602 | 1381.028287,1000,1183.140265 603 | 1393.003027,1000,1186.25054 604 | 1443.148466,1000,1187.354451 605 | 1393.872206,1000,1190.153266 606 | 1421.908365,1000,1191.309342 607 | 1431.70885,1000,1194.01627 608 | 1418.218164,1000,1195.104468 609 | 1457.858798,1000,1197.84436 610 | 1431.01735,1000,1199.170383 611 | 1454.557287,1000,1201.914174 612 | 1424.073661,1000,1203.255048 613 | 1453.019385,1000,1206.246207 614 | 1460.442521,1000,1207.454167 615 | 1469.341089,1000,1210.094263 616 | 1437.657377,1000,1211.099261 617 | 1479.839952,1000,1213.651078 618 | 1466.204575,1000,1214.845307 619 | 1482.928216,1000,1217.712286 620 | 1425.161373,1000,1219.057606 621 | 1465.915059,1000,1221.761175 622 | 1437.177162,1000,1223.07847 623 | 1500.996894,1000,1225.985442 624 | 1463.144756,1000,1227.2597 625 | 1415.497152,1000,1230.208398 626 | 1483.740677,1000,1231.399901 627 | 1483.158304,1000,1233.974222 628 | 1461.5897,1000,1234.962847 629 | 1420.790315,1000,1237.733648 630 | 1329.79996,1000,1239.001454 631 | 1485.711606,1000,1241.783471 632 | 1486.089289,1000,1243.058825 633 | 1479.941376,1000,1245.986107 634 | 1501.936745,1000,1247.305073 635 | 1508.501237,1000,1250.22684 636 | 1438.446409,1000,1251.469622 637 | 1465.37434,1000,1254.039816 638 | 1454.298246,1000,1255.190371 639 | 1352.499439,1000,1257.92403 640 | 1452.024587,1000,1259.129828 641 | 1464.45817,1000,1261.824111 642 | 1483.295639,1000,1262.95797 643 | 1465.204364,1000,1263.985428 644 | 1446.557464,1000,1266.818786 645 | 
1497.187375,1000,1267.960435 646 | 1477.867663,1000,1270.567046 647 | 1490.783247,1000,1271.849611 648 | 1445.368721,1000,1274.631681 649 | 1461.197587,1000,1275.555108 650 | 1460.121219,1000,1278.312867 651 | -224.916744,1000,1279.721825 652 | 1521.924633,1000,1282.724369 653 | 1473.50311,1000,1283.799051 654 | 1261.477761,1000,1286.128731 655 | 1440.926756,1000,1287.427093 656 | 1481.117598,1000,1290.304626 657 | 1454.731078,1000,1291.573813 658 | 1500.526094,1000,1294.584912 659 | 1480.172299,1000,1295.822899 660 | 1518.721128,1000,1298.405158 661 | 1476.920594,1000,1299.563361 662 | 1497.388948,1000,1302.233644 663 | -230.362569,1000,1303.499494 664 | 1436.643991,1000,1306.408558 665 | 1459.240444,1000,1307.46542 666 | 1524.537517,1000,1310.381864 667 | 1501.15452,1000,1311.733514 668 | 1439.92645,1000,1314.627096 669 | 1505.427104,1000,1316.001504 670 | 1479.462288,1000,1318.645937 671 | 1497.230564,1000,1319.702511 672 | 1471.724453,1000,1322.398228 673 | 1408.9952,1000,1323.224727 674 | 1465.541332,1000,1325.784832 675 | 1498.86643,1000,1327.129044 676 | 1474.9164,1000,1329.464344 677 | 1523.505369,1000,1330.782412 678 | 1486.630773,1000,1333.371381 679 | 1482.955172,1000,1334.381278 680 | 1520.690256,1000,1336.881912 681 | 1499.028843,1000,1338.228182 682 | 1458.775133,1000,1341.007829 683 | 1517.44344,1000,1341.810928 684 | 1524.562054,1000,1344.252299 685 | 1556.579658,1000,1345.29971 686 | 1481.930681,1000,1346.562289 687 | 1469.824327,1000,1349.082298 688 | 1477.417703,1000,1350.148786 689 | 1518.469571,1000,1352.707199 690 | 1512.289501,1000,1354.026629 691 | 1538.454916,1000,1356.674719 692 | 1499.721441,1000,1357.77374 693 | 1492.74153,1000,1360.586605 694 | 1612.800158,1000,1361.844908 695 | 1539.92357,1000,1364.676126 696 | 1539.90402,1000,1365.758054 697 | 1518.511715,1000,1368.297376 698 | 1488.38012,1000,1369.269698 699 | 1572.658315,1000,1371.89587 700 | 1438.537688,1000,1372.738039 701 | 1520.577586,1000,1375.403601 702 | 
1263.190341,1000,1376.80486 703 | 1557.696321,1000,1379.939008 704 | 1551.468216,1000,1381.226555 705 | 1555.828669,1000,1383.779542 706 | 1537.279386,1000,1385.070577 707 | 1546.937908,1000,1387.791376 708 | 1357.476178,1000,1389.124524 709 | 1504.270637,1000,1392.053146 710 | 1521.314315,1000,1393.363345 711 | 1552.62198,1000,1396.068964 712 | 1496.339829,1000,1397.415679 713 | 1496.166608,1000,1400.374208 714 | 1395.938185,1000,1401.715546 715 | 1524.37572,1000,1404.245348 716 | 1445.126095,1000,1405.279001 717 | 1521.454356,1000,1408.011683 718 | 1541.943346,1000,1409.337919 719 | 1566.226192,1000,1412.012666 720 | 1466.006266,1000,1413.381728 721 | 1555.088487,1000,1416.053546 722 | 1497.973262,1000,1417.073803 723 | 809.280398,1000,1419.928401 724 | 1524.515905,1000,1421.143965 725 | 1552.989774,1000,1424.051631 726 | 1572.97674,1000,1425.368333 727 | 1500.209476,1000,1427.987207 728 | 1520.00071,1000,1429.352894 729 | 1550.787111,1000,1430.722999 730 | 1596.83608,1000,1433.652764 731 | 1542.887037,1000,1435.003048 732 | 1237.969189,1000,1437.844051 733 | 1548.98261,1000,1438.978405 734 | 1534.593072,1000,1441.661593 735 | 1555.322289,1000,1442.689343 736 | 1566.84887,1000,1445.204552 737 | 1498.666811,1000,1446.251609 738 | 1575.29336,1000,1448.838894 739 | 1547.942641,1000,1450.071018 740 | 1536.331769,1000,1452.795045 741 | 1513.112186,1000,1454.126225 742 | 1539.56658,1000,1457.121436 743 | 1568.999926,1000,1458.464762 744 | 1518.173034,1000,1461.351964 745 | 1558.813897,1000,1462.49576 746 | 1496.983089,1000,1465.060242 747 | 1479.608862,1000,1466.205008 748 | 1505.154529,1000,1469.083746 749 | 1600.496574,1000,1470.197218 750 | 1519.989506,1000,1472.743313 751 | 1569.260231,1000,1473.670007 752 | 1536.344137,1000,1476.300234 753 | 1540.185474,1000,1477.495444 754 | 1498.790824,1000,1480.252834 755 | 1533.781361,1000,1481.47569 756 | 1523.225777,1000,1484.317931 757 | 1579.967241,1000,1485.842725 758 | 1558.271746,1000,1488.897848 759 | 
1597.437394,1000,1490.214442 760 | 1517.511313,1000,1493.082003 761 | 1506.348228,1000,1494.370628 762 | 1569.72198,1000,1497.051499 763 | 1549.246527,1000,1497.870193 764 | 1552.980151,1000,1500.36623 765 | 1559.557782,1000,1501.696865 766 | 1513.492331,1000,1504.328161 767 | 1501.155944,1000,1505.416328 768 | 1613.819425,1000,1507.746207 769 | 1578.19202,1000,1508.96219 770 | 1546.02223,1000,1510.322356 771 | 1558.128915,1000,1512.751599 772 | 1561.109996,1000,1514.01408 773 | 1573.070085,1000,1516.448258 774 | 1579.121567,1000,1517.696406 775 | 1506.30432,1000,1520.429926 776 | 1566.574652,1000,1521.551378 777 | 1558.027869,1000,1524.084677 778 | 1602.009823,1000,1524.96412 779 | 1423.27286,1000,1527.63238 780 | 1557.081786,1000,1528.966032 781 | 1539.349775,1000,1531.887439 782 | 1600.670679,1000,1532.757738 783 | 1589.577848,1000,1535.470881 784 | 1594.796235,1000,1537.034132 785 | 1564.764814,1000,1540.017986 786 | 1484.419972,1000,1541.041849 787 | 1597.809228,1000,1543.538698 788 | 1554.726876,1000,1544.566126 789 | 1541.994097,1000,1547.402536 790 | 1600.941659,1000,1548.753546 791 | 1541.300574,1000,1551.43984 792 | 1588.089279,1000,1552.629599 793 | 1571.545007,1000,1555.159121 794 | 1524.836774,1000,1556.505641 795 | 1570.74362,1000,1559.336583 796 | 1596.400547,1000,1560.302614 797 | 1567.91616,1000,1562.751957 798 | 1554.217097,1000,1564.039658 799 | 1532.75017,1000,1566.972584 800 | 1592.669505,1000,1568.166361 801 | 1548.216774,1000,1570.510645 802 | 1583.164102,1000,1571.890503 803 | 1522.618913,1000,1574.74989 804 | 1595.546007,1000,1575.877184 805 | 1561.727838,1000,1578.467846 806 | 1606.372055,1000,1579.909358 807 | 1557.042974,1000,1582.696832 808 | 1380.593256,1000,1583.928659 809 | 1606.110341,1000,1586.753136 810 | 1529.399495,1000,1587.986454 811 | 1507.852256,1000,1590.743254 812 | 1594.922078,1000,1591.750613 813 | 1550.344808,1000,1592.786434 814 | 1586.480977,1000,1595.397214 815 | 1606.333019,1000,1596.491332 816 | 
1641.782743,1000,1599.18556 817 | 1600.780051,1000,1600.503961 818 | 1577.878716,1000,1603.28211 819 | 1582.061892,1000,1604.620367 820 | 1558.406186,1000,1607.468039 821 | 1569.097534,1000,1608.555512 822 | 1564.807211,1000,1611.198658 823 | 1601.522193,1000,1612.435548 824 | 1575.829064,1000,1615.428615 825 | 1577.716938,1000,1616.705716 826 | 1572.595418,1000,1619.742862 827 | 1600.581017,1000,1621.087844 828 | 1555.659205,1000,1623.819891 829 | 1586.625722,1000,1624.756215 830 | 1605.774249,1000,1627.413513 831 | 1582.393286,1000,1628.641863 832 | 1560.264227,1000,1631.458863 833 | 1602.003799,1000,1632.808948 834 | 1604.815232,1000,1635.628681 835 | 1559.518861,1000,1636.777896 836 | 1572.220376,1000,1639.39799 837 | 1557.182885,1000,1640.492812 838 | 1627.377922,1000,1643.004068 839 | 1567.876578,1000,1644.199312 840 | 1556.200032,1000,1647.064078 841 | 1593.317962,1000,1648.120523 842 | 1582.209807,1000,1650.511347 843 | 1568.14402,1000,1651.448039 844 | 1543.634572,1000,1654.04035 845 | 1592.734727,1000,1655.248631 846 | 1537.670746,1000,1658.212183 847 | 1530.171834,1000,1659.251533 848 | 1540.295455,1000,1661.696147 849 | 1553.992182,1000,1663.034393 850 | 1614.806184,1000,1665.940093 851 | 1602.957705,1000,1667.257865 852 | 1376.591873,1000,1669.800991 853 | 1650.908495,1000,1670.705617 854 | 1596.006529,1000,1673.054279 855 | 1514.105903,1000,1674.384518 856 | 1572.439268,1000,1675.55309 857 | 1579.450733,1000,1678.464658 858 | 1583.97134,1000,1679.638113 859 | 1564.732541,1000,1682.25501 860 | 1588.825762,1000,1683.535007 861 | 1625.9209,1000,1686.237665 862 | 1625.716613,1000,1687.29431 863 | 1637.131476,1000,1690.089922 864 | 1624.252016,1000,1691.264525 865 | 1576.770863,1000,1693.920972 866 | 1635.839863,1000,1695.265519 867 | 1594.838518,1000,1698.059691 868 | 1588.666487,1000,1699.393612 869 | 1589.821749,1000,1702.120788 870 | 1548.477185,1000,1703.437636 871 | 1604.719371,1000,1706.419306 872 | 1605.248131,1000,1707.723914 873 | 
1454.931611,1000,1710.200027 874 | 1607.343427,1000,1711.492798 875 | 1610.953069,1000,1714.364838 876 | 1550.778786,1000,1715.669801 877 | 1513.717707,1000,1718.132035 878 | 1586.566502,1000,1719.158841 879 | 1583.035699,1000,1721.788419 880 | 1617.53285,1000,1723.06562 881 | 1488.212427,1000,1725.946443 882 | 1609.186375,1000,1727.514482 883 | 1577.136658,1000,1730.445972 884 | 1571.131247,1000,1731.438909 885 | 1592.10085,1000,1733.951633 886 | 1604.648214,1000,1735.292283 887 | 1617.125647,1000,1738.116015 888 | 1570.455478,1000,1739.463315 889 | 1602.192773,1000,1742.008163 890 | 1616.787395,1000,1743.107378 891 | 1599.260868,1000,1745.671476 892 | 1535.810133,1000,1746.59748 893 | 1548.653286,1000,1749.168954 894 | 1636.54907,1000,1750.191305 895 | 1540.892106,1000,1752.884449 896 | 1623.639332,1000,1753.837548 897 | 1626.146679,1000,1756.302508 898 | 1600.33045,1000,1757.635103 899 | 1608.029018,1000,1758.779514 900 | 1561.567138,1000,1761.401371 901 | 1621.115484,1000,1762.755574 902 | 1668.608726,1000,1765.699998 903 | 1614.351791,1000,1767.018183 904 | 1624.759284,1000,1769.533846 905 | 1574.891339,1000,1770.814876 906 | 1587.047751,1000,1773.472541 907 | 1604.069858,1000,1774.713077 908 | 1573.508167,1000,1777.595241 909 | 1608.828644,1000,1778.668031 910 | 1566.458712,1000,1781.577085 911 | 1597.871438,1000,1782.935834 912 | 1596.54262,1000,1785.809067 913 | 1559.518582,1000,1787.173454 914 | 1602.140567,1000,1790.26825 915 | 1630.227234,1000,1791.657814 916 | 1588.121896,1000,1794.533857 917 | 1625.412756,1000,1795.692713 918 | 1551.011876,1000,1798.552838 919 | 1639.248292,1000,1799.892904 920 | 1626.990139,1000,1802.654802 921 | 1562.108443,1000,1803.577805 922 | 1620.261557,1000,1806.263093 923 | 1633.76654,1000,1807.19032 924 | 1574.226555,1000,1809.719351 925 | 1562.121445,1000,1810.847957 926 | 1643.196614,1000,1813.779945 927 | 1633.115158,1000,1815.019669 928 | 1589.450878,1000,1817.884043 929 | 1575.343368,1000,1819.15962 930 | 
1587.055235,1000,1821.83988 931 | 1517.917688,1000,1823.069125 932 | 1624.776226,1000,1825.580729 933 | 1623.301241,1000,1826.564407 934 | 1650.491284,1000,1829.295597 935 | 1586.035193,1000,1830.37186 936 | 1546.427663,1000,1832.885118 937 | 1648.110717,1000,1834.14327 938 | 1590.688912,1000,1836.943925 939 | 1621.453865,1000,1838.30184 940 | 1615.499517,1000,1841.120758 941 | 1537.31359,1000,1842.078623 942 | 1585.917896,1000,1843.427171 943 | 1571.417986,1000,1846.159604 944 | 1603.660292,1000,1847.378742 945 | 1593.166822,1000,1849.990456 946 | 1609.033397,1000,1851.30287 947 | 1608.799382,1000,1854.120682 948 | 1571.36737,1000,1855.467232 949 | 1638.422293,1000,1858.265273 950 | 1558.059648,1000,1859.316003 951 | 1640.78339,1000,1862.122006 952 | 1627.595459,1000,1863.248134 953 | 1574.364689,1000,1865.961103 954 | 1587.767977,1000,1866.847883 955 | 1610.861123,1000,1869.399901 956 | 1599.016835,1000,1870.368382 957 | 1639.074922,1000,1873.099809 958 | 1659.572359,1000,1874.381682 959 | 1680.452388,1000,1876.990129 960 | 1559.742042,1000,1878.062166 961 | 1600.33277,1000,1880.887562 962 | 1607.296531,1000,1882.19049 963 | 1634.980183,1000,1884.975554 964 | 1581.214484,1000,1886.314507 965 | 1585.710491,1000,1888.997482 966 | 1581.096287,1000,1890.146019 967 | 1592.352129,1000,1893.187185 968 | 1462.579958,1000,1894.686065 969 | 1593.825958,1000,1897.668288 970 | 1590.684704,1000,1898.668852 971 | 1549.833059,1000,1901.439101 972 | 1609.45019,1000,1902.40733 973 | 1204.402028,1000,1905.254157 974 | 1621.917767,1000,1906.699061 975 | 1611.115139,1000,1909.453564 976 | 1636.94668,1000,1910.573252 977 | 1594.085668,1000,1913.457186 978 | 1616.638047,1000,1914.615857 979 | 1605.547248,1000,1916.952832 980 | 1634.00932,1000,1917.854815 981 | 1668.525749,1000,1919.929569 982 | 1682.201443,1000,1920.886685 983 | 1556.822362,1000,1923.457704 984 | 1606.777618,1000,1924.391846 985 | 1606.451842,1000,1925.320808 986 | 1629.870873,1000,1928.077232 987 | 
import copy
import glob
import os
import time
from collections import deque

import gym
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

from a2c_ppo_acktr import algo, utils
from a2c_ppo_acktr.algo import gail
from a2c_ppo_acktr.arguments import get_args
from a2c_ppo_acktr.envs import make_vec_envs
from a2c_ppo_acktr.model import Policy
from a2c_ppo_acktr.storage import RolloutStorage
from evaluation import evaluate


def main():
    """Train an A2C / PPO / ACKTR agent (optionally with a GAIL discriminator).

    Reads all hyper-parameters from the command line (see
    a2c_ppo_acktr/arguments.py), runs the rollout/update loop for
    ``num_env_steps`` environment steps, periodically saves the policy,
    logs training statistics, and (optionally) evaluates the policy on a
    separate set of environments.
    """
    args = get_args()

    # Seed both CPU and all CUDA devices for reproducibility.
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)

    if args.cuda and torch.cuda.is_available() and args.cuda_deterministic:
        # Deterministic cuDNN kernels trade speed for bit-exact repeatability.
        torch.backends.cudnn.benchmark = False
        torch.backends.cudnn.deterministic = True

    log_dir = os.path.expanduser(args.log_dir)
    eval_log_dir = log_dir + "_eval"
    utils.cleanup_log_dir(log_dir)
    utils.cleanup_log_dir(eval_log_dir)

    torch.set_num_threads(1)
    device = torch.device("cuda:0" if args.cuda else "cpu")

    # NOTE(review): make_vec_envs receives the raw args.log_dir while the
    # cleanup above used the expanduser()-expanded path; these differ only
    # when the flag contains "~" — confirm intended.
    envs = make_vec_envs(args.env_name, args.seed, args.num_processes,
                         args.gamma, args.log_dir, device, False)

    actor_critic = Policy(
        envs.observation_space.shape,
        envs.action_space,
        base_kwargs={'recurrent': args.recurrent_policy})
    actor_critic.to(device)

    if args.algo == 'a2c':
        agent = algo.A2C_ACKTR(
            actor_critic,
            args.value_loss_coef,
            args.entropy_coef,
            lr=args.lr,
            eps=args.eps,
            alpha=args.alpha,
            max_grad_norm=args.max_grad_norm)
    elif args.algo == 'ppo':
        agent = algo.PPO(
            actor_critic,
            args.clip_param,
            args.ppo_epoch,
            args.num_mini_batch,
            args.value_loss_coef,
            args.entropy_coef,
            lr=args.lr,
            eps=args.eps,
            max_grad_norm=args.max_grad_norm)
    elif args.algo == 'acktr':
        agent = algo.A2C_ACKTR(
            actor_critic, args.value_loss_coef, args.entropy_coef, acktr=True)
    else:
        # Fail fast with a clear message instead of a later NameError on
        # an unbound `agent`.
        raise ValueError("Unknown algorithm: {}".format(args.algo))

    if args.gail:
        # GAIL is only supported for flat (1-D) observation spaces.
        assert len(envs.observation_space.shape) == 1
        discr = gail.Discriminator(
            envs.observation_space.shape[0] + envs.action_space.shape[0], 100,
            device)
        file_name = os.path.join(
            args.gail_experts_dir, "trajs_{}.pt".format(
                args.env_name.split('-')[0].lower()))

        expert_dataset = gail.ExpertDataset(
            file_name, num_trajectories=4, subsample_frequency=20)
        # Only drop the last (partial) batch when there is more than one.
        drop_last = len(expert_dataset) > args.gail_batch_size
        gail_train_loader = torch.utils.data.DataLoader(
            dataset=expert_dataset,
            batch_size=args.gail_batch_size,
            shuffle=True,
            drop_last=drop_last)

    rollouts = RolloutStorage(args.num_steps, args.num_processes,
                              envs.observation_space.shape, envs.action_space,
                              actor_critic.recurrent_hidden_state_size)

    obs = envs.reset()
    rollouts.obs[0].copy_(obs)
    rollouts.to(device)

    # Rolling window over the last 10 finished episodes for logging.
    episode_rewards = deque(maxlen=10)

    start = time.time()
    num_updates = int(
        args.num_env_steps) // args.num_steps // args.num_processes
    for j in range(num_updates):

        if args.use_linear_lr_decay:
            # decrease learning rate linearly
            utils.update_linear_schedule(
                agent.optimizer, j, num_updates,
                agent.optimizer.lr if args.algo == "acktr" else args.lr)

        for step in range(args.num_steps):
            # Sample actions
            with torch.no_grad():
                value, action, action_log_prob, recurrent_hidden_states = actor_critic.act(
                    rollouts.obs[step], rollouts.recurrent_hidden_states[step],
                    rollouts.masks[step])

            # Observe reward and next obs
            obs, reward, done, infos = envs.step(action)

            for info in infos:
                if 'episode' in info.keys():
                    episode_rewards.append(info['episode']['r'])

            # If done then clean the history of observations.
            masks = torch.FloatTensor(
                [[0.0] if done_ else [1.0] for done_ in done])
            # bad_masks distinguish true terminations from time-limit cutoffs
            # ('bad_transition'), used for proper-time-limit return bootstrapping.
            bad_masks = torch.FloatTensor(
                [[0.0] if 'bad_transition' in info.keys() else [1.0]
                 for info in infos])
            rollouts.insert(obs, recurrent_hidden_states, action,
                            action_log_prob, value, reward, masks, bad_masks)

        with torch.no_grad():
            next_value = actor_critic.get_value(
                rollouts.obs[-1], rollouts.recurrent_hidden_states[-1],
                rollouts.masks[-1]).detach()

        if args.gail:
            if j >= 10:
                # After warm-up, freeze the obs normalizer used by the venv.
                envs.venv.eval()

            gail_epoch = args.gail_epoch
            if j < 10:
                gail_epoch = 100  # Warm up
            for _ in range(gail_epoch):
                discr.update(gail_train_loader, rollouts,
                             utils.get_vec_normalize(envs)._obfilt)

            # Replace environment rewards with discriminator-based rewards.
            for step in range(args.num_steps):
                rollouts.rewards[step] = discr.predict_reward(
                    rollouts.obs[step], rollouts.actions[step], args.gamma,
                    rollouts.masks[step])

        rollouts.compute_returns(next_value, args.use_gae, args.gamma,
                                 args.gae_lambda, args.use_proper_time_limits)

        value_loss, action_loss, dist_entropy = agent.update(rollouts)

        rollouts.after_update()

        # save for every interval-th episode or for the last epoch
        if (j % args.save_interval == 0
                or j == num_updates - 1) and args.save_dir != "":
            save_path = os.path.join(args.save_dir, args.algo)
            try:
                os.makedirs(save_path)
            except OSError:
                pass

            # Save the policy together with the obs normalization statistics
            # so the model can be replayed/evaluated consistently.
            torch.save([
                actor_critic,
                getattr(utils.get_vec_normalize(envs), 'obs_rms', None)
            ], os.path.join(save_path, args.env_name + ".pt"))

        if j % args.log_interval == 0 and len(episode_rewards) > 1:
            total_num_steps = (j + 1) * args.num_processes * args.num_steps
            end = time.time()
            # FIX: the original format string had no placeholders for
            # dist_entropy / value_loss / action_loss, so str.format silently
            # discarded them and they were never logged.
            print(
                "Updates {}, num timesteps {}, FPS {} \n"
                " Last {} training episodes: mean/median reward {:.1f}/{:.1f},"
                " min/max reward {:.1f}/{:.1f},"
                " entropy {:.2f}, value loss {:.2f}, action loss {:.2f}\n"
                .format(j, total_num_steps,
                        int(total_num_steps / (end - start)),
                        len(episode_rewards), np.mean(episode_rewards),
                        np.median(episode_rewards), np.min(episode_rewards),
                        np.max(episode_rewards), dist_entropy, value_loss,
                        action_loss))

        if (args.eval_interval is not None and len(episode_rewards) > 1
                and j % args.eval_interval == 0):
            obs_rms = utils.get_vec_normalize(envs).obs_rms
            evaluate(actor_critic, obs_rms, args.env_name, args.seed,
                     args.num_processes, eval_log_dir, device)


if __name__ == "__main__":
    main()
--num-env-steps 1000000 --use-linear-lr-decay 7 | --no-cuda --log-dir /tmp/gym/reacher/reacher-0 --seed 0 --use-proper-time-limits 8 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 9 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 10 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 11 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-0 12 | --seed 0 --use-proper-time-limits 13 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 14 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 15 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 16 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-0 --seed 0 --use-proper-time-limits 17 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 18 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 19 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 20 | --no-cuda --log-dir /tmp/gym/hopper/hopper-0 --seed 0 --use-proper-time-limits 21 | window_name: seed-0 22 | - panes: 23 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 24 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 25 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 26 | --no-cuda --log-dir /tmp/gym/reacher/reacher-1 --seed 1 --use-proper-time-limits 27 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 28 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 29 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 30 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-1 
31 | --seed 1 --use-proper-time-limits 32 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 33 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 34 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 35 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-1 --seed 1 --use-proper-time-limits 36 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 37 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 38 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 39 | --no-cuda --log-dir /tmp/gym/hopper/hopper-1 --seed 1 --use-proper-time-limits 40 | window_name: seed-1 41 | - panes: 42 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 43 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 44 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 45 | --no-cuda --log-dir /tmp/gym/reacher/reacher-2 --seed 2 --use-proper-time-limits 46 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 47 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 48 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 49 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-2 50 | --seed 2 --use-proper-time-limits 51 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 52 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 53 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 54 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-2 --seed 2 --use-proper-time-limits 55 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae 
--log-interval 1 --num-steps 56 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 57 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 58 | --no-cuda --log-dir /tmp/gym/hopper/hopper-2 --seed 2 --use-proper-time-limits 59 | window_name: seed-2 60 | - panes: 61 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 62 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 63 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 64 | --no-cuda --log-dir /tmp/gym/reacher/reacher-3 --seed 3 --use-proper-time-limits 65 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 66 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 67 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 68 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-3 69 | --seed 3 --use-proper-time-limits 70 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 71 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 72 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 73 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-3 --seed 3 --use-proper-time-limits 74 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 75 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 76 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 77 | --no-cuda --log-dir /tmp/gym/hopper/hopper-3 --seed 3 --use-proper-time-limits 78 | window_name: seed-3 79 | - panes: 80 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 81 | 2048 --num-processes 1 --lr 3e-4 
--entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 82 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 83 | --no-cuda --log-dir /tmp/gym/reacher/reacher-4 --seed 4 --use-proper-time-limits 84 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 85 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 86 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 87 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-4 88 | --seed 4 --use-proper-time-limits 89 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 90 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 91 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 92 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-4 --seed 4 --use-proper-time-limits 93 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 94 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 95 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 96 | --no-cuda --log-dir /tmp/gym/hopper/hopper-4 --seed 4 --use-proper-time-limits 97 | window_name: seed-4 98 | - panes: 99 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 100 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 101 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 102 | --no-cuda --log-dir /tmp/gym/reacher/reacher-5 --seed 5 --use-proper-time-limits 103 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 104 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 105 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 
0.95 --num-env-steps 106 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-5 107 | --seed 5 --use-proper-time-limits 108 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 109 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 110 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 111 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-5 --seed 5 --use-proper-time-limits 112 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 113 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 114 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 115 | --no-cuda --log-dir /tmp/gym/hopper/hopper-5 --seed 5 --use-proper-time-limits 116 | window_name: seed-5 117 | - panes: 118 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 119 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 120 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 121 | --no-cuda --log-dir /tmp/gym/reacher/reacher-6 --seed 6 --use-proper-time-limits 122 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 123 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 124 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 125 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-6 126 | --seed 6 --use-proper-time-limits 127 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 128 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 129 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 130 | --no-cuda 
--log-dir /tmp/gym/walker2d/walker2d-6 --seed 6 --use-proper-time-limits 131 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 132 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 133 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 134 | --no-cuda --log-dir /tmp/gym/hopper/hopper-6 --seed 6 --use-proper-time-limits 135 | window_name: seed-6 136 | - panes: 137 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 138 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 139 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 140 | --no-cuda --log-dir /tmp/gym/reacher/reacher-7 --seed 7 --use-proper-time-limits 141 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 142 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 143 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 144 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-7 145 | --seed 7 --use-proper-time-limits 146 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 147 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 148 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 149 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-7 --seed 7 --use-proper-time-limits 150 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 151 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 152 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 153 | --no-cuda --log-dir /tmp/gym/hopper/hopper-7 --seed 7 --use-proper-time-limits 154 | 
window_name: seed-7 155 | - panes: 156 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 157 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 158 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 159 | --no-cuda --log-dir /tmp/gym/reacher/reacher-8 --seed 8 --use-proper-time-limits 160 | - python main.py --env-name HalfCheetah-v2 --algo ppo --use-gae --log-interval 1 161 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 162 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 163 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-8 164 | --seed 8 --use-proper-time-limits 165 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 166 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 167 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 168 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-8 --seed 8 --use-proper-time-limits 169 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 170 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 171 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 172 | --no-cuda --log-dir /tmp/gym/hopper/hopper-8 --seed 8 --use-proper-time-limits 173 | window_name: seed-8 174 | - panes: 175 | - python main.py --env-name Reacher-v2 --algo ppo --use-gae --log-interval 1 --num-steps 176 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 177 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 178 | --no-cuda --log-dir /tmp/gym/reacher/reacher-9 --seed 9 --use-proper-time-limits 179 | - python main.py --env-name HalfCheetah-v2 
--algo ppo --use-gae --log-interval 1 180 | --num-steps 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 181 | 0.5 --ppo-epoch 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 182 | 1000000 --use-linear-lr-decay --no-cuda --log-dir /tmp/gym/halfcheetah/halfcheetah-9 183 | --seed 9 --use-proper-time-limits 184 | - python main.py --env-name Walker2d-v2 --algo ppo --use-gae --log-interval 1 --num-steps 185 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 186 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 187 | --no-cuda --log-dir /tmp/gym/walker2d/walker2d-9 --seed 9 --use-proper-time-limits 188 | - python main.py --env-name Hopper-v2 --algo ppo --use-gae --log-interval 1 --num-steps 189 | 2048 --num-processes 1 --lr 3e-4 --entropy-coef 0 --value-loss-coef 0.5 --ppo-epoch 190 | 10 --num-mini-batch 32 --gamma 0.99 --tau 0.95 --num-env-steps 1000000 --use-linear-lr-decay 191 | --no-cuda --log-dir /tmp/gym/hopper/hopper-9 --seed 9 --use-proper-time-limits 192 | window_name: seed-9 193 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | setup( 4 | name='a2c-ppo-acktr', 5 | packages=find_packages(), 6 | version='0.0.1', 7 | install_requires=['gym', 'matplotlib', 'pybullet', 'stable-baselines3']) 8 | -------------------------------------------------------------------------------- /time_limit_logs/halfcheetah/unfixhalfcheetah-7/0.monitor.csv: -------------------------------------------------------------------------------- 1 | # {"t_start": 1551815722.8808415, "env_id": "HalfCheetah-v2"} 2 | r,l,t 3 | -863.678411,1000,1.270044 4 | -700.912535,1000,2.374678 5 | -772.310924,1000,4.856986 6 | -772.683609,1000,5.844512 7 | -723.860562,1000,8.685917 8 | -791.691865,1000,9.807292 9 | 
-736.233194,1000,12.677814 10 | -695.788745,1000,14.198116 11 | -846.204917,1000,17.009144 12 | -687.24682,1000,18.161778 13 | -666.92304,1000,20.942555 14 | -616.226627,1000,22.056444 15 | -729.148965,1000,24.706241 16 | -798.819417,1000,26.059236 17 | -664.257965,1000,28.94857 18 | -664.685674,1000,30.245048 19 | -671.311888,1000,32.998655 20 | -640.374095,1000,34.224429 21 | -700.983878,1000,36.955018 22 | -687.099421,1000,38.282997 23 | -594.896593,1000,41.254902 24 | -640.796476,1000,42.553101 25 | -562.492923,1000,45.427089 26 | -693.462238,1000,46.505981 27 | -756.509615,1000,49.2586 28 | -622.836841,1000,50.610624 29 | -587.640866,1000,53.375638 30 | -530.450028,1000,54.732655 31 | -551.373347,1000,57.655889 32 | -663.893594,1000,58.967515 33 | -598.578523,1000,61.671952 34 | -655.82895,1000,62.738956 35 | -570.67664,1000,65.567579 36 | -474.074051,1000,66.911493 37 | -499.543311,1000,69.290626 38 | -471.268382,1000,70.392848 39 | -531.169756,1000,73.083794 40 | -506.748158,1000,73.894595 41 | -497.564792,1000,76.359683 42 | -431.901374,1000,77.707316 43 | -618.418294,1000,80.318377 44 | -472.344655,1000,81.666525 45 | -644.564038,1000,83.015632 46 | -543.783515,1000,85.964243 47 | -517.722645,1000,87.188323 48 | -510.570063,1000,90.105242 49 | -420.252919,1000,91.414707 50 | -372.621392,1000,94.083693 51 | -488.323566,1000,95.098325 52 | -429.283291,1000,97.861 53 | -454.401306,1000,98.966305 54 | -457.720907,1000,101.624549 55 | -672.839228,1000,102.71236 56 | -615.579986,1000,105.148578 57 | -433.01092,1000,106.112226 58 | -527.827902,1000,108.810722 59 | -409.580521,1000,110.090473 60 | -427.868252,1000,112.586834 61 | -439.841736,1000,113.764047 62 | -369.828057,1000,116.698338 63 | -240.997395,1000,118.05986 64 | -209.704656,1000,120.839584 65 | -282.945063,1000,121.950857 66 | -342.109156,1000,124.871808 67 | -361.910041,1000,125.980029 68 | -355.442376,1000,128.667034 69 | -291.725159,1000,129.836574 70 | -328.120244,1000,132.448013 71 | 
-271.392734,1000,133.490761 72 | -410.917491,1000,136.225601 73 | -439.182798,1000,137.575425 74 | -321.723659,1000,140.282876 75 | -348.678629,1000,141.608922 76 | -236.447657,1000,144.279105 77 | -264.181115,1000,145.62759 78 | -268.814078,1000,148.494909 79 | -238.783888,1000,149.835229 80 | -458.029752,1000,152.486758 81 | -429.322111,1000,153.669142 82 | -477.970809,1000,156.391834 83 | -274.342162,1000,157.422903 84 | -275.223563,1000,159.685244 85 | -349.522829,1000,160.838072 86 | -358.651981,1000,163.453405 87 | -101.678987,1000,164.68121 88 | -351.569727,1000,165.795642 89 | -22.892459,1000,168.842532 90 | -214.329165,1000,170.376852 91 | -510.281116,1000,173.283909 92 | -268.990539,1000,174.425146 93 | -94.764965,1000,177.166083 94 | -228.081757,1000,178.231446 95 | -365.247501,1000,180.934274 96 | -168.299396,1000,181.980191 97 | -513.730829,1000,184.775373 98 | -125.98019,1000,186.102003 99 | -281.31408,1000,188.793861 100 | -104.727444,1000,189.724763 101 | -189.522226,1000,192.453796 102 | -487.428103,1000,193.82684 103 | -257.504267,1000,196.420697 104 | -37.72444,1000,197.782645 105 | -118.973546,1000,200.449932 106 | 9.949679,1000,201.356417 107 | -61.721588,1000,203.958089 108 | -418.229057,1000,205.217112 109 | -29.283583,1000,208.048338 110 | -508.939615,1000,209.22136 111 | -292.983469,1000,212.078686 112 | -70.866691,1000,213.21601 113 | -274.070738,1000,216.061164 114 | -216.543896,1000,217.360408 115 | -166.121154,1000,220.04255 116 | -191.420191,1000,221.277 117 | 4.441336,1000,224.163773 118 | -45.42485,1000,225.537031 119 | -110.20159,1000,228.188265 120 | 128.331484,1000,229.07917 121 | 125.603861,1000,231.913433 122 | 5.260703,1000,233.281086 123 | -217.914377,1000,235.817633 124 | -67.819794,1000,236.765764 125 | 124.813546,1000,239.417984 126 | 84.279295,1000,240.60252 127 | -436.231481,1000,243.516502 128 | 61.665666,1000,244.807404 129 | 60.322904,1000,247.521209 130 | 180.521635,1000,248.845448 131 | 91.491932,1000,249.942643 132 
| 67.133424,1000,252.742799 133 | 76.290691,1000,253.752346 134 | 40.233537,1000,256.852412 135 | 233.076268,1000,258.384603 136 | -46.776231,1000,261.377458 137 | -50.247884,1000,262.740336 138 | 115.26782,1000,265.487694 139 | 50.507738,1000,266.556135 140 | 141.161622,1000,269.412482 141 | -407.964874,1000,270.485374 142 | 84.189646,1000,273.284408 143 | -409.840911,1000,274.497227 144 | -7.720854,1000,276.998497 145 | -99.02024,1000,278.062643 146 | 191.34243,1000,280.622579 147 | 169.696142,1000,281.650733 148 | 250.659689,1000,284.227449 149 | -242.310327,1000,285.36029 150 | 250.982904,1000,288.070772 151 | 297.432861,1000,289.264542 152 | 141.133526,1000,292.017141 153 | 207.171375,1000,293.21194 154 | 275.305572,1000,296.167196 155 | 246.307441,1000,297.494594 156 | 127.272304,1000,300.489912 157 | -604.052659,1000,301.894576 158 | 326.564499,1000,304.64603 159 | 416.101075,1000,305.658294 160 | 285.998118,1000,308.214157 161 | 284.98485,1000,309.517291 162 | -272.401104,1000,312.353321 163 | 498.809862,1000,313.622446 164 | 431.276357,1000,316.582134 165 | -472.561644,1000,317.738127 166 | -370.923089,1000,320.700602 167 | 357.695026,1000,322.059034 168 | -571.797829,1000,324.883473 169 | 0.297635,1000,325.990373 170 | 405.747744,1000,328.396878 171 | 402.190047,1000,329.603871 172 | 214.285734,1000,332.225698 173 | 322.913786,1000,333.268534 174 | 379.721994,1000,334.173369 175 | -77.410426,1000,336.781828 176 | -63.032721,1000,338.094043 177 | 227.218544,1000,340.685913 178 | 303.504101,1000,342.058941 179 | 381.62307,1000,344.684216 180 | 532.29455,1000,345.932801 181 | 350.212277,1000,348.701369 182 | 391.841237,1000,349.762059 183 | -456.742296,1000,352.51211 184 | 487.963433,1000,353.872126 185 | 500.369886,1000,356.39227 186 | 471.499477,1000,357.479517 187 | 516.523093,1000,360.199998 188 | -363.024642,1000,361.353401 189 | 368.98432,1000,364.152922 190 | -275.827006,1000,365.161324 191 | 445.003325,1000,367.983468 192 | 510.519352,1000,369.34117 
193 | 409.640661,1000,372.135124 194 | 561.413548,1000,373.18227 195 | -251.1152,1000,375.797151 196 | 491.095836,1000,377.118608 197 | 492.160557,1000,379.707086 198 | 185.670349,1000,380.622015 199 | 657.401948,1000,383.162882 200 | 322.911454,1000,384.350762 201 | 435.672946,1000,387.043924 202 | 233.30519,1000,388.297657 203 | -474.965623,1000,391.177328 204 | 724.016468,1000,392.306799 205 | 567.216707,1000,394.941492 206 | 551.798228,1000,396.194724 207 | 504.525757,1000,399.058649 208 | -247.654686,1000,400.380574 209 | 684.144843,1000,403.082328 210 | 629.937886,1000,404.184863 211 | 690.139805,1000,407.178244 212 | -9.978379,1000,408.270967 213 | 705.091563,1000,411.319355 214 | 559.940741,1000,412.474074 215 | 650.531686,1000,414.90951 216 | 608.893859,1000,416.093711 217 | -118.166494,1000,417.402917 218 | 141.29418,1000,420.334598 219 | 829.710118,1000,421.428565 220 | 719.053764,1000,424.051761 221 | 853.204189,1000,425.214733 222 | 675.8936,1000,428.176631 223 | 726.700177,1000,429.277927 224 | 761.640252,1000,432.199968 225 | -238.048623,1000,433.543412 226 | 607.665476,1000,436.26316 227 | 701.076952,1000,437.35841 228 | 904.775002,1000,440.068794 229 | 750.138744,1000,441.36354 230 | 802.748852,1000,444.242921 231 | 143.21123,1000,445.494254 232 | -14.71758,1000,448.290507 233 | 710.323561,1000,449.564373 234 | 736.385134,1000,452.506589 235 | 673.604288,1000,453.562172 236 | 635.100888,1000,456.365982 237 | 813.276614,1000,457.413584 238 | 520.049769,1000,459.957543 239 | 643.006031,1000,461.122054 240 | 772.825526,1000,463.940836 241 | 716.231407,1000,465.218309 242 | 861.087161,1000,468.051658 243 | 895.001669,1000,469.165863 244 | 854.830146,1000,471.348763 245 | 249.936422,1000,472.48165 246 | 825.640017,1000,475.30513 247 | -189.911315,1000,476.484321 248 | 819.031993,1000,479.288266 249 | 953.424313,1000,480.613809 250 | 943.867497,1000,483.369752 251 | 927.600228,1000,484.345697 252 | 847.484483,1000,486.851641 253 | 
958.600672,1000,488.424646 254 | 868.647883,1000,491.499832 255 | 970.274855,1000,492.849548 256 | 392.34722,1000,495.475769 257 | 828.147827,1000,496.61538 258 | 874.570254,1000,497.613927 259 | 991.767772,1000,500.41718 260 | -451.351831,1000,501.516015 261 | 818.312797,1000,504.134541 262 | 883.520296,1000,505.149978 263 | 983.176349,1000,507.656857 264 | 1015.533605,1000,508.597117 265 | 938.704347,1000,511.272938 266 | 975.117484,1000,512.609225 267 | 970.011968,1000,515.437778 268 | -566.544146,1000,516.783621 269 | 856.249505,1000,519.728443 270 | 1068.618186,1000,521.3051 271 | 1000.013924,1000,524.166289 272 | 1046.94765,1000,525.433456 273 | 936.010753,1000,528.169366 274 | 1017.179897,1000,529.426038 275 | 1071.871489,1000,532.356493 276 | 1130.153515,1000,533.821733 277 | 26.66472,1000,536.726105 278 | 1134.294519,1000,538.07994 279 | 1078.055031,1000,540.927516 280 | 1114.599975,1000,542.059509 281 | 1028.883073,1000,545.015358 282 | 1188.929322,1000,546.546265 283 | -16.574233,1000,549.542164 284 | 1064.667075,1000,550.637791 285 | 1177.116764,1000,553.134045 286 | 1194.186035,1000,554.295923 287 | -414.719805,1000,557.274817 288 | 1036.244211,1000,558.647892 289 | -321.236451,1000,561.605417 290 | 1229.098753,1000,562.756155 291 | 1064.294097,1000,565.564996 292 | 1237.431996,1000,566.777708 293 | 1236.704624,1000,569.480664 294 | 1142.407569,1000,570.646869 295 | 1141.179836,1000,573.386768 296 | 1119.177993,1000,574.463339 297 | 1139.113251,1000,577.090222 298 | 1226.762531,1000,578.325426 299 | 1175.656782,1000,580.901611 300 | 1202.836401,1000,581.947695 301 | 367.790589,1000,582.993641 302 | 1253.696121,1000,585.629266 303 | 1181.954264,1000,586.721435 304 | 1214.35128,1000,589.431503 305 | -353.418844,1000,590.8165 306 | -115.810313,1000,593.778821 307 | -583.577126,1000,595.178192 308 | 1210.582867,1000,597.769948 309 | 1246.551827,1000,598.906173 310 | 1222.331167,1000,601.216419 311 | 1351.152867,1000,602.256095 312 | 
1309.876557,1000,604.825627 313 | 1203.37969,1000,605.930581 314 | 1402.092117,1000,608.905299 315 | 1114.733658,1000,610.174509 316 | 1373.995958,1000,612.838707 317 | 1367.415104,1000,613.985248 318 | 1321.454282,1000,616.523201 319 | 180.988899,1000,617.662365 320 | 1358.319507,1000,620.470261 321 | 1560.243753,1000,621.575083 322 | -384.25854,1000,624.125113 323 | 1397.16732,1000,625.065551 324 | 1349.6627,1000,627.51013 325 | 1209.369068,1000,628.773412 326 | 1392.87968,1000,631.581605 327 | 1484.630922,1000,632.474376 328 | 1400.781257,1000,634.926574 329 | -636.911505,1000,636.292199 330 | -429.893792,1000,639.210103 331 | 1479.117604,1000,640.581329 332 | 1495.564354,1000,643.228138 333 | 1320.651091,1000,644.424299 334 | 1469.365614,1000,647.255884 335 | 1530.919385,1000,648.297429 336 | 567.723267,1000,651.002321 337 | 1282.06834,1000,652.325394 338 | 1441.472856,1000,655.209506 339 | 1484.919155,1000,656.411133 340 | 1474.917301,1000,659.308377 341 | 1564.127973,1000,660.602068 342 | 1510.362506,1000,663.592042 343 | 1418.879776,1000,664.795441 344 | 1353.068001,1000,666.145343 345 | 1530.14494,1000,669.047375 346 | 1372.198189,1000,670.221274 347 | 1444.321178,1000,673.387543 348 | 1617.47076,1000,674.75937 349 | 1192.930717,1000,677.447028 350 | 1478.70322,1000,678.540636 351 | 1380.224979,1000,681.566816 352 | 1579.669149,1000,682.942516 353 | 1498.061622,1000,686.034144 354 | 1718.851945,1000,687.234644 355 | 1608.002277,1000,690.121617 356 | 1540.802298,1000,691.26957 357 | 507.363118,1000,694.106515 358 | 1671.430846,1000,695.483377 359 | 1633.032737,1000,698.04668 360 | 1677.55558,1000,698.948219 361 | 664.530726,1000,701.51879 362 | 1570.681163,1000,702.443816 363 | 1778.043335,1000,704.748139 364 | 1782.951391,1000,705.850234 365 | 902.354067,1000,708.598624 366 | 1773.522955,1000,709.875235 367 | -545.133746,1000,712.477294 368 | 1689.663443,1000,713.567505 369 | 1787.383663,1000,716.2541 370 | -414.833966,1000,717.556142 371 | 
1763.119496,1000,720.39104 372 | 1623.655985,1000,721.429973 373 | -148.088463,1000,724.01571 374 | 1708.642477,1000,725.357982 375 | 1863.537236,1000,728.033618 376 | 1751.140736,1000,729.127008 377 | 1624.835314,1000,731.641115 378 | 1724.581406,1000,732.532689 379 | 926.364944,1000,734.799047 380 | 1525.787487,1000,735.730151 381 | 1717.397707,1000,738.569702 382 | 1776.212416,1000,740.033023 383 | 1736.100889,1000,743.140321 384 | 1749.091752,1000,744.535556 385 | 1858.088785,1000,747.280756 386 | 1827.150056,1000,748.421439 387 | 1785.228257,1000,749.738946 388 | 1306.911863,1000,752.43872 389 | 1652.294474,1000,753.681318 390 | 1819.078035,1000,756.456449 391 | 1731.529392,1000,757.807441 392 | 1665.266207,1000,760.578624 393 | 1626.395631,1000,761.926625 394 | -614.808603,1000,764.806405 395 | 1937.942818,1000,766.100168 396 | 245.159076,1000,768.740189 397 | 1863.73233,1000,769.842787 398 | 298.977176,1000,772.692874 399 | 1863.162019,1000,774.066823 400 | 1918.662232,1000,777.175701 401 | 1914.747524,1000,778.452272 402 | 1624.025442,1000,781.103655 403 | -153.216929,1000,782.306849 404 | 1657.36047,1000,785.111067 405 | 1792.873162,1000,786.429814 406 | 1705.476379,1000,789.165858 407 | 1798.967752,1000,790.315367 408 | 1784.376045,1000,793.016191 409 | 1875.142942,1000,794.358678 410 | 1870.800979,1000,797.063024 411 | 1722.911609,1000,798.420754 412 | -269.065799,1000,800.86622 413 | 859.425894,1000,802.190101 414 | 1847.919783,1000,805.003834 415 | 213.726302,1000,806.11043 416 | 1901.689247,1000,809.033127 417 | 1855.65561,1000,810.370866 418 | 1817.489969,1000,813.214037 419 | 1866.347238,1000,814.251595 420 | 1836.927004,1000,816.997545 421 | -436.156355,1000,818.074801 422 | 12.606564,1000,820.625164 423 | 39.862393,1000,821.780948 424 | 1687.513058,1000,824.300728 425 | 1819.316638,1000,825.523062 426 | 1892.018247,1000,828.384151 427 | 433.161055,1000,829.721349 428 | 1886.123766,1000,832.437915 429 | 469.406418,1000,833.803558 430 | 
1810.889167,1000,835.035598 431 | -223.202553,1000,837.930579 432 | 1611.151912,1000,839.122973 433 | 1748.876726,1000,841.734259 434 | 1722.849792,1000,843.085773 435 | 683.299246,1000,846.040733 436 | 1779.092491,1000,847.266615 437 | 1691.250821,1000,850.031522 438 | 1898.723981,1000,851.089195 439 | 1710.676976,1000,853.439175 440 | 1743.159908,1000,854.506418 441 | 1887.247935,1000,857.11919 442 | 2032.183949,1000,858.224755 443 | 1838.36137,1000,861.064527 444 | 1991.05145,1000,862.365203 445 | 1917.714169,1000,864.928159 446 | 2039.688015,1000,866.159473 447 | -355.252899,1000,869.187284 448 | 220.14463,1000,870.545287 449 | 1921.429145,1000,873.443837 450 | 1926.038223,1000,874.799482 451 | 1878.040188,1000,877.679934 452 | 1908.73356,1000,878.988311 453 | 1929.678028,1000,881.977046 454 | 1992.596927,1000,883.331908 455 | 1836.061813,1000,886.100355 456 | 1819.89819,1000,887.242654 457 | -79.404566,1000,890.058818 458 | 145.508576,1000,891.206962 459 | -316.131165,1000,893.804429 460 | 1974.119534,1000,894.908577 461 | 1791.320395,1000,897.681428 462 | 1970.87806,1000,898.765134 463 | 1652.995075,1000,901.456702 464 | 1795.223817,1000,902.731554 465 | 1896.572141,1000,905.454094 466 | 1857.850481,1000,906.739571 467 | 1895.610018,1000,909.597426 468 | 1893.862746,1000,910.926792 469 | 1864.08109,1000,913.65649 470 | -210.31546,1000,914.946846 471 | 1802.487843,1000,917.738491 472 | 1566.810207,1000,918.801473 473 | 1734.477643,1000,920.079052 474 | 1024.380215,1000,922.693444 475 | 1911.129916,1000,923.570511 476 | 1903.068545,1000,926.184268 477 | 1881.431691,1000,927.278 478 | 1912.710182,1000,930.171712 479 | 1913.513324,1000,931.309608 480 | 2074.871029,1000,934.097459 481 | 1904.61815,1000,935.338669 482 | 1383.85147,1000,938.170179 483 | 175.932358,1000,939.41184 484 | 1979.41178,1000,942.238326 485 | 1826.563209,1000,943.571295 486 | 1940.515236,1000,946.436478 487 | 1992.892876,1000,947.726131 488 | -4.582037,1000,950.481037 489 | 
1888.490234,1000,951.594141 490 | -91.080591,1000,954.481757 491 | 2041.450674,1000,955.725146 492 | 968.264074,1000,958.660699 493 | 1915.717859,1000,959.903682 494 | 1983.576872,1000,962.765913 495 | 1498.683883,1000,963.896904 496 | 2130.142327,1000,966.510712 497 | 1962.951162,1000,967.672987 498 | 1911.408536,1000,970.312442 499 | 2024.863827,1000,971.671153 500 | 992.331795,1000,974.43529 501 | 2005.350728,1000,975.534361 502 | 2063.894236,1000,978.382325 503 | 2071.570633,1000,979.668815 504 | 2289.687187,1000,982.361641 505 | 2291.408277,1000,983.421775 506 | 2085.982716,1000,986.123109 507 | 430.587378,1000,987.242819 508 | 2057.422213,1000,990.184948 509 | 2049.340307,1000,991.296379 510 | 2033.772627,1000,994.055945 511 | 2098.801404,1000,995.354469 512 | 1993.859671,1000,998.297756 513 | 2200.252281,1000,999.620894 514 | 1970.00844,1000,1000.962353 515 | 2168.996686,1000,1003.701803 516 | 1939.963403,1000,1005.01292 517 | 1542.859016,1000,1007.952409 518 | 2234.974045,1000,1009.269866 519 | 2159.646377,1000,1012.340854 520 | 2024.474594,1000,1013.821357 521 | 2162.618547,1000,1016.568104 522 | 2151.986009,1000,1017.491918 523 | 2041.598821,1000,1020.214009 524 | 867.401362,1000,1021.596129 525 | 2107.450347,1000,1024.548611 526 | 2238.734274,1000,1025.711861 527 | 2003.29866,1000,1028.558042 528 | 2159.647969,1000,1029.767589 529 | 2150.774224,1000,1032.49649 530 | 2083.885597,1000,1033.547448 531 | 1950.566029,1000,1036.336141 532 | -191.079488,1000,1037.66938 533 | 2241.773582,1000,1040.40737 534 | 547.532252,1000,1041.524343 535 | 2064.569947,1000,1043.945358 536 | 2169.676023,1000,1045.111322 537 | 35.453561,1000,1047.911081 538 | 2116.627678,1000,1048.999077 539 | 2278.265985,1000,1051.557057 540 | 2109.412233,1000,1052.871688 541 | 2154.833854,1000,1055.888175 542 | 2236.400822,1000,1057.244972 543 | 2176.059014,1000,1060.135335 544 | 2186.578709,1000,1061.712131 545 | 2216.170926,1000,1064.55908 546 | 2259.742211,1000,1065.683109 547 | 
2029.050382,1000,1068.420455 548 | 2117.356476,1000,1069.572803 549 | 2304.916414,1000,1072.250711 550 | 2227.645838,1000,1073.332006 551 | 2108.965149,1000,1075.75185 552 | 2124.816308,1000,1077.09287 553 | 2174.626006,1000,1079.917811 554 | 2216.755617,1000,1081.15591 555 | 2189.537174,1000,1083.845125 556 | 2156.356329,1000,1084.823886 557 | 2234.982512,1000,1086.002127 558 | 2181.812563,1000,1088.942831 559 | 2203.05321,1000,1090.034397 560 | 1096.984774,1000,1092.582823 561 | 2083.161129,1000,1093.505691 562 | 2205.313942,1000,1096.279012 563 | 859.63037,1000,1097.43047 564 | 2355.029549,1000,1100.075436 565 | 2364.585419,1000,1101.100958 566 | 2240.11467,1000,1103.809651 567 | 2295.315054,1000,1105.157722 568 | 2302.4482,1000,1108.164473 569 | 2310.522158,1000,1109.636508 570 | 2336.406455,1000,1112.29716 571 | 619.430263,1000,1113.658174 572 | 2337.164313,1000,1116.474069 573 | 2374.560032,1000,1117.660745 574 | 2346.781109,1000,1120.091228 575 | 2233.724874,1000,1121.290658 576 | 2318.573026,1000,1123.948629 577 | 2247.593689,1000,1125.046698 578 | 2415.638999,1000,1127.874407 579 | 2348.460873,1000,1129.247593 580 | 2279.978151,1000,1132.209889 581 | 2274.613597,1000,1133.481858 582 | 2355.031707,1000,1136.326413 583 | 2257.589045,1000,1137.57579 584 | 2286.38652,1000,1140.444957 585 | 972.992734,1000,1141.73807 586 | 2393.599862,1000,1144.63223 587 | 2122.745089,1000,1145.997929 588 | 2360.130107,1000,1148.710419 589 | 2404.641905,1000,1149.940688 590 | 2447.075519,1000,1152.770678 591 | 797.127769,1000,1153.868551 592 | 2198.303957,1000,1156.464664 593 | 768.950676,1000,1157.771234 594 | 2427.157315,1000,1160.650953 595 | 2399.000675,1000,1161.851869 596 | 2448.632996,1000,1164.458943 597 | 2461.097538,1000,1165.781372 598 | 2406.575546,1000,1168.327966 599 | 2362.907035,1000,1169.301562 600 | 2311.617039,1000,1170.323985 601 | 2592.876776,1000,1173.074452 602 | 2336.755117,1000,1174.281051 603 | 2432.85483,1000,1176.854292 604 | 
2379.163965,1000,1177.763412 605 | 2526.706562,1000,1180.502438 606 | 2413.229156,1000,1181.624897 607 | 2493.101771,1000,1184.341691 608 | 2461.610687,1000,1185.551201 609 | -46.512142,1000,1188.24222 610 | 1987.888644,1000,1189.578445 611 | 2472.921722,1000,1192.352611 612 | 2405.55672,1000,1193.183887 613 | 2535.924624,1000,1195.897624 614 | 2378.677974,1000,1197.265664 615 | 604.01893,1000,1200.021041 616 | 2357.418945,1000,1201.166029 617 | 2590.012031,1000,1203.789635 618 | 1216.648323,1000,1204.925438 619 | 2616.294467,1000,1207.742212 620 | 2526.396153,1000,1209.051704 621 | 1980.503654,1000,1211.620301 622 | 2500.395529,1000,1212.676226 623 | 2456.770141,1000,1215.534576 624 | 608.880725,1000,1216.90745 625 | 2522.064589,1000,1219.820118 626 | 2457.739447,1000,1220.89684 627 | 2542.691962,1000,1223.76265 628 | 2493.522515,1000,1225.001006 629 | 1727.177548,1000,1227.559705 630 | 841.704885,1000,1228.933946 631 | 2554.570724,1000,1231.918345 632 | 2676.747155,1000,1233.239217 633 | 2344.696386,1000,1236.077066 634 | 974.558011,1000,1237.407281 635 | 2634.990529,1000,1239.989105 636 | 2525.650842,1000,1241.241929 637 | 2545.097085,1000,1243.829219 638 | 2409.575766,1000,1244.93031 639 | 2530.277769,1000,1247.590007 640 | 2573.045308,1000,1248.924351 641 | 2528.116647,1000,1251.670255 642 | 2566.598646,1000,1252.907161 643 | 806.296963,1000,1254.060305 644 | 2584.868421,1000,1256.763186 645 | 2498.115547,1000,1258.105158 646 | 2600.657934,1000,1260.842895 647 | 1374.887361,1000,1261.949584 648 | 1171.287819,1000,1264.618894 649 | 2445.099218,1000,1265.973487 650 | 2631.869264,1000,1268.878126 651 | 2535.586484,1000,1269.979178 652 | 2578.403352,1000,1271.991655 653 | 2477.696678,1000,1273.089191 654 | 2522.255996,1000,1275.899046 655 | 2509.660642,1000,1277.105712 656 | 368.503702,1000,1279.97465 657 | 2693.173384,1000,1281.19559 658 | 2377.580105,1000,1284.031276 659 | 80.796985,1000,1285.227866 660 | 2414.596892,1000,1287.948582 661 | 
2508.312556,1000,1289.169907 662 | 2654.713379,1000,1292.007006 663 | 2710.608336,1000,1293.05641 664 | 2637.481844,1000,1295.755936 665 | 1683.933857,1000,1296.879443 666 | 2541.607625,1000,1299.584833 667 | 2513.55821,1000,1300.947324 668 | 255.472093,1000,1303.85288 669 | 980.955232,1000,1305.086351 670 | 2584.173824,1000,1307.686985 671 | 2651.650506,1000,1308.903193 672 | 1299.701832,1000,1311.524687 673 | 1100.310153,1000,1312.651151 674 | 2672.147143,1000,1315.271905 675 | 2730.541988,1000,1316.350173 676 | 2671.988986,1000,1318.912176 677 | 2571.780504,1000,1320.25627 678 | 2612.682636,1000,1323.006366 679 | 626.06043,1000,1324.349713 680 | 2633.39211,1000,1327.00623 681 | 2468.845749,1000,1328.336586 682 | 94.438326,1000,1331.137357 683 | 2691.750145,1000,1332.283959 684 | 2570.735788,1000,1335.203677 685 | 2630.37629,1000,1336.522533 686 | 2721.465485,1000,1337.808043 687 | 2553.204653,1000,1340.551357 688 | 2521.531895,1000,1341.877888 689 | 519.012231,1000,1344.6664 690 | 2151.655897,1000,1345.770372 691 | 2479.9953,1000,1348.617187 692 | 2618.538029,1000,1349.954861 693 | 2489.599488,1000,1352.757781 694 | 2652.136816,1000,1353.900375 695 | 2653.329586,1000,1356.61969 696 | 2642.324598,1000,1357.917341 697 | 2516.242965,1000,1360.68479 698 | 364.670657,1000,1361.872959 699 | 2575.837467,1000,1364.727766 700 | 2463.986157,1000,1366.013024 701 | 2545.378195,1000,1368.947666 702 | 2629.892617,1000,1370.101062 703 | 2640.111019,1000,1372.797863 704 | 780.569274,1000,1374.063534 705 | 2509.526527,1000,1376.760126 706 | 2627.68738,1000,1377.808442 707 | 1868.264373,1000,1380.317289 708 | 2633.949423,1000,1381.353216 709 | 2597.072033,1000,1383.865241 710 | 2704.215929,1000,1385.160076 711 | 2602.725946,1000,1387.880569 712 | 1347.142882,1000,1389.039386 713 | 1481.002216,1000,1391.699687 714 | 2686.168912,1000,1392.810499 715 | 2784.416149,1000,1395.69048 716 | 2566.808197,1000,1397.050171 717 | 2810.597368,1000,1399.588006 718 | 2612.964317,1000,1400.908252 
719 | 2824.253104,1000,1403.352711 720 | 724.203327,1000,1404.558918 721 | 2877.751773,1000,1407.537043 722 | 2725.831693,1000,1408.86438 723 | 2741.962951,1000,1411.377318 724 | 2681.373799,1000,1412.612699 725 | 2859.720961,1000,1415.255044 726 | 438.76024,1000,1416.459376 727 | 2831.052103,1000,1419.029529 728 | 2734.352426,1000,1420.191892 729 | 2834.230824,1000,1421.531293 730 | 2772.420018,1000,1423.828373 731 | 2896.560924,1000,1424.858868 732 | 2746.807543,1000,1427.55324 733 | 2780.089522,1000,1428.594684 734 | 2650.958828,1000,1431.158987 735 | 2739.342127,1000,1432.492275 736 | 2851.118483,1000,1435.403463 737 | 2906.196599,1000,1436.883542 738 | 2821.006226,1000,1439.764541 739 | 2824.941586,1000,1440.925452 740 | 237.709413,1000,1443.449277 741 | 2839.100769,1000,1444.505203 742 | 2881.406815,1000,1447.578918 743 | 2793.045756,1000,1449.120055 744 | 2788.380361,1000,1451.833184 745 | 2814.745813,1000,1453.168784 746 | 1728.194586,1000,1456.081485 747 | 2722.25566,1000,1457.445436 748 | 2915.765571,1000,1460.352204 749 | 2910.409718,1000,1461.655164 750 | 2841.514306,1000,1464.425007 751 | 266.306474,1000,1465.76651 752 | 2769.069527,1000,1468.535197 753 | 2928.554101,1000,1469.794255 754 | 2944.420484,1000,1472.363419 755 | 3061.5408,1000,1473.423551 756 | 2894.365963,1000,1476.04738 757 | 2898.880858,1000,1477.371805 758 | 2751.415032,1000,1480.105249 759 | 2748.930854,1000,1481.232996 760 | 583.192469,1000,1484.078861 761 | 2806.898886,1000,1485.560438 762 | 2867.252496,1000,1488.439729 763 | 2818.595276,1000,1489.687582 764 | 2790.567504,1000,1492.633178 765 | 3117.332043,1000,1493.943871 766 | 2815.35011,1000,1496.841586 767 | 2813.994741,1000,1497.91307 768 | 2842.087375,1000,1500.643329 769 | 2787.013204,1000,1501.766279 770 | 1868.277083,1000,1502.859808 771 | 2870.899951,1000,1505.764295 772 | 2741.59127,1000,1506.878564 773 | 2702.266488,1000,1509.470759 774 | 2903.683213,1000,1510.705875 775 | 2895.1795,1000,1513.644043 776 | 
2766.565115,1000,1514.880511 777 | 2730.981564,1000,1517.608583 778 | 597.113809,1000,1518.794677 779 | 1801.20402,1000,1521.558484 780 | 3028.316803,1000,1522.825025 781 | 2960.193023,1000,1525.710279 782 | 2993.685144,1000,1527.059247 783 | 2994.638577,1000,1529.727077 784 | 2844.991371,1000,1530.833488 785 | 2885.102554,1000,1533.759768 786 | 2901.796754,1000,1535.053029 787 | 2685.287765,1000,1537.957297 788 | 2832.932761,1000,1539.220782 789 | 2851.982799,1000,1541.974555 790 | 2573.071525,1000,1542.953994 791 | 2846.133451,1000,1545.542851 792 | 2688.552461,1000,1546.884028 793 | 2962.70391,1000,1549.719241 794 | 2917.547858,1000,1550.748543 795 | 1240.239768,1000,1553.417964 796 | 2718.167468,1000,1554.461427 797 | 469.357217,1000,1556.874103 798 | 2742.557553,1000,1557.895795 799 | 2903.619262,1000,1560.64342 800 | 2970.119428,1000,1561.729469 801 | 2732.663686,1000,1564.30817 802 | 2774.310464,1000,1565.427254 803 | 2871.681063,1000,1568.230918 804 | 3034.179834,1000,1569.433099 805 | 2993.3015,1000,1572.220516 806 | 2720.866224,1000,1573.426621 807 | 2930.79911,1000,1576.223603 808 | 1725.143146,1000,1577.603752 809 | 3157.168192,1000,1580.45167 810 | 3000.000291,1000,1581.727532 811 | 2940.374481,1000,1584.37186 812 | 2953.131006,1000,1585.717786 813 | 2594.257346,1000,1586.89611 814 | 931.519228,1000,1590.000528 815 | 2902.919324,1000,1591.381 816 | 3019.453728,1000,1594.576105 817 | 2738.838232,1000,1596.005714 818 | 2900.178924,1000,1599.028116 819 | 1359.060549,1000,1600.219134 820 | 260.479812,1000,1603.069005 821 | 2547.388297,1000,1604.401367 822 | 962.015895,1000,1607.032676 823 | 2287.970819,1000,1608.039507 824 | 2977.110436,1000,1610.7907 825 | 2834.720039,1000,1611.73491 826 | 2858.752132,1000,1613.979699 827 | 2148.309963,1000,1615.041402 828 | 2781.583092,1000,1617.75283 829 | 2892.092337,1000,1619.044136 830 | 2786.559293,1000,1621.45806 831 | 1590.786962,1000,1622.535251 832 | 3063.831161,1000,1625.029061 833 | 
3105.126152,1000,1625.922165 834 | 2946.916897,1000,1628.486427 835 | 2892.02377,1000,1629.385144 836 | 3020.57606,1000,1631.80941 837 | 1369.079665,1000,1632.953771 838 | 2956.58782,1000,1635.740949 839 | 3040.633759,1000,1636.840221 840 | 2520.075365,1000,1639.539001 841 | 3015.23941,1000,1640.818192 842 | 2950.980989,1000,1643.555629 843 | 2025.51523,1000,1644.608022 844 | 3107.129918,1000,1647.134768 845 | 1118.436515,1000,1648.275788 846 | 2924.702415,1000,1650.823888 847 | 2975.174727,1000,1651.938739 848 | 2878.480658,1000,1654.766945 849 | 3006.854141,1000,1656.085998 850 | 3155.722516,1000,1658.877026 851 | 2981.3994,1000,1660.231929 852 | 2937.72478,1000,1662.859327 853 | 3225.416505,1000,1664.215257 854 | 3125.367206,1000,1666.940062 855 | 3042.765672,1000,1668.159265 856 | 3089.093562,1000,1669.343266 857 | 3149.151676,1000,1671.942626 858 | 3045.300169,1000,1673.216482 859 | 2907.241991,1000,1676.128276 860 | 3014.602422,1000,1677.313293 861 | 3174.909493,1000,1679.505258 862 | 3119.563903,1000,1680.506212 863 | 2986.795526,1000,1683.374099 864 | 3104.196181,1000,1684.660985 865 | 3060.504564,1000,1687.279557 866 | 1463.683734,1000,1688.381233 867 | 3074.33927,1000,1690.8575 868 | 2438.522703,1000,1691.994379 869 | 2941.582399,1000,1694.727015 870 | 3054.317451,1000,1695.91337 871 | 3011.667208,1000,1698.522906 872 | 2907.415979,1000,1699.566729 873 | 3211.171357,1000,1702.371828 874 | 3108.875907,1000,1703.706964 875 | 3051.363679,1000,1706.170801 876 | 2884.510086,1000,1707.064599 877 | 3186.246091,1000,1709.66475 878 | 3050.876679,1000,1710.983824 879 | 2946.282792,1000,1713.460019 880 | 1255.077369,1000,1714.586855 881 | 3157.406743,1000,1717.45738 882 | 2993.018853,1000,1718.738223 883 | 3092.251601,1000,1721.574147 884 | 3016.703678,1000,1723.084156 885 | 2997.482929,1000,1725.859273 886 | 3094.763573,1000,1726.80056 887 | 2944.701122,1000,1729.729798 888 | 3190.593738,1000,1731.076991 889 | 3216.655493,1000,1733.952358 890 | 
547.975449,1000,1735.157149 891 | 3163.950881,1000,1737.747508 892 | 3043.655058,1000,1739.018747 893 | 2948.004284,1000,1741.857299 894 | 2943.954923,1000,1743.195288 895 | 3172.899564,1000,1745.899266 896 | 3067.337805,1000,1747.21175 897 | 3249.65467,1000,1750.049502 898 | 3026.740334,1000,1751.388625 899 | 3229.861938,1000,1752.691512 900 | 3224.319765,1000,1755.530849 901 | 2973.363229,1000,1757.105719 902 | 2819.046138,1000,1759.922084 903 | 3180.482873,1000,1761.257695 904 | 3079.044918,1000,1763.967693 905 | 2977.044636,1000,1765.250318 906 | 3022.422002,1000,1768.282567 907 | 3228.612038,1000,1769.830473 908 | 2974.695186,1000,1772.752688 909 | 2981.301,1000,1774.085111 910 | 3169.391579,1000,1776.917976 911 | 3107.440007,1000,1778.01987 912 | 3268.076248,1000,1780.701984 913 | 3111.703193,1000,1782.013138 914 | 3173.065611,1000,1784.888602 915 | 3112.79126,1000,1786.154233 916 | 3286.024846,1000,1788.953906 917 | 3167.762764,1000,1789.850667 918 | 3160.710837,1000,1792.485957 919 | 145.109417,1000,1793.612252 920 | 3075.108233,1000,1796.411764 921 | 2900.660969,1000,1797.325227 922 | 3200.848576,1000,1799.958774 923 | 3172.58009,1000,1801.286208 924 | 3191.755235,1000,1803.870031 925 | 3185.120422,1000,1805.020892 926 | 3140.813318,1000,1807.688366 927 | 3179.972344,1000,1809.236871 928 | 3281.672891,1000,1812.231656 929 | 3186.682195,1000,1813.237985 930 | 512.671678,1000,1815.919945 931 | 1128.675534,1000,1817.295639 932 | 3013.964057,1000,1820.171175 933 | 1331.273597,1000,1821.524191 934 | 3063.248261,1000,1824.272797 935 | 3140.267489,1000,1825.45677 936 | 2065.00165,1000,1827.977041 937 | 306.496553,1000,1829.0617 938 | 3119.795082,1000,1831.833755 939 | 3229.766838,1000,1832.934422 940 | 2959.411723,1000,1835.548917 941 | 3115.78846,1000,1836.892529 942 | 3180.458432,1000,1838.23319 943 | 3371.032101,1000,1840.829392 944 | 3122.796833,1000,1841.815206 945 | 2975.80781,1000,1844.501186 946 | 1575.084348,1000,1845.835019 947 | 
3183.837905,1000,1848.631819 948 | 322.078804,1000,1849.801191 949 | 3164.859166,1000,1852.491848 950 | 3179.102624,1000,1853.825005 951 | 3313.055611,1000,1856.659611 952 | 3162.323106,1000,1857.734984 953 | 1763.073685,1000,1860.53712 954 | 1071.693112,1000,1861.679212 955 | 3249.282404,1000,1864.5105 956 | 3357.994215,1000,1865.853777 957 | 2992.747923,1000,1868.703924 958 | 1703.849356,1000,1870.058723 959 | 3108.513159,1000,1873.005844 960 | 3254.540372,1000,1874.116362 961 | 3306.896831,1000,1876.738155 962 | 3278.870483,1000,1877.993843 963 | 3196.580108,1000,1880.747774 964 | 2036.754954,1000,1881.693551 965 | 3132.955987,1000,1884.219015 966 | 3209.883892,1000,1885.401662 967 | 3168.126038,1000,1888.230103 968 | 303.770894,1000,1889.246305 969 | 3186.271608,1000,1891.922087 970 | 3182.214786,1000,1893.234203 971 | 2864.249856,1000,1895.690968 972 | 3232.956313,1000,1896.784583 973 | 2768.250616,1000,1899.296273 974 | 3081.828848,1000,1900.445126 975 | 3024.338047,1000,1903.332981 976 | 3215.00468,1000,1904.317798 977 | 3166.306837,1000,1907.242082 978 | 3329.066548,1000,1908.291642 979 | 2493.284227,1000,1911.114614 980 | 3043.233028,1000,1912.461655 981 | 3252.181533,1000,1915.272198 982 | 2652.610791,1000,1916.351356 983 | 3237.616627,1000,1918.534429 984 | 1497.456814,1000,1919.447169 985 | 3141.924194,1000,1920.351377 986 | 3053.623682,1000,1922.650311 987 | 3142.714919,1000,1923.504225 988 | 3011.297177,1000,1925.329419 989 | 2931.174892,1000,1926.165528 990 | 3277.854738,1000,1928.324992 991 | 3268.90269,1000,1929.097808 992 | 3184.857997,1000,1931.267418 993 | 3320.754103,1000,1932.085928 994 | 3198.553147,1000,1933.789114 995 | 3168.468181,1000,1934.570505 996 | 3245.960116,1000,1936.423882 997 | 3122.422409,1000,1937.217605 998 | 3220.351526,1000,1939.160449 999 | 390.694685,1000,1939.946317 1000 | 3221.308453,1000,1942.220497 1001 | 3278.758228,1000,1943.000054 1002 | 
-------------------------------------------------------------------------------- /visualize.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 64, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from baselines.common import plot_util as pu" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "If you want to average results for multiple seeds, LOG_DIRS must contain subfolders in the following format: ```-0```, ```-1```, ```-0```, ```-1```. Where names correspond to experiments you want to compare separated with random seeds by dash." 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 80, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "LOG_DIRS = 'logs/reacher/'\n", 26 | "# Uncomment below to see the effect of the timit limits flag\n", 27 | "# LOG_DIRS = 'time_limit_logs/reacher'" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 81, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "name": "stderr", 37 | "output_type": "stream", 38 | "text": [ 39 | "/home/kostrikov/GitHub/baselines/baselines/bench/monitor.py:163: UserWarning: Pandas doesn't allow columns to be created via a new attribute name - see https://pandas.pydata.org/pandas-docs/stable/indexing.html#attribute-access\n", 40 | " df.headers = headers # HACK to preserve backwards compatibility\n" 41 | ] 42 | } 43 | ], 44 | "source": [ 45 | "results = pu.load_results(LOG_DIRS)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 82, 51 | "metadata": {}, 52 | "outputs": [ 53 | { 54 | "data": { 55 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAacAAAGoCAYAAADiuSpNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3XmcnWV9///XdfYzZ+bMmplMMtkgIWQhBLIQsCgKMUARKkoFqxWtoIhtre2j2mJxKfZr+Vm1LrVFqVSrUpcKKChIwaIsJQkQTYBsJCSTbfbl7Nv9++OaMzMJk8lMZjknud/PxyOPmbnPfe77c85Mrve5rvu679s4joOIiEg58ZS6ABERkWMpnEREpOwonEREpOwonEREpOwonEREpOwonEREpOwonEREpOwonEREpOwonEREpOz4Sl3AWDU0NDjz588vdRkiInISNm/e3OE4zoyxrl+ycDLGXA78M+AFvuk4zudGW3/+/Pls2rRpWmoTEZHJZYx5dTzrl2RYzxjjBb4GXAEsBW4wxiwtRS0iIlJ+SnXMaS2wy3GcVxzHyQD3AteUqBYRESkzpQqn2cD+YT+3Diw7ijHmZmPMJmPMpvb29mkrTkRESqusZ+s5jnOX4zirHcdZPWPGmI+jiYjIKa5U4XQAmDPs55aBZSIiIiULp43AImPMAmNMALgeeKBEtYiISJkpyVRyx3FyxpgPAw9jp5L/u+M420pRi4iIlJ+SnefkOM5DwEOl2r+IiJSvsp4QISIi7qRwEhGRsqNwEhGRsqNwEhGRsqNwEhGRsqNwEhGRsqNwEhGRsnPK3GxQRMpXJp/Ba7zkCjn8Xj8ec/zPvZ2JTnpSPcyIzKAqUIUxBoBcIUd3shuA6lA1AW+ATD7DkdgRosEo0WB0cF3Hccg7eXye0Zswx3HsVxx2d+1mZ9dOMvkMTZEmKgOVVPgr2NOzh8OxwxyOHSadS7Nm9hpCvhCpXIpYJkZnopOCUyBbyFJwCvg8Ps5vPp+qQBWHY4fZcmQL5808j329+9jatpVsIcva2WtZ1byKfb378Hq8JLNJCk4Bv9dPZ6KTdD6Nx3gI+UIsqls0uP1MPkM2b/dzJH6Eb73wLXKFHOta1lEbqiWbz9Kd6qYuXEdNqIbtHdvJOTkO9x+mtb+VdC5NTaiG7lQ3+UKe6lA1iUyCvJNneeNyKgOVbG3bioNDoVDA5/Xh8/iIZ+IsbljMmllraO1r5XDsMAf7D5Iv5GmqbOL1817PR9Z9ZDL+VMbMFH955W716tWObjYo5c5xHHKFHMYYXu15lWQuSUNFA3XhOpLZJMlckmQ2SXNVM36PH2PMUQ15KpdiZ+dOZlXNIhqM8mL7i+zu3k17vJ1kLkl/pp+gN0h7op0KfwUhr21Ew/4wjuPwTOszZAtZ5lTPoT/dT0eigxmRGXjwkC1k8RgPeSePwZAv5OlMdtKV7CLgDVBwCvSl+4hn4iRztsZkNklvqpdoKEpNqAbHcXAch7ZEG9FglHQuTXuinUQ2MfgaqgJVXDz3YqKhKIf6D9GR6CCZS1IdrMYYw3OHnhtcN+gNUheuI+gLcqDvANlCFoCwL0xloJL2xNDdCLzGSzQYJewL05m0DXzEHyESiJDIJkjlUoPrGgxhf5j+dD8O09fGeY0XgLyTn5Tt1YZqCXgDHIkfGVxWDG1gMJwrA5VEg1E8xkM6lyYSiOAxHlK5FEFvEGMMR2JHyBayNFQ04DgOxhgKToGCUyDgCXA4fphULoXP4yPsC1MVrMJgiGViLKpfxMabNk7otRhjNjuOs3rM6yucZDIU/46Kn2yP50jsCB2JDnKFHF6Pl8X1i/F5fLTF2zjYf5DqUDUzK2dS4a8YfE6+kKc90c7urt3s7t6Ng0PYFyboDRL0BelIdLDp4CaWzVjG1YuvZlv7NvKFPA0VDSSyCeor6vn1q79mR+cOmquaCfvD7Oneg8/jI+QL8UzrM2w+tJlkNkl
duG4wTMK+MPv79mMwYMBjPCysW8jvjvyO/X37qQpUURWsIuwLE8vE6En10JPqGWxgR1NsVBzHIRqMUh+uZ0ZkBts7t9OV7ALs/gpOYVy/h2gwiuM4xLNxQr4QFf6K1zTaxW0XP7mHfCFyhRwe4yHgDRDyhQDoTfcOvsepXIpkNonHePAaL2F/mFQuRcAbIBKIUOGrGOzJ9KZ62d+3n1whR4W/ggp/hf10no2TL+RZWLeQhooG4pk4fek+EtkEOSc3+D44OByJHSGdT1Mfrqc2XEsmn6Ev1Udfuo9MPkNloJKwP0w8EyedTxPyhfB7/YOvz8EhnUtT4a/A6/HiOA5VwSrOrD2TulAd+/v2k8wliWfiNFc1UxOsYVbVLJbMWMJjex6jM9FJ0B/Ea7wsqltEU6SJmlANbYk2YpkYmw9upivZRSQQ4YyaM2hLtIEDb13yVqpD1Ty+53Gebn2aaDBKxB/B6/ES8Udsj8UpML96PgUKpLIpftf2O4wxVAWq8Hq8+Dw+HMeud/mZl7N69mr29+4nkU2QzCVpjDTSHm9nR+cOFtUtIhwIE/AGaKhoIOQLkS/k8Xv9hHwhktkkXo+XqkAVXckuulPdVAYqyRfyeD1e/B4/2UKWCn8FPckeftf2OxrCDVSHq+17PPC37eBwdsPZ4/pbPJbCSSas4BTY1bULsJ9gw/4wIV+Itngb29q24fV4ebXnVfb07KEt3sb2ju1sPLiRkC/E2Q1nc07jOVT4K3jh8Atsbd9KXbiOpkgTh2KH2Ne776h9FT/VHduAtkRb8Hl8dKe66U31nvDTr8GMa53h31cHbSAW/zOn8rYhLjaCxfck7+TpT/dTH66nvqKeTD5DKpciW8jaRt4bIuS3X3OFHNFQlGggSiwTI5aN4ffY4S6/x093qntwKKzY6CRzSaoCVSyuX0wimyCWiTGzaiYLahawsG4hNaEa/B4/fek+osEoyZwdKpoTnUPBKdCR6GBezTz8Hj9HYkfweX0sqFlArpADwOvxEs/E8Xv9gA2oYsACxDPxwcYR7IeCYuAEvUE6k534PD68xkvAGxj8gBHwBqgMVJIr5PB5fIMfNrqSXYR8ocFGOVfIkcgmaKhoGPz0ns6nyeQzdlvGS3WoenDfnclOqoPVBH3Bwd9BX7qP/nQ/kUCEiD9Cf6afdC5Ndah6sFF3cAh4AySyCYLeIH6vn3zBvo4TfXg6leXz0NMDoRBEIsdfL5mEVAqqqsA3jQd2FE5yXPlCnp5UD1uObKEt3kZVoIrnDj1He6KdoC/IvVvv5WD/wcH/4GNR7P63VLVQcAq0J9oHe0bVoWpmVc4iU8iQyCao9Fcyp3oOtaFa2yugwL6efeSdPFXBKhrCDaTzaToSHbTF2wZ7SBX+CiKBCA3hBpqrmmkIN9Cb7iWWiQ1+an7Tgjexo3MHP9/1c2ZVzaI2WEtXsguf10cqm+KM2jO4YtEV7Ovdx+H4YWpDtVSHqulMdLJsxjKWNS7D7/EPfroHBoe7io15wSmQyqXwGi8t0Rb7ng4Els/jw+/1E/QGbcjlkvg8vsFjKolswg6pDXxiLTiFwcY7V8hRcAqD4TCzciZBb5D+TD8V/goC3sDU/EGUWKEAnkmcknWi7WUy8NBDdp2LL4baWrvccSAet//6+qCiAvx+6O2FWAzCYdi3DzZvtg3/5ZfD/Png9dpG/v/+D8480y7r7rbbCYWgocF+n05DXZ3dr+PYn/v7IZezX6urbe35vA2LbNb+3NYG//qvNkxaWuw2+vogkYArr7Tbf+45+/imTfDzn9ttNzTYbQWDsGaNXdbeDk1NcPgwPPkkVFbCnDnQ3GxDqqsLZs+Gc86x6/b22td34IB9/p/9GVx11cR+PwonF0tkE9zzwj08tucxtrZtZXZ0NlcuvJL9fft5bM9j7O7efdSxgaLi8NG86nnMqppFwSkMju3nCrnBT7d+j5850TmEfCHm186nOljNnOgcZkVn0RRpojZcS3+6n70
9e+lKdlHhr2BB7YLBoSWv8TIjMuOog9jFIajiUBHYEGiLt5HOpfF7/fg9doiiOI4+mmIPrDg0dbrKZoc+9fb32wbU77cNVaFgPzlns7Yhi0bBmKMb70TCNmAAO3ZAa6ttlGfPto32zp0QCNgG96mn7LYWL7aPBwK2IayttT8/9BB873u24ayqgvPPh499zNbzxBO2wevvh0OH4MgR+MY34NVXbeMcCsGqVTBvnn1+d7eta9Ei+2/PHnjlFdtYzpxpX1cuZ2tNJu170NhoA6K1Fc44w76m+nrb6CcSdh8vvggvvWTDBmzDHInY19DebuvKZu22c7nR3/uKChsA/f1DIRMKQU2Nrcnrtf88HhswtbX28WDQ/h56emyDH4/b+quq7LLqavt4b6/9CkO/o7E480xbQ1ub3VdHh90u2P3kcvbrOefY7e7bZ/flOLbWwggjyMXlCxfav4mJUDi5SL6Q5/nDz/Pkvid5uvVp/ueV/6Ej2UE0GKUh3MDB2MGjPunXhGqoC9cxo2IGMyIz6E/3M7NyJpfMv4RYOsbc2rksm7Fs8DhHNBil4BQGZxBlC1lqQjUnDAix/+F/+1vbsAeDQ5+Mr7zSNmiJBGzcaBuzxYvhrLNsA9DTA9/6ll2+fz/88pd2/ZYW+8k3n7efZl96yX76DYXstiorYcYMePllu++WFvsp2RhYutQ2/MVGs6PDNozFwEq89vPKIGPs9opCIbssmTx6vdpa+6+3Fzo7bSOYzx/93KL6ehsiwaANhl277Lp+vw1ZsPUO32cwOLSsWPdwoZANqX37bHhmMq+tr6XFvheHD8Pu3fY96O62vYe5c+3+PR5bu9c71Gjn8/b5NTX2te3ebX8n0ehQKO3aZZ8fjdraUikbdjNmDAVusUcUCNh/TU32fezvt72inh67jcpKGyTGwLJlNgwTCbuux2MfO3TIrtvQYIOuuXmolljM1m6MDari30k2a5dHozBrln2vslm73YoK23tKJm1YFt/jqiq7jVAIvva10f/mT2S84aSp5KeAPd17eHDngzy25zGWNy6nOljNT3f8lOcPP09f2v6PrQxUMjMyk8sXXs67VryLRfWLiGfi/GLXL5hXPY9zZ55LdaiaqkAVQV/whFNwi4YfND9dHT4M991nG/YZM+AP/9B+Sq+qsv/xAwH7nzidHmpQP/95++n/4ottg9fVZbfz9NP2E/yePfbnYxUbmmNVV8Mb3mDDrKNjaPncubam7dth61bbOEUidrgmlbKNXmWlramtDVasGGqUli2zDeOuXXb7sZhtcIq9Bsexz581a6hBT6Xs12IDmU7b/TuObURjMbt89mz7WKFgH29utl99Pvse7N1r99HUNNRAFnsOZ5wBS5bYbVZW2l5NPD7U24hE7PtXbEiLoVEo2OcXG05j7OvLZu12qqpsfaGQ/X14PPa5uZzdTk2NfT8bG+HZZ23Ytbfbuisq7HsUjdrX4Pfb3lxlpe1V9fTY/dXV2WB48UX7NzJzpt1uoQC//vVQz7S+3r6maHTob6f4HjqOrdHns6+1rc3WUAyqhga733374Lzz7L9Dh4YCPxi0v9N9++z7W1lpH1u50v5uh38oKPY66+rs85JJ+/uOROzvubvbvrZAwD6vOMyYz9t1i+/JkSOv+ZOdcuo5lal8Ic8nf/VJ7tp81+B02gp/xeCwXF2ojlnRWSysXciShiWc13wei+oWsbhh8eDwmBvE47Bliw2Xc86B3/s920B1d9sGo9j4/PKX8OCDtmF697tt49HWZnspTz119FBO8RNoNGobsMZG22j39g4dfygOkR37Cd4Y+9zaWjvMMneufS4MBUhzs102d65tIHp67LGDI0dsY/D7v28by2DQ1jlnjm0oDhywjWahAAsW2CG33buHhpV6emyDunCh7bXFYra3sHHjUGNVKNjGrLbWbsvnsw3T2Wfb19PTY0PV67WN5Isv2te/eLENjL17be0LFtgg3LnTNryrVtllYIPh6af
tdlassPvs6rKvI5ezNTU0DL1nuZz9PVZVHT3seOSIfcxxbIPa32/rCoft+oHAUGBmMvZDQjw+NLTZ2Wnfz/377RBhc/PQPvN5W3drq91eS8vQUNpYZLP2/RuvZHIoTItBm8nY11Io2MeLkxmKw3CnCw3rneIy+Qzf3vJtvr3l2/x6369ZULOAlmgL5zady1VnXUVHooPNhzbzxvlv5Lzm82iubMbr8Za67HHJZOwnwULBBkBtrQ2Pri7bmNXVDQ2L/fjH9lOuzwfr1tnjI+eeaxvTn/wEnnnGPm8sip+sh//JB4OwerUNkqqqoU+axUa4eIA6FLK1JhK2ITv3XLt+a6tt6IoHu+fOtesuWGBDYs4c+zq7u+3jzzxj97twoW0cFy2yjXhbGzz+uG20Vq+2rzWTsdsLh+1zOzvtth3H7nuskklbo9dr36uZM+33ItNJ4XQK29uzl2v/61qeP/w8fo+fdS3ruP0Nt7OiaQUNFQ2nzLEexxk6GP+zn9nw6euzPZzf/MYGTEfH0LCO328b76Lip8hib6ax0X6iP/Y4gtc7dOC8ONziOEO9geIwUjxue09NTTZ4WluH1pk1yz62YoX9PpWyPY1s1oZBcbjL6x0aSqqthWuvtZ/Ei8dv8nk7zNfYaD/Bh0+i87pnj93/WWcpPOT0o3A6xfSn+/mnp/+JuzbfxaHYIfweP29a8CauWHgF6+asY+2stWV7bkZfH9x/v53G+thjR88yCgTscEo6ffRzIhHbq5g5c2iMO522PQSPx35fnL5bU2OPbVRV2U//3d02SHbtssFTnDlWWWnH22trbW/L57OBEo/bnksqZbd50UX2Odu32/309dnjH+ec89opyI5jexnV1bZnk8vZeoqz4iZzCrSIG2hCxClk44GNXPHdK+hMdjInOoc1s9awdvZabl51MyuaVpS6vNfo6LBDaR4P/PCHdppwcdbWjBlDU2GL53IsX257NV1d9vHa2qF1Zs2yvYx02vZ4gkF7AP/IEbvNw4ft8ZZ162xwBQJ2yC0YtEHz0kt2KG7WLPuv2NNYt+7Er2PGjBOvY4zdP9hhPBGZXgqnEuhMdPK9332PO564g1whxzuXv5N3nvNOljUuo6GiYfCqBKXkOPZkvQcftENcu3fDww8PhZHPZ4+7LF5sG/tIxB7kTiRs4BQP5p5/vu35tLfbdRsb7XrB4PhrWr586Pvzzpuc1yki5UnhNM3+ddO/8hcP/wWpXIr6cD3XL7+ez7zxMzRGGktSj+PYYzAPP2x7RXv22FDZv3/oBD6wPZP5823YgO1NVFXZkFi50vaIimfcF7dbPONdRGS81HRMo+//7vvc8uAtzInOYcPCDbxl0Vu4aO5FNFQ0nPjJk6B47kJ3tx22SqXg7W+3ExXADp01NtpeT1OTPUYzf75dHonY559zztBxoCVLjn/g3xgFk4icPDUf0+TBHQ/yx/f9MbOrZvPx132c96x8D5HAKFdnnCT79sG//Zu9vMvzzw9Nu/b7bdgEAvbcoHnzbOg0Ndkg8vvtMNqCBUMz5zweO5GhTOdniMhpROE0De57+T7e9oO30VDRwPvPez/vO/99U3bFhZ4e+Ku/sseLfD57EmWhYGeaVVXZnlBtrT2vJpu1Vzh4y1vsrLXmZjslW0Sk1BROU+y3R37LDT++gaZIEx9e82E+cuFHpiSYHMeerPrOd9oTSBsb7bLzzrOXulmxwvZ8zjrLhlJ9vb1cz7p1dtabiEg5UThNoa5kF1d//2r8Hj/vP+/9fPSij05qMPX22isrPPcc/Nd/2anW4TD8wR/AO94xNBT3+tfb6dbHWrVq0koREZlUCqcpki/kueFHN9Da18pNq27ittffNnjTtMmwdaudzLB9u/25qQne+Ea44AK48UY7bVtE5FSlcJoitz9+O4+88ghXLLyCv3/j309KMOXz9tI/d9xhe0qBAFxzjZ3MsGgRXHqpHaILnJ73phMRF1E4TYGfvPQT/uE3/8A5jef
w1xf99aRMFf/ud+FDH7KX3PH57IVDr77a3qFy1ixdTkdETi8Kp0n2csfLvPsn76a5spn3n/d+fm/e701oe/39cMstNpyam2HtWjtk99a32vOQTuYCoyIi5U7hNIkcx+GDP/sgBafAu1a8i5tW3TTmm/qNZPt2e2+fV16xlwq64QZ7PKm2VkN3InJ6UzhNosf2PMb/vvq/vPmMN/NXF/3VhG7619MDV11lL4D69rfD9dfD+vXju4+PiMipSuE0ie588k4q/ZW8e8W7T/paeR0d8Kd/Cv/7v/YK3e95D3z2s0ffxVNE5HSncJokT+9/mkdeeYQ3zn8jV5515Ulto7/fTgffts2eRPuWt8DttyuYRMR9FE6T5O8e/zsi/gjXLL6GuvD4rwG0dSu87W2wc6cdzrv1Vnt/I91LSETcSBOQJ8Ezrc/wP3v+h7Wz1/KO5e8Y9/P/3/+zlxfav99OD7/tNtiwQcEkIu6lntMk+Mff/CNhX5jrllzHzMqZ43ruvffC3/7t0HlLt9xivxcRcTOF0wRt79jO/dvv56I5F3Hd8uvG9dxt2+Cmm+xJtJ/4hL0eXmhqLlYuInJK0bDeBH3zuW/iMR7efOabx3UliK1b4ZJL7PfvfKeCSURkOPWcJqDgFPje1u9xRu0ZvGvFu8b8vJ07bTBlMnaq+F//tYJJRGQ49Zwm4Ml9T3Kw/yDnNJ7D3Oq5Y3pOImFn5SWT8L73wac+BTNmTG2dIiKnGoXTBHx/6/fxe/xcvfjqMV2myHHsMaatW+3kh09+Ehomfk1YEZHTjsJpAh7a+RALahbwhvlvGNP6X/0qfO979p5LH/+4vUaeiIi8lsLpJLX2tfJq76vMr5lPS/TEJyT9+tfw0Y/C/PnwgQ/Y85pERGRkCqeT9MSrTwCwpGHJCYf0jhyBa6+Fykp7ZfG3vx2MmY4qRUROTVMWTsaY/88Y87Ix5rfGmJ8YY2qGPfY3xphdxpjtxpgNU1XDVHp87+MEvUGuXXLtCdf9i7+Ari74gz+wF3WtrJyGAkVETmFT2XP6JbDccZwVwA7gbwCMMUuB64FlwOXAvxhjvFNYx5R4fM/jtERbWFC7YNT1fvUr+P737f2YbrlFF3EVERmLKQsnx3EecRwnN/DjM0DxwMw1wL2O46Qdx9kD7ALWTlUdU6Ej0cHu7t3MqZ4z6q0xHMeew1RVZU+yXbVqGosUETmFTdcxp/cBPx/4fjawf9hjrQPLXsMYc7MxZpMxZlN7e/sUlzh2zx16DoAFNQsI+oLHXe+JJ2DjRhtKf/RH4D3l+ociIqUxoStEGGMeBUa60ultjuPcP7DObUAO+O54t+84zl3AXQCrV692JlDqpCqG0+/N/b1R1/vMZ6CiAj74QV1hXERkPCYUTo7jXDba48aYG4GrgEsdxymGywFgzrDVWgaWnTKePfAsNaEaVs9afdx1Nm+Gxx6DN7zB3v5CRETGbipn610O/DVwteM4iWEPPQBcb4wJGmMWAIuAZ6eqjqmw+eBmmiJNo17o9Y47IBi0F3WtqTnuaiIiMoKpPOb0VaAK+KUx5gVjzL8COI6zDfgB8CLwC+BWx3HyU1jHpOpOdrOvbx8zK2ceN5xeegnuvx9WroQrT+6O7SIirjZlVyV3HOe4t8xzHOezwGenat9T6YXDLwAwr3oeAW9gxHXuuAN8PhtMOtYkIjJ+ukLEOP2u7XcArGga+fpDe/fau9uefba9EoSIiIyfwmmcXmp/iaA3yLqWdSM+/rGP2a+XXgqLFk1jYSIipxGF0zhtbd9KXbhuxJNvX3gBfvADWL7cXkvP7y9BgSIipwGF0zjt7NxJXbiOmtBrp+B94Qs2kC67DM47rwTFiYicJhRO49Cf7udI/Aj14XqqQ9VHPZZIwI9+BGeeaS9VpIu7ioicPIXTOOzo3AHAzMqZr5mp99Of2luvr1qlezWJiEyUwmkctnduBxjxSuT33GN7S297G4RC01yYiMhpRuE0Di+1v4TBcGH
LhUct7+mBRx+FhQvh/PNLVJyIyGlE4TQO29q3EQ1GmVs996jlv/gF5HL2nk066VZEZOIUTuOwq2sXNaGa10yG+O//hnAYfv/3dVsMEZHJoHAah1d7X6U6WE00GB1clsvBww/D3Lm25yQiIhOncBqj7mQ3fek+6irqjgqnp56Cvj5YuhTmzSthgSIipxGF0xi90v0KAI0VjUdNI7//fvB47D2bgse/Ka6IiIyDwmmMiuF07GSI+++H2bPhda8rRVUiIqcnhdMY7ezaCcCqWasGl+3aBbt32ynkc+ce75kiIjJeCqcx2tG5g7AvzBm1Zwwu+9nP7Nfzz4do9DhPFBGRcVM4jdHOrp1Uh6qPuuDrT38KdXXwlreUsDARkdOQwmmMXu2x08irAlUA5PPwzDP2pNszzjjBk0VEZFwUTmOQyWc4FDtEbaiWqqANp61b7ZXIFyyA5uYSFygicppROI3Bvt59FJwCDZEGIv4IYM9vAntjQZ+vhMWJiJyGFE5jsLtrNwBNkSaMMQD8+tf2KuQbNpSyMhGR05PCaQyK5zgtbVg6uOzJJ2HWLJgzp1RViYicvhROY7Cjawc+j4+ljTacWlth3z578m1TU4mLExE5DSmcxmB3126qAlXMrJwJwC9/aZcvXWqvRi4iIpNL4TQG+/v2Ew1GB6eRP/IIRCI63iQiMlUUTmNwsP8glYHKwWnkv/mNPb9pyZISFyYicppSOJ1ANp+lPd5ONBilMlDJoUP2mFNLi+56KyIyVRROJ3AodggHh9pQLWFfmKeftsvPOgtCodLWJiJyulI4nUBrXysA9eF6jDE8/bS9Ffvll5e4MBGR05jC6QT29+4HoKXajuH95jf2ckUrVpSyKhGR05vC6QT299lwWt64nGwWnnvOHmtqbCxxYSIipzGF0wns7dmL3+NnfvV8Xn4ZMhkbThUVpa5MROT0pXA6gVd7X6UqWEV1qJoXXrDLFi4sbU0iIqc7hdMJ7O/dT1Wgikggwgsv2CuQ6+RbEZGppXA6gUOxQ1QGKon4I2zebK+ld+aZpa5KROT0pnAaxfATcMO+CrZsgZkzoaGh1JWJiJzeFE6jGDwBN1xL55EwPT12Grku9ioiMrUUTqOkRPFlAAAgAElEQVQonuNUF6rjt1vsWzV3bikrEhFxB4XTKIpXh5hZOZOXX7bL1q4tYUEiIi6hcBpF8QTcpTOWsn27vU3GeeeVuCgRERdQOI1i8ATcmvm8+CLU12syhIjIdFA4jWJ/334qA5XUhevYudOGU01NqasSETn9KZxGcbD/IJFAhEKqio4OG066bJGIyNRTOI3iSOwIEX+Ew3ujgD3HSUREpp7C6Tgcx6Et3kZloJLWPREAli4tcVEiIi6hcDqOvnQf6XyaaDDK3t1BjIGLLy51VSIi7jDl4WSM+UtjjGOMaRj42RhjvmyM2WWM+a0x5vypruFkHOw/CEBNqIaXX/JQUwOzZ5e4KBERl5jScDLGzAHeDOwbtvgKYNHAv5uBr09lDSfrUOwQAHXhOrZv10w9EZHpNNU9py8Cfw04w5ZdA3zbsZ4BaowxzVNcx7gd6rfh1FI1l927bThFoyUuSkTEJaYsnIwx1wAHHMfZcsxDs4H9w35uHVhWVoo9p8b8SlIpe1t2r7fERYmIuIRvIk82xjwKjDTB+jbgb7FDehPZ/s3YoT/mTvMVV/f37cfn8ZFvt7e9nTNnWncvIuJqEwonx3EuG2m5MeYcYAGwxRgD0AI8Z4xZCxwAhjf1LQPLRtr+XcBdAKtXr3ZGWmeqHOg7QMQfoW2/PdB0wQXTuXcREXebkmE9x3F+5zhOo+M48x3HmY8dujvfcZzDwAPAHw/M2lsH9DqOc2gq6piIg/0H7TlOr0Tw+3XBVxGR6TShntNJegi4EtgFJID3lqCGEyrenv2VHSHq66G2ttQViYi4x7SE00Dvqfi9A9w6HfudiPZ4O4sbFrNnl18z9UREppmuEDGCeCZOPBsnYuo4cMB
QVweVlaWuSkTEPRROIyhOI/f1LqZQMDQ2gkfvlIjItFGTO4LiCbhOx1kAzJpVympERNxH4TSCYs+JThtOa9aUsBgRERdSOI2g2HPKt59BJALLl5e4IBERl1E4jaC1rxWP8RBvn0F1taaRi4hMN4XTCFr7Won4I3QeDhONahq5iMh0UziNoHh1iPYjAaJRqKoqdUUiIu6icBrB4dhhwvkmEnEP0Sj4/aWuSETEXRROIzgSP0IgfgZg7+MkIiLTS+F0jHQuTW+6F29sHgBNTSUuSETEhRROxzgcOwyAt9+Gk6aRi4hMP4XTMYon4Dq9LQCsWFHKakRE3EnhdIy2eBsA2a5ZRCIwY0aJCxIRcSGF0zGK4ZTrnkU0qquRi4iUgsLpGMVwSnbWU10NkUiJCxIRcSGF0zEO9R8i4A3Q3RGmqgoqKkpdkYiI+yicjnE4fphwYQaJmJeaGjCm1BWJiLiPwukYbbG2wRNw6+pKXIyIiEspnI7RlmjDH18AwOzZJS5GRMSlFE7H6Eh0DJ6Au3RpiYsREXEphdMwBadAd7IbeucACicRkVJROA3Tk+oh7+Qp9M6mokIn4IqIlIrCaZjBE3C77Am4mkYuIlIaCqdhiuGU7p5BNKoTcEVESkXhNMzg1SG66tRzEhEpIYXTMO3xdsiEScfDugOuiEgJKZyGaYu3QZ+9VUZtbYmLERFxMYXTMO2Jdvy6PbuISMkpnIbpSHTgj80HYN680tYiIuJmCqdhupJdePrnA7BsWWlrERFxM4XTMN3JbkzfHEKhAnPnlroaERH3UjgN053qptA7myqd4yQiUlIKp2H60n3ke5qJVimcRERKSeE0TG+6l1xvI1VVRuEkIlJCvlIXUC6S2SSZTAHi1VRWKpxEREpJPacB3aluiM8Ax0N1ta4OISJSSgqnAd3JbojNBKCmpsTFiIi4nMJpQHdqKJx0dQgRkdJSOA0Y3nNauLDExYiIuJzCacDwntPZZ5e4GBERl1M4DehJ9UBsJv5gjpkzS12NiIi7KZwGFIf1KioKusmgiEiJKZwGdCW7MPFZVER0B1wRkVJTOA3oTHZCbCaRiEM4XOpqRETcTeE0wIZTk8JJRKQMTGk4GWP+1BjzsjFmmzHmzmHL/8YYs8sYs90Ys2Eqaxirjp4kTrqKykoIBEpdjYiIu03ZtfWMMW8ErgHOdRwnbYxpHFi+FLgeWAbMAh41xpzlOE5+qmoZi452LwDV1QZjSlmJiIhMZc/pFuBzjuOkARzHaRtYfg1wr+M4acdx9gC7gLVTWMeY9HXasbzqaiWTiEipTWU4nQVcbIz5P2PM/xpj1gwsnw3sH7Ze68Cykor3BgGordZhOBGRUpvQsJ4x5lFgpFNWbxvYdh2wDlgD/MAYc8Y4t38zcDPA3Cm8b3q+kCcdsz2nOS3eKduPiIiMzYTCyXGcy473mDHmFuC/HcdxgGeNMQWgATgAzBm2asvAspG2fxdwF8Dq1audidQ6mlgmBsk6AObN07CeiEipTeUY1n3AGwGMMWcBAaADeAC43hgTNMYsABYBz05hHSfUm+4dCKcCU9hBExGRMZrKO+H+O/DvxpitQAZ4z0Avapsx5gfAi0AOuLXUM/V6UzacvME0NTU6yUlEpNSmLJwcx8kA7zrOY58FPjtV+x6vvnTfQDilCIUUTiIipaapaQwN6/mDOYLBUlcjIiIKJ4Z6ToFQQeEkIlIGFE4MHXMKh4zCSUSkDEzlhIhTRm+6F1K1REIeQqFSVyMiIuo5Ab3JfkjWEanw66KvIiJlQOEEHOlKgOMlUqETcEVEyoHCCWjryAEQiZS4EBERARROAHR1269Vleo5iYiUA4UT0N1lQykaVTiJiJQDhRPQ3+MHoHmk66uLiMi0UzgBsT47RW/OHL0dIiLlQK0xkOy3JzfNn6d7OYmIlAOFE5Dpj2D8KZob/aUuRUREUDiRyqUoJGrwBOOEw5oQISJSDlwfTkP3ckrpunoiImXC9eFUvCK
5L5hWOImIlAnXh1PxXk6+YFbhJCJSJlwfToP3ctKNBkVEyobrw6knaXtOwZDRFclFRMqE68OpvTcG+RAVIS8+3d1KRKQsuD6cjnRkAIhWqNskIlIuXB9Oxdtl1FRWlLgSEREpcn04dXTZcKqu0pieiEi5cH04dXblAaitKXEhIiIyyPXh1N1jvzbqdhkiImXD9eHU32vfgpbZJS5EREQGuT6cYn32SuTz5rr+rRARKRuub5ET/QFMIE5ttSZEiIiUC9eHUyoWxgTjRMIKJxGRcuH6cMrGI3iCSSordKNBEZFy4epwKjgF8slKvIGkek4iImXE1eHUn+6HZC3eYFoXfRURKSOuDqfedC+kavAFdLsMEZFy4upw6kv3KZxERMqQq8OpO9EL6Sj+QEG3yxARKSOuDqeDHXHAo16TiEiZcXU4HelIAxAJe0tciYiIDOfucOq04VRVoa6TiEg5cXU4Fe/lVKsbDYqIlBVXh1N3twNAU32oxJWIiMhwrg6nnh4DwMwmU+JKRERkOFeHU9/AvZxmNSucRETKiavDKdZnX35zs6vfBhGRsuPqVjkR90EgRk1UU8lFRMqJq8MpGfdiAgkqIwonEZFy4upwSiUCGH+SipCuXSQiUk6mLJyMMSuNMc8YY14wxmwyxqwdWG6MMV82xuwyxvzWGHP+VNVwItlEEBNIEQ4qnEREyslU9pzuBD7tOM5K4PaBnwGuABYN/LsZ+PoU1jCqbCqIx58iHNKwnohIOZnKcHKA6MD31cDBge+vAb7tWM8ANcaY5ims47jyqQo8gQwVQd2iXUSknEzleNZHgIeNMZ/HhuBFA8tnA/uHrdc6sOzQFNYyokK6Ak9VjFBQPScRkXIyoXAyxjwKzBzhoduAS4G/cBznx8aYPwTuBi4b5/Zvxg79MXfu3ImU+hoFp4CTjuD1Z3UvJxGRMjOhZtlxnOOGjTHm28CfD/z4Q+CbA98fAOYMW7VlYNlI278LuAtg9erVzkRqPVYym4RMFV5fjkBgMrcsIiITNZXHnA4Cbxj4/k3AzoHvHwD+eGDW3jqg13GcaR/S60vFbDj583hcPaFeRKT8TOWA1k3APxtjfECKgeE54CHgSmAXkADeO4U1HFdHbwIAnz9fit2LiMgopiycHMf5DbBqhOUOcOtU7Xes2rqTAPgDkzpaKCIik8C1A1qdPfYuuAG/rkguIlJuXBtOXX02nEJBhZOISLlxbTh192UACAc1VU9EpNy4Npx6+7IAVFYES1yJiIgcy7Xh1NOfA6A+qksXiYiUG9eGU3/cTiGvrwmVuBIRETmWa8MpFisA0NxQUeJKRETkWK4Np/6YPb+paYaG9UREyo1rwyket1/r6zSVXESk3Lg3nBIOeDJUR137FoiIlC3XtszJuAf8Kd3LSUSkDLk3nBJejD9JOKCbOYmIlBvXhlM65cX4UwQD6jmJiJQb14ZTNhmw4eRXOImIlBvXhlMuHcD4M4SDGtYTESk3Lg6nIB5fRsN6IiJlyLXhVMiE1HMSESlT7g2ndBiPL0tIs/VERMqOa8PJyVbg9WUJBHSFCBGRcuPKcMoX8pCpwOPL4dUhJxGRsuPKcEpkk5CN4PHnS12KiIiMwJXh1BNLguPF61M4iYiUI1eGU1dfGgCvek4iImXJneHUa8PJ73NKXImIiIzEleHU058BwBdQOImIlCN3hlNfFgC/boIrIlKWXBlOvbEcAMFAiQsREZERuTKc+vrtRIiKsE5yEhEpR64Mp/6BnlO0UuEkIlKO3BlO8QIANdW6rp6ISDlyZzjF7Cy9hhoddBIRKUeuDKd43IZTY32oxJWIiMhIXBlOiYT92qRwEhEpS64Mp2QS8GSZUV1Z6lJERGQE7g0nX5LKsI45iYiUI/eGkz9F0K+p5CIi5ciV4ZRKesCXIuB35csXESl7rmyd0ykPxpchFNB5TiIi5ciV4ZRJecGXJhjQsJ6ISDlyZzilfRhfmoDPlS9fRKTsubJ1zmV8GF9
Ww3oiImXKleGUzwTweDMa1hMRKVMuDSc/xpdTz0lEpEy5MpwK2YCG9UREyphLwymIx5fTsJ6ISJlyZTg52RAebw6PK1+9iEj5m1DzbIy5zhizzRhTMMasPuaxvzHG7DLGbDfGbBi2/PKBZbuMMR+fyP5PWi6Ex5cvya5FROTEJtp32ApcCzwxfKExZilwPbAMuBz4F2OM1xjjBb4GXAEsBW4YWHfa5HIO5AN4vIXp3K2IiIzDhGYEOI7zEoAx5tiHrgHudRwnDewxxuwC1g48tstxnFcGnnfvwLovTqSO8eiLZ4EAXr96TiIi5WqqjrrMBvYP+7l1YNnxlk+b7r4UAF6fM527FRGRcThhz8kY8ygwc4SHbnMc5/7JL+mofd8M3Awwd+7cSdlmdywFRPH5NKwnIlKuThhOjuNcdhLbPQDMGfZzy8AyRlk+0r7vAu4CWL169aR0dXr7MwD4dIqTiEjZmqphvQeA640xQWPMAmAR8CywEVhkjFlgjAlgJ008MEU1jKgnlgbA55/OvYqIyHhMqP9gjHkr8BVgBvCgMeYFx3E2OI6zzRjzA+xEhxxwq+M4+YHnfBh4GPAC/+44zrYJvYJx6otlAQiHpnOvIiIyHhOdrfcT4CfHeeyzwGdHWP4Q8NBE9jsR/fEcAJGwzsAVESlXrmuh+xO25xSp0KWLRETKlfvCKWbPb4pWakaEiEi5cl04xRI2nGqimhEhIlKuXBdO8YFwqq8OlrgSERE5HveFU9KefFtXHShxJSIicjyuC6dEwp7LW18dLnElIiJyPO4Lp6T9OqOmorSFiIjIcbkunJIJB7wpaioipS5FRESOw3XhlEga8CcJBjSVXESkXLkunFJJA74Ufq/rXrqIyCnDdS10OmXDKRBw3UsXETlluK6FTqe8GH+aoF+XLxIRKVeuC6ds2ofxZQj4FE4iIuXKneHkzRBQz0lEpGy5LpxyGR/GlyUYUDiJiJQr14VTPhOw4eTXVHIRkXLlynDy+LKaECEiUsZcF06FrO056ZiTiEj5cl04OdkgHm8en07CFREpW65roZ1cEI8vV+oyRERkFK4KJ8cBsmE83nypSxERkVG4KpwyGQccLx6fwklEpJy5Kpy6+1MAeH1OiSsREZHRuDOcvIUSVyIiIqNxVTj19KcB8PnVcxIRKWeuCqfeWAYAn4b1RETKmrvCKV4MpxIXIiIio3JVOPXFsgD4/SUuRERERuWucIrbcAoGTYkrERGR0bgqnPrj9soQkQpdV09EpJy5KpxicXvybWXEVS9bROSU46pWOpaw4VRdqYNOIiLlzFXhFE/Yk29rqxVOIiLlzJXh1FATKHElIiIyGleFUzJpT75tqKkocSUiIjIal4UTQIGGqMJJRKScuSqcEknAnyQaVjiJiJQzV4VTKgn4kgQDun6RiEg5c1k4ecCfxu911csWETnluKqVTqU8GF+KQMBVL1tE5JTjqlY6k/KCL03Ap8sXiYiUM1eFUzbtxfgy+H2uetkiIqccV7XS2bQP48sQDKjnJCJSzlwVTrmMH+PLaFhPRKTMuSqc8pkAxptTOImIlDlXhVMhG8D4spqtJyJS5ibUShtjrjPGbDPGFIwxq4ctX2+M2WyM+d3A1zcNe2zVwPJdxpgvG2Om7ba0hUwQj3pOIiJlb6JdiK3AtcATxyzvAN7iOM45wHuA7wx77OvATcCigX+XT7CGMXOyQYwvR9CvK0SIiJSzCbXSjuO8BHBs58dxnOeH/bgNCBtjgkAdEHUc55mB530b+APg5xOpY8z15kJ4vHnN1hMRKXPTcfDlbcBzjuOkgdlA67DHWgeWTblCARgIJ79X4SQiUs5O2HMyxjwKzBzhodscx7n/BM9dBvwj8OaTKc4YczNwM8DcuXNPZhOD4ok84MXjLbympyciIuXlhOHkOM5lJ7NhY0wL8BPgjx3H2T2w+ADQMmy1loFlx9v3XcBdAKtXr3ZOpo6inlgKiOD1FSayGRERmQZTMqx
njKkBHgQ+7jjOk8XljuMcAvqMMesGZun9MTBq72uydPenARROIiKngIlOJX+rMaYVuBB40Bjz8MBDHwYWArcbY14Y+Nc48NiHgG8Cu4DdTNNkiK6+FAA+/4Q6YCIiMg0mOlvvJ9ihu2OX3wHccZznbAKWT2S/J6Oz14aTZpGLiJQ/11wqoac/A4DPX+JCRETkhNwTTn1ZAAKBEhciIiIn5Jpw6o3ZcKoIaxq5iEi5c0049fXnAKiM6ARcEZFy555wiuUBqKnSuJ6ISLlzTTjF4vb8pvoazYgQESl37gmnmD2/qbEuXOJKRETkRFwTTv1xB0yexurKUpciIiIn4Jpwiscc8MeprgyVuhQRETkB94RTAvAnqQjqmJOISLlzTTilEh7wJwj4XfOSRUROWa5pqVNJH8af1l1wRUROAa65DGom5QNfinCgptSliLhSNpultbWVVCpV6lJkCoVCIVpaWvD7J3YIxTXhlE0FMP6Eek4iJdLa2kpVVRXz58/X3ahPU47j0NnZSWtrKwsWLJjQtlwzrJdNhPEEkgT9CieRUkilUtTX1yuYTmPGGOrr6yeld+yacMqlKnTMSaTEFEynv8n6HbsmnAqpSjz+DOGga0YyRUROWa4IJ8cBUlV4A1n1nERkUv3qV7/iqquumtRt3nfffXzmM58B4IknnuD888/H5/Pxox/96Kj1/uM//oNFixaxaNEi/uM//mNw+WWXXUZ3d/ek1jTdXBFOXb0ZwIPPnycUUM9JxO0cx6FQKJS6DAByudxrlt1555186EMfAmDu3Lncc889vPOd7zxqna6uLj796U/zf//3fzz77LN8+tOfHgykd7/73fzLv/zL1Bc/hVzRUh9ojwF1CieRMvGRX3yEFw6/MKnbXDlzJV+6/EvHfXzv3r1s2LCBCy64gM2bN/PQQw+xfft2PvnJT5JOpznzzDP51re+RWVlJZ/5zGf46U9/SjKZ5KKLLuLf/u3fMMawa9cuPvjBD9Le3o7X6+WHP/whALFYjLe//e1s3bqVVatW8Z//+Z8YY9i8eTMf/ehHicViNDQ0cM8999Dc3Mwll1zCypUr+c1vfsMNN9zAX/7lXw7WuWPHDoLBIA0NDQDMnz8fAI/n6L7Eww8/zPr166mrqwNg/fr1/OIXv+CGG27g6quv5uKLL+a2226bzLd4Wrmi53SwPQaA3+8onERcbOfOnXzoQx9i27ZtRCIR7rjjDh599FGee+45Vq9ezRe+8AUAPvzhD7Nx40a2bt1KMpnkZz/7GQB/9Ed/xK233sqWLVt46qmnaG5uBuD555/nS1/6Ei+++CKvvPIKTz75JNlslj/90z/lRz/6EZs3b+Z973vfUWGRyWTYtGnTUcEE8OSTT3L++eef8LUcOHCAOXPmDP7c0tLCgQMHAKitrSWdTtPZ2TmxN6yEXNFSH+5MAuAPQMDnipcsUtZG6+FMpXnz5rFu3ToAnnnmGV588UVe97rXATYsLrzwQgAef/xx7rzzThKJBF1dXSxbtoxLLrmEAwcO8Na3vhWwJ5sWrV27lpaWFgBWrlzJ3r17qampYevWraxfvx6AfD4/GGYA73jHO0as8dChQ8yYMWPCr7WxsZGDBw9SX18/4W2Vgita6rYuO+c+FHBFR1FEjiMSiQx+7zgO69ev5/vf//5R66RSKT70oQ+xadMm5syZw6c+9akTnrcTDAYHv/d6veRyORzHYdmyZTz99NMnrGW4cDhMb2/vCV/L7Nmz+dWvfjX4c2trK5dccslRryMcPnXvX+eK1rq9Kw1ASDP1RGTAunXrePLJJ9m1axcA8XicHTt2DAZRQ0MDsVhscIZcVVUVLS0t3HfffQCk02kSicRxt7948WLa29sHwymbzbJt27YT1rVkyZLBmkazYcMGHnnkEbq7u+nu7uaRRx5hw4YNgA3ew4cPDx6vOhW5Ipw6u+1smHBI4SQi1owZM7jnnnu44YYbWLFiBRdeeCEvv/wyNTU13HTTTSx
fvpwNGzawZs2awed85zvf4ctf/jIrVqzgoosu4vDhw8fdfiAQ4Ec/+hEf+9jHOPfcc1m5ciVPPfXUCet6/etfz/PPP4/j2Lt3b9y4kZaWFn74wx/ygQ98gGXLlgFQV1fH3/3d37FmzRrWrFnD7bffPjg5YvPmzaxbtw7fKXwYwxTfgHK3evVqZ9OmTSf13K8++D98/F+e5II55/M//zq55yOIyNi89NJLLFmypNRlnBL+/M//nLe85S1cdtllJ/38q6++mksvvXSSKxubkX7XxpjNjuOsHus2XNFz+vDvX8qVSy6hMjDyGK+ISDn527/921GHDE9k+fLlJQumyeKKcALI5QtM8AruIiLToqmpiauvvvqkn3/TTTdNYjWl4ZpwAqiI6KKTIiKnAleFU1WFJkSIiJwKXBNOHi9UhBVOIiKnAteEk8+LrkguInKKcE04zVsUZ815oROvKCIyTPE8qC9+8YvcfvvtPProo9New969e1m+fPmkbvP555/nT/7kTwB4+eWXufDCCwkGg3z+858/ar1f/OIXLF68mIULF/K5z31ucPn111/Pzp07J7Wm4U7dM7TG6XN/cqXuwiki43L48GE2btw4pis2jCSfz+P1ln7EJpfLveaE3H/4h3/gE5/4BGBP6P3yl788ePWLonw+z6233sovf/lLWlpaWLNmDVdffTVLly7llltu4c477+Qb3/jGlNTsmnBSMImUj498BF6Y3DtmsHIlfGmU68nu3buXq666iq1btwLw+c9/nlgsxqc+9SkuueQSLrjgAh5//HF6enq4++67ufjii3nzm9/MgQMHWLlyJV/5yle4++67ueqqq1i/fj1r167lgQceYPHixdxwww286U1v4qabbqKyspIPfOADPProo3zta18jHA6PeNuMb3zjG9x1111kMhkWLlzId77zHSoqKjhy5Agf/OAHeeWVVwD4+te/zqxZs8jn89x000089dRTzJ49m/vvv59wOMzu3bu59dZbaW9vp6Kigm984xucffbZ3HjjjYRCIZ5//nle97rXDV5xHaC/v5/f/va3nHvuuYC9SGxjYyMPPvjgUe/Zs88+y8KFCznjjDMA21u6//77Wbp0KRdffDE33njjiME3GVwzrCciMppcLsezzz7Ll770JT796U8D8MADD3DmmWfywgsvcPHFFw+uW11dzVe/+lVuvPFG7r33Xrq7uwfPLYrH41xwwQVs2bKFCy644Li3zbj22mvZuHEjW7ZsYcmSJdx9990A/Nmf/RlveMMb2LJlC88999zg5Yp27tzJrbfeyrZt26ipqeHHP/4xADfffDNf+cpX2Lx5M5///OcHb1II9mKwTz311FHBBLBp06YxDROOdlsOj8fDwoUL2bJly/je6DFyTc9JRMrHaD2cUrn22msBWLVqFXv37j3h+uvXr+eHP/zh4P2dirxeL29729sA2L59+3Fvm7F161Y+8YlP0NPTQywWG7xo62OPPca3v/3twW1VV1fT3d3NggULWLly5VE1xmIxnnrqKa677rrB/afT6cHvr7vuuhGHFSf7thyrVq2a8LaOpXASEVfw+XxH3Zr92NtgFG97UbzlxYkUCgVeeuklKioq6O7uHryfUygUGgyE0W6bceONN3Lfffdx7rnncs899xx1+4uRHHtbjmQySaFQoKamhheOM0Y62m05TnQbELC35di/f//gz62trcyePXvw56m8LYeG9UTEFZqammhra6Ozs5N0Oj14d9uT9cUvfpElS5bwve99j/e+971ks9nXrDPabTP6+/tpbm4mm83y3e9+d/A5l156KV//+tcB29Ma7d5O0WiUBQsWDN4u3nGcMQ2zjfW2HGvWrGHnzp3s2bOHTCbDvffee9RllXbs2DHpswiLFE4i4gp+v5/bb7+dtWvXsn79es4+++yT3tb27dv55je/yT/90z9x8cUX8/rXv5477rjjNeuNdtuMv//7v+eCCy7gda973VG1/PM//zOPP/4
455xzDqtWreLFF18ctZbvfve73H333Zx77rksW7aM+++//4T1n3322fT29lTzjU8AAAciSURBVNLf3w/YWYktLS184Qtf4I477qClpYW+vj58Ph9f/epX2bBhA0uWLOEP//APB4+BHTlyhHA4zMyZM8f8vo2HK26ZISKlp1tmlJcvfvGLVFVV8f73v/+knx+NRgfPlRpOt8wQEZGTcssttxx1HGu8ampqeM973jOJFR1NEyJERFwoFArx7ne/+6Sf/973vncSq3kt9ZxEZNqcKocR5ORN1u9Y4SQi0yIUCtHZ2amAOo05jkNnZyeh0MSvY6phPRGZFi0tLbS2ttLe3l7qUmQKhUKhwXO+JkLhJCLTwu/3s2DBglKXIaeICQ3rGWOuM8ZsM8YUjDGvmSJojJlrjIkZY/5q2LLLjTHbjTG7jDEfn8j+RUTk9DTRY05bgWuBJ47z+BeAnxd/MMZ4ga8BVwBLgRuMMUsnWIOIiJxmJjSs5zjOSzDy7SiMMX8A7AHiwxavBXY5jvPKwDr3AtcAo58CLSIirjIlx5yMMZXAx4D1wF8Ne2g2sH/Yz63ABaNs52bg5oEfY8aY7RMoqwHomMDzTyd6L46m92OI3oshei+GTMZ7MW88K58wnIwxjwIjXTzpNsdxjncRp08BX3QcJzaRm/w5jnMXcNdJb2AYY8ym8Vw643Sm9+Joej+G6L34/9s7vxCrijiOf764qSWhaw+ytYIrSOBTmdAuRUSJlkQQ+LAWaP9eiqA/D+HiSz0mESVCCv0hokwzKdkeljKft5JKLV1dM2pF04SUHgLDXw/z271nl3vP4qo7d/f8PjDcOb+Zc34zc773zL1z5sypEW1RI0dbjNs5mdmKCRz3TmCNpE3APOCSpH+B/cDCQr524OQEjh8EQRBMY67JsJ6ZjbwyUtIrwD9mtkVSC7BEUgepU+oGHr0WZQiCIAimLlc6lfwRSUNAF/ClpL6y/Gb2H/Ac0AccBnaa2c9XUobL4KoMD04Toi1GE+1RI9qiRrRFjUlviynzyowgCIKgOsTaekEQBEHTEZ1TEARB0HRUonOaLksmSVooaZ+kX3zZqOfdPl/SV5KO+Wer2yVps9f7gKRlhWOt9/zHJK0v2O+QdND32Sx/FqCRj9xImiHpB0m9vt0hqd/Lv0PSTLfP8u1BT19UOEaP2wckrSrY6+qmkY/cSJonaZekI5IOS+qqqjYkvejfkUOStkuaXRVtSHpP0hlJhwq2bDoo81GKmU3rAMwAjgOLgZnAT8DS3OWaYF3agGUevxE4SloGahOwwe0bgNc8vpq0fJSATqDf7fOBX/2z1eOtnvat55Xv+6Db6/rIHYCXgI+BXt/eCXR7fCvwjMefBbZ6vBvY4fGlrolZQIdrZUaZbhr5yB2AD4CnPT6T9BhH5bRBetj/BHB94Xw9XhVtAPcAy4BDBVs2HTTyMW49cn+hJuFEdQF9he0eoCd3ua5S3b4grcIxALS5rQ0Y8Pg2YG0h/4CnrwW2Fezb3NYGHCnYR/I18pG5/u3AXuA+oNfF/xfQMvbck2aIdnm8xfNprB6G8zXSTZmPzG0xl3RB1hh75bRBbSWa+X6ue4FVVdIGsIjRnVM2HTTyMV4dqjCsV2/JpFsyleWq4UMPtwP9wAIzO+VJp4EFHm9U9zL7UB07JT5y8ibwMnDJt28C/rb0yAKMLv9InT39vOe/3DYq85GTDuAs8L7SMOc7kuZQQW2Y2UngdeB34BTpXO+nutqAvDqY0DW4Cp3TtENp7cLPgBfM7EIxzdJPk2v6fMBk+BgPSQ8BZ8xsf85yNBEtpKGct83sdtKCy6Pur1ZIG62kBaU7gJuBOcADOcvUTEwVHVShczrJNFoySdJ1pI7pIzPb7eY/JbV5ehtwxu2N6l5mb69jL/ORi7uAhyX9BnxCGtp7C5intBIJjC7/SJ09fS5wjstvo3MlPnIyBAyZWb9v7yJ1VlXUxgrghJmdNbOLwG6SXqqqDci
rgwldg6vQOX2HL5nkM2e6gT2ZyzQhfFbMu8BhM3ujkLQHGJ5Ns550L2rYvs5ny3QC5/1vdx+wUlKr/8pcSRobPwVckNTpvtaNOVY9H1kwsx4zazezRaRz+o2ZPQbsA9Z4trFtMVz+NZ7f3N7tM7Y6gCWkG751deP7NPKRDTM7Dfwh6VY33U96FU3ltEEazuuUdIOXdbgtKqkNJ6cOGvkoJ8fNuskOpNkiR0kzbDbmLs8V1ONu0l/lA8CPHlaTxrr3AseAr4H5nl+klzseBw4CywvHehIY9PBEwb6c9BLJ48AWaquI1PXRDAG4l9psvcWkC8gg8Ckwy+2zfXvQ0xcX9t/o9R3AZx6V6aaRj9wBuA343vXxOWmWVSW1AbwKHPHyfkiacVcJbQDbSffaLpL+UT+VUwdlPspCLF8UBEEQNB1VGNYLgiAIphjROQVBEARNR3ROQRAEQdMRnVMQBEHQdETnFARBEDQd0TkFQRAETUd0TkEQBEHT8T/alYnNNjcA5QAAAABJRU5ErkJggg==\n", 56 | "text/plain": [ 57 | "
" 58 | ] 59 | }, 60 | "metadata": { 61 | "needs_background": "light" 62 | }, 63 | "output_type": "display_data" 64 | } 65 | ], 66 | "source": [ 67 | "fig = pu.plot_results(results, average_group=True, split_fn=lambda _: '', shaded_std=False)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [] 76 | } 77 | ], 78 | "metadata": { 79 | "kernelspec": { 80 | "display_name": "Python 3", 81 | "language": "python", 82 | "name": "python3" 83 | }, 84 | "language_info": { 85 | "codemirror_mode": { 86 | "name": "ipython", 87 | "version": 3 88 | }, 89 | "file_extension": ".py", 90 | "mimetype": "text/x-python", 91 | "name": "python", 92 | "nbconvert_exporter": "python", 93 | "pygments_lexer": "ipython3", 94 | "version": "3.6.8" 95 | } 96 | }, 97 | "nbformat": 4, 98 | "nbformat_minor": 2 99 | } 100 | --------------------------------------------------------------------------------