├── .gitignore
├── LICENSE
├── README.md
├── code
├── alexnet.ipynb
├── attention.ipynb
├── cnn.ipynb
├── dataset.py
├── ddpm_1d_example.ipynb
├── ddpm_2d_example.ipynb
├── ddpm_cfg_1d_example.ipynb
├── diffusion.py
├── diffusion_constants.ipynb
├── diffusion_resblock.ipynb
├── diffusion_unet.ipynb
├── diffusion_unet_legacy.ipynb
├── googlenet.ipynb
├── gp.ipynb
├── hdm_1d_example.ipynb
├── hdm_constant.ipynb
├── mdn.py
├── mdn_reg.ipynb
├── mlp.ipynb
├── module.py
├── repaint_1d_example.ipynb
├── repaint_constant.ipynb
├── resnet.ipynb
├── updownsample.ipynb
├── util.py
└── vgg.ipynb
└── img
└── unet.jpg
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 |
162 | # Mac
163 | .DS_Store
164 |
165 | # Data folder
166 | data/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Sungjoon
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Yet Another Pytorch Tutorial v2
2 |
3 | Minimal implementations of
4 | - [MLP](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/mlp.ipynb): MNIST classification using multi-layer perceptron (MLP)
5 | - [CNN](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/cnn.ipynb): MNIST classification using convolutional neural networks (CNN)
6 | - [AlexNet](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/alexnet.ipynb): MNIST classification using AlexNet
7 | - [VGG](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/vgg.ipynb): MNIST classification using VGG
8 | - [GoogLeNet](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/googlenet.ipynb): MNIST classification using GoogLeNet
9 | - [MDN](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/mdn_reg.ipynb): Regression using Mixture Density Networks
10 | - [ResNet](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/resnet.ipynb): MNIST classification using ResNet
11 | - [Attention](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/attention.ipynb): Attention block with legacy QKV attention mechanism
12 | - [ResBlock](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/diffusion_resblock.ipynb): Residual block for diffusion models
13 | - [Diffusion](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/diffusion_constants.ipynb): Diffusion constants
14 | - [DDPM 1D Example](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/ddpm_1d_example.ipynb): Denoising Diffusion Probabilistic Model (DDPM) example on generating trajectories
15 | - [DDPM 2D Example](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/ddpm_2d_example.ipynb): Denoising Diffusion Probabilistic Model (DDPM) example on generating images
16 | - [DDPM-CFG 1D Example](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/ddpm_cfg_1d_example.ipynb): Conditional Generation using Classifier-Free Guidance (CFG)
17 | - [Repaint 1D Example](https://github.com/sjchoi86/yet-another-pytorch-tutorial-v2/blob/main/code/repaint_1d_example.ipynb): Diffusion-based inpainting example on 1D data
18 |
19 | Contact
20 | - sungjoon dash choi at korea dot ac dot kr
21 |
--------------------------------------------------------------------------------
/code/attention.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "71051e5d",
6 | "metadata": {},
7 | "source": [
8 | "### Attention block"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "id": "7ae3d6f4",
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "PyTorch version:[2.0.1].\n"
22 | ]
23 | }
24 | ],
25 | "source": [
26 | "import numpy as np\n",
27 | "import matplotlib.pyplot as plt\n",
28 | "import torch as th\n",
29 | "from module import AttentionBlock\n",
30 | "from util import get_torch_size_string\n",
31 | "np.set_printoptions(precision=3)\n",
32 | "th.set_printoptions(precision=3)\n",
33 | "%matplotlib inline\n",
34 | "%config InlineBackend.figure_format='retina'\n",
35 | "print (\"PyTorch version:[%s].\"%(th.__version__))"
36 | ]
37 | },
38 | {
39 | "cell_type": "markdown",
40 | "id": "bfb7e801",
41 | "metadata": {},
42 | "source": [
43 | "### Let's see how `AttentionBlock` works\n",
44 | "- First, we assume that an input tensor has a shape of [B x C x W x H].\n",
45 |     "- This can be thought of as having a total of WH tokens with each token having C dimensions. \n",
46 |     "- The MHA operates by initially partitioning the channels, executing the qkv attention process, and then merging the results. \n",
47 |     "- Note that the number of channels should be divisible by the number of heads."
48 | ]
49 | },
50 | {
51 | "cell_type": "markdown",
52 | "id": "dca0e4fa",
53 | "metadata": {},
54 | "source": [
55 | "### `dims=2`\n",
56 | "#### `x` has a shape of `[B x C x W x H]`"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 2,
62 | "id": "8c6e06cc",
63 | "metadata": {},
64 | "outputs": [
65 | {
66 | "name": "stdout",
67 | "output_type": "stream",
68 | "text": [
69 | "input shape:[16x128x28x28] output shape:[16x128x28x28]\n",
70 | "[ x]:[ 16x128x28x28]\n",
71 | "[ x_rsh]:[ 16x128x784]\n",
72 | "[ x_nzd]:[ 16x128x784]\n",
73 | "[ qkv]:[ 16x384x784]\n",
74 | "[ h_att]:[ 16x128x784]\n",
75 | "[ h_proj]:[ 16x128x784]\n",
76 | "[ out]:[ 16x128x28x28]\n"
77 | ]
78 | }
79 | ],
80 | "source": [
81 | "layer = AttentionBlock(n_channels=128,n_heads=4,n_groups=32)\n",
82 | "x = th.randn(16,128,28,28)\n",
83 | "out,intermediate_output_dict = layer(x)\n",
84 | "print (\"input shape:[%s] output shape:[%s]\"%\n",
85 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
86 | "# Print intermediate values\n",
87 | "for key,value in intermediate_output_dict.items():\n",
88 | " print (\"[%10s]:[%15s]\"%(key,get_torch_size_string(value)))"
89 | ]
90 | },
91 | {
92 | "cell_type": "markdown",
93 | "id": "a91d9cec",
94 | "metadata": {},
95 | "source": [
96 | "### `dims=1`\n",
97 | "#### `x` has a shape of `[B x C x L]`"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": 3,
103 | "id": "97d1bbdb",
104 | "metadata": {
105 | "scrolled": true
106 | },
107 | "outputs": [
108 | {
109 | "name": "stdout",
110 | "output_type": "stream",
111 | "text": [
112 | "input shape:[16x4x100] output shape:[16x4x100]\n",
113 | "[ x]:[ 16x4x100]\n",
114 | "[ x_rsh]:[ 16x4x100]\n",
115 | "[ x_nzd]:[ 16x4x100]\n",
116 | "[ qkv]:[ 16x12x100]\n",
117 | "[ h_att]:[ 16x4x100]\n",
118 | "[ h_proj]:[ 16x4x100]\n",
119 | "[ out]:[ 16x4x100]\n"
120 | ]
121 | }
122 | ],
123 | "source": [
124 | "layer = AttentionBlock(n_channels=4,n_heads=2,n_groups=1)\n",
125 | "x = th.randn(16,4,100)\n",
126 | "out,intermediate_output_dict = layer(x)\n",
127 | "print (\"input shape:[%s] output shape:[%s]\"%\n",
128 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
129 | "# Print intermediate values\n",
130 | "for key,value in intermediate_output_dict.items():\n",
131 | " print (\"[%10s]:[%15s]\"%(key,get_torch_size_string(value)))"
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "execution_count": null,
137 | "id": "8e60d073",
138 | "metadata": {},
139 | "outputs": [],
140 | "source": []
141 | }
142 | ],
143 | "metadata": {
144 | "kernelspec": {
145 | "display_name": "Python 3 (ipykernel)",
146 | "language": "python",
147 | "name": "python3"
148 | },
149 | "language_info": {
150 | "codemirror_mode": {
151 | "name": "ipython",
152 | "version": 3
153 | },
154 | "file_extension": ".py",
155 | "mimetype": "text/x-python",
156 | "name": "python",
157 | "nbconvert_exporter": "python",
158 | "pygments_lexer": "ipython3",
159 | "version": "3.9.16"
160 | }
161 | },
162 | "nbformat": 4,
163 | "nbformat_minor": 5
164 | }
165 |
--------------------------------------------------------------------------------
/code/dataset.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import torch as th
4 | from torchvision import datasets,transforms
5 | from util import gp_sampler,periodic_step,get_torch_size_string
6 |
7 | def mnist(root_path='./data/',batch_size=128):
8 | """
9 | MNIST
10 | """
11 | mnist_train = datasets.MNIST(root=root_path,train=True,transform=transforms.ToTensor(),download=True)
12 | mnist_test = datasets.MNIST(root=root_path,train=False,transform=transforms.ToTensor(),download=True)
13 | train_iter = th.utils.data.DataLoader(mnist_train,batch_size=batch_size,shuffle=True,num_workers=1)
14 | test_iter = th.utils.data.DataLoader(mnist_test,batch_size=batch_size,shuffle=True,num_workers=1)
15 | # Data
16 | train_data,train_label = mnist_train.data,mnist_train.targets
17 | test_data,test_label = mnist_test.data,mnist_test.targets
18 | return train_iter,test_iter,train_data,train_label,test_data,test_label
19 |
20 | def get_1d_training_data(
21 | traj_type = 'step', # {'step','gp'}
22 | n_traj = 10,
23 | L = 100,
24 | device = 'cpu',
25 | seed = 1,
26 | plot_data = True,
27 | figsize = (6,2),
28 | ls = '-',
29 | lc = 'k',
30 | lw = 1,
31 | verbose = True,
32 | ):
33 | """
34 | 1-D training data
35 | """
36 | if seed is not None:
37 | np.random.seed(seed=seed)
38 | times = np.linspace(start=0.0,stop=1.0,num=L).reshape((-1,1)) # [L x 1]
39 | if traj_type == 'gp':
40 | traj = th.from_numpy(
41 | gp_sampler(
42 | times = times,
43 | hyp_gain = 2.0,
44 | hyp_len = 0.2,
45 | meas_std = 1e-8,
46 | n_traj = n_traj
47 | )
48 | ).to(th.float32).to(device) # [n_traj x L]
49 | elif traj_type == 'gp2':
50 | traj_np = np.zeros((n_traj,L))
51 | for i_idx in range(n_traj):
52 | traj_np[i_idx,:] = gp_sampler(
53 | times = times,
54 | hyp_gain = 2.0,
55 | hyp_len = np.random.uniform(1e-8,1.0),
56 | meas_std = 1e-8,
57 | n_traj = 1
58 | ).reshape(-1)
59 | traj = th.from_numpy(
60 | traj_np
61 | ).to(th.float32).to(device) # [n_traj x L]
62 | elif traj_type == 'step':
63 | traj_np = np.zeros((n_traj,L))
64 | for i_idx in range(n_traj):
65 | period = np.random.uniform(low=0.38,high=0.42)
66 | time_offset = np.random.uniform(low=-0.02,high=0.02)
67 | y_min = np.random.uniform(low=-3.2,high=-2.8)
68 | y_max = np.random.uniform(low=2.8,high=3.2)
69 | traj_np[i_idx,:] = periodic_step(
70 | times = times,
71 | period = period,
72 | time_offset = time_offset,
73 | y_min = y_min,
74 | y_max = y_max
75 | ).reshape(-1)
76 | traj = th.from_numpy(
77 | traj_np
78 | ).to(th.float32).to(device) # [n_traj x L]
79 | elif traj_type == 'step2':
80 | traj_np = np.zeros((n_traj,L))
81 | for i_idx in range(n_traj): # for each trajectory
82 | # First, sample value and duration
83 | rate = 5
84 | val = np.random.uniform(low=-3.0,high=3.0)
85 | dur_tick = int(L*np.random.exponential(scale=1/rate))
86 | dim_dur = 0.1 # minimum duration in sec
87 | dur_tick = max(dur_tick,int(dim_dur*L))
88 |
89 | tick_fr = 0
90 | tick_to = tick_fr+dur_tick
91 | while True:
92 | # Append
93 | traj_np[i_idx,tick_fr:min(L,tick_to)] = val
94 |
95 | # Termination condition
96 | if tick_to >= L: break
97 |
98 | # sample value and duration
99 | val = np.random.uniform(low=-3.0,high=3.0)
100 | dur_tick = int(L*np.random.exponential(scale=1/rate))
101 | dur_tick = max(dur_tick,int(dim_dur*L))
102 |
103 | # Update tick
104 | tick_fr = tick_to
105 | tick_to = tick_fr+dur_tick
106 | traj = th.from_numpy(
107 | traj_np
108 | ).to(th.float32).to(device) # [n_traj x L]
109 | elif traj_type == 'triangle':
110 | traj_np = np.zeros((n_traj,L))
111 | for i_idx in range(n_traj):
112 | period = 0.2
113 | time_offset = np.random.uniform(low=-0.02,high=0.02)
114 | y_min = np.random.uniform(low=-3.2,high=-2.8)
115 | y_max = np.random.uniform(low=2.8,high=3.2)
116 | times_mod = np.mod(times+time_offset,period)/period
117 | y = (y_max - y_min) * times_mod + y_min
118 | traj_np[i_idx,:] = y.reshape(-1)
119 | traj = th.from_numpy(
120 | traj_np
121 | ).to(th.float32).to(device) # [n_traj x L]
122 | else:
123 | print ("Unknown traj_type:[%s]"%(traj_type))
124 | # Plot
125 | if plot_data:
126 | plt.figure(figsize=figsize)
127 | for i_idx in range(n_traj):
128 | plt.plot(times,traj[i_idx,:].cpu().numpy(),ls=ls,color=lc,lw=lw)
129 | plt.xlim([0.0,1.0])
130 | plt.ylim([-4,+4])
131 | plt.xlabel('Time',fontsize=10)
132 | plt.title('Trajectory type:[%s]'%(traj_type),fontsize=10)
133 | plt.show()
134 | # Print
135 | x_0 = traj[:,None,:] # [N x C x L]
136 | if verbose:
137 | print ("x_0:[%s]"%(get_torch_size_string(x_0)))
138 | # Out
139 | return times,x_0
140 |
141 | def get_mdn_data(
142 | n_train = 1000,
143 | x_min = 0.0,
144 | x_max = 1.0,
145 | y_min = -1.0,
146 | y_max = 1.0,
147 | freq = 1.0,
148 | noise_rate = 1.0,
149 | seed = 0,
150 | FLIP_AUGMENT = True,
151 | ):
152 | np.random.seed(seed=seed)
153 |
154 | if FLIP_AUGMENT:
155 | n_half = n_train // 2
156 | x_train_a = x_min + (x_max-x_min)*np.random.rand(n_half,1) # [n_half x 1]
157 | x_rate = (x_train_a-x_min)/(x_max-x_min) # [n_half x 1]
158 | sin_temp = y_min + (y_max-y_min)*np.sin(2*np.pi*freq*x_rate)
159 | cos_temp = y_min + (y_max-y_min)*np.cos(2*np.pi*freq*x_rate)
160 | y_train_a = np.concatenate(
161 | (sin_temp+1*(y_max-y_min)*x_rate,
162 | cos_temp+1*(y_max-y_min)*x_rate),axis=1) # [n_half x 2]
163 | x_train_b = x_min + (x_max-x_min)*np.random.rand(n_half,1) # [n_half x 1]
164 | x_rate = (x_train_b-x_min)/(x_max-x_min) # [n_half x 1]
165 | sin_temp = y_min + (y_max-y_min)*np.sin(2*np.pi*freq*x_rate)
166 | cos_temp = y_min + (y_max-y_min)*np.cos(2*np.pi*freq*x_rate)
167 | y_train_b = -np.concatenate(
168 | (sin_temp+1*(y_max-y_min)*x_rate,
169 | cos_temp+1*(y_max-y_min)*x_rate),axis=1) # [n_half x 2]
170 | # Concatenate
171 | x_train = np.concatenate((x_train_a,x_train_b),axis=0) # [n_train x 1]
172 | y_train = np.concatenate((y_train_a,y_train_b),axis=0) # [n_train x 2]
173 | else:
174 | x_train = x_min + (x_max-x_min)*np.random.rand(n_train,1) # [n_train x 1]
175 | x_rate = (x_train-x_min)/(x_max-x_min) # [n_train x 1]
176 | sin_temp = y_min + (y_max-y_min)*np.sin(2*np.pi*freq*x_rate)
177 | cos_temp = y_min + (y_max-y_min)*np.cos(2*np.pi*freq*x_rate)
178 | y_train = np.concatenate(
179 | (sin_temp+1*(y_max-y_min)*x_rate,
180 | cos_temp+1*(y_max-y_min)*x_rate),axis=1) # [n_train x 2]
181 |
182 | # Add noise
183 | x_rate = (x_train-x_min)/(x_max-x_min) # [n_train x 1]
184 | noise = noise_rate * (y_max-y_min) * (2*np.random.rand(n_train,2)-1) * ((x_rate)**2) # [n_train x 2]
185 | y_train = y_train + noise # [n_train x 2]
186 | return x_train,y_train
--------------------------------------------------------------------------------
/code/diffusion.py:
--------------------------------------------------------------------------------
1 | import math,random
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | import torch as th
5 | import torch.nn as nn
6 | from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
7 | from module import (
8 | conv_nd,
9 | ResBlock,
10 | AttentionBlock,
11 | TimestepEmbedSequential,
12 | )
13 |
14 | def get_named_beta_schedule(
15 | schedule_name,
16 | num_diffusion_timesteps,
17 | scale_betas=1.0,
18 | np_type=np.float64
19 | ):
20 | """
21 | Get a pre-defined beta schedule for the given name.
22 |
23 | The beta schedule library consists of beta schedules which remain similar
24 | in the limit of num_diffusion_timesteps.
25 | Beta schedules may be added, but should not be removed or changed once
26 | they are committed to maintain backwards compatibility.
27 | """
28 | if schedule_name == "linear":
29 | # Linear schedule from Ho et al, extended to work for any number of
30 | # diffusion steps.
31 | scale = scale_betas * 1000 / num_diffusion_timesteps
32 | beta_start = scale * 0.0001
33 | beta_end = scale * 0.02
34 | return np.linspace(
35 | beta_start, beta_end, num_diffusion_timesteps, dtype=np_type
36 | )
37 | elif schedule_name == "cosine":
38 | return betas_for_alpha_bar(
39 | num_diffusion_timesteps,
40 | lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2,
41 | )
42 | else:
43 | raise NotImplementedError(f"unknown beta schedule: {schedule_name}")
44 |
45 | def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):
46 | """
47 | Create a beta schedule that discretizes the given alpha_t_bar function,
48 | which defines the cumulative product of (1-beta) over time from t = [0,1].
49 |
50 | :param num_diffusion_timesteps: the number of betas to produce.
51 | :param alpha_bar: a lambda that takes an argument t from 0 to 1 and
52 | produces the cumulative product of (1-beta) up to that
53 | part of the diffusion process.
54 | :param max_beta: the maximum beta to use; use values lower than 1 to
55 | prevent singularities.
56 | """
57 | betas = []
58 | for i in range(num_diffusion_timesteps):
59 | t1 = i / num_diffusion_timesteps
60 | t2 = (i + 1) / num_diffusion_timesteps
61 | betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))
62 | return np.array(betas)
63 |
64 | def get_ddpm_constants(
65 | schedule_name = 'cosine',
66 | T = 1000,
67 | np_type = np.float64
68 | ):
69 | timesteps = np.linspace(start=1,stop=T,num=T)
70 | betas = get_named_beta_schedule(
71 | schedule_name = schedule_name,
72 | num_diffusion_timesteps = T,
73 | scale_betas = 1.0,
74 | ).astype(np_type) # [1,000]
75 | alphas = 1.0 - betas
76 | alphas_bar = np.cumprod(alphas, axis=0) # cummulative product
77 | alphas_bar_prev = np.append(1.0,alphas_bar[:-1])
78 | sqrt_recip_alphas = np.sqrt(1.0/alphas)
79 | sqrt_alphas_bar = np.sqrt(alphas_bar)
80 | sqrt_one_minus_alphas_bar = np.sqrt(1.0-alphas_bar)
81 | posterior_variance = betas*(1.0-alphas_bar_prev)/(1.0-alphas_bar)
82 | posterior_variance = posterior_variance.astype(np_type)
83 |
84 | # Append
85 | dc = {}
86 | dc['schedule_name'] = schedule_name
87 | dc['T'] = T
88 | dc['timesteps'] = timesteps
89 | dc['betas'] = betas
90 | dc['alphas'] = alphas
91 | dc['alphas_bar'] = alphas_bar
92 | dc['alphas_bar_prev'] = alphas_bar_prev
93 | dc['sqrt_recip_alphas'] = sqrt_recip_alphas
94 | dc['sqrt_alphas_bar'] = sqrt_alphas_bar
95 | dc['sqrt_one_minus_alphas_bar'] = sqrt_one_minus_alphas_bar
96 | dc['posterior_variance'] = posterior_variance
97 |
98 | return dc
99 |
100 | def plot_ddpm_constants(dc):
101 | """
102 | Plot DDPM constants
103 | """
104 | plt.figure(figsize=(10,3))
105 | cs = [plt.cm.gist_rainbow(x) for x in np.linspace(0,1,8)]
106 | lw = 2
107 | plt.subplot(1,2,1)
108 | plt.plot(dc['timesteps'],dc['betas'],
109 | color=cs[0],label=r'$\beta_t$',lw=lw)
110 | plt.plot(dc['timesteps'],dc['alphas'],
111 | color=cs[1],label=r'$\alpha_t$',lw=lw)
112 | plt.plot(dc['timesteps'],dc['alphas_bar'],
113 | color=cs[2],label=r'$\bar{\alpha}_t$',lw=lw)
114 | plt.plot(dc['timesteps'],dc['sqrt_alphas_bar'],
115 | color=cs[5],label=r'$\sqrt{\bar{\alpha}_t}$',lw=lw)
116 |
117 | plt.plot(dc['timesteps'],dc['sqrt_one_minus_alphas_bar'],
118 | color=cs[6],label=r'$\sqrt{1-\bar{\alpha}_t}$',lw=lw)
119 |
120 |
121 | plt.plot(dc['timesteps'],dc['posterior_variance'],'--',
122 | color='k',label=r'$ Var[x_{t-1}|x_t,x_0] $',lw=lw)
123 |
124 | plt.xlabel('Diffusion steps',fontsize=8)
125 | plt.legend(fontsize=10,loc='center left',bbox_to_anchor=(1,0.5))
126 | plt.grid(lw=0.5)
127 | plt.title('DDPM Constants',fontsize=10)
128 | plt.subplot(1,2,2)
129 | plt.plot(dc['timesteps'],dc['betas'],color=cs[0],label=r'$\beta_t$',lw=lw)
130 | plt.plot(dc['timesteps'],dc['posterior_variance'],'--',
131 | color='k',label=r'$ Var[x_{t-1}|x_t,x_0] $',lw=lw)
132 | plt.xlabel('Diffusion steps',fontsize=8)
133 | plt.legend(fontsize=10,loc='center left',bbox_to_anchor=(1,0.5))
134 | plt.grid(lw=0.5)
135 | plt.title('DDPM Constants',fontsize=10)
136 | plt.subplots_adjust(wspace=0.7)
137 | plt.show()
138 |
139 | def timestep_embedding(timesteps, dim, max_period=10000):
140 | """
141 | Create sinusoidal timestep embeddings.
142 |
143 | :param timesteps: a 1-D Tensor of N indices, one per batch element.
144 | These may be fractional.
145 | :param dim: the dimension of the output.
146 | :param max_period: controls the minimum frequency of the embeddings.
147 | :return: an [N x dim] Tensor of positional embeddings.
148 | """
149 | half = dim // 2
150 | freqs = th.exp(
151 | -math.log(max_period) * th.arange(start=0, end=half, dtype=th.float32) / half
152 | ).to(device=timesteps.device)
153 | args = timesteps[:, None].float() * freqs[None]
154 | embedding = th.cat([th.cos(args), th.sin(args)], dim=-1)
155 | if dim % 2:
156 | embedding = th.cat([embedding, th.zeros_like(embedding[:, :1])], dim=-1)
157 | return embedding
158 |
159 | def forward_sample(x0_batch,t_batch,dc,M=None,noise_scale=1.0):
160 | """
161 | Forward diffusion sampling
162 | :param x0_batch: [B x C x ...]
163 | :param t_batch: [B]
164 | :param dc: dictionary of diffusion constants
165 | :param M: a matrix of [L x L] for [B x C x L] data
166 | :return: xt_batch of [B x C x ...] and noise of [B x C x ...]
167 | """
168 | # Gather diffusion constants with matching dimension
169 | out_shape = (t_batch.shape[0],) + ((1,)*(len(x0_batch.shape)-1))
170 | device = t_batch.device
171 | sqrt_alphas_bar_t = th.gather(
172 | input = th.from_numpy(dc['sqrt_alphas_bar']).to(device), # [T]
173 | dim = -1,
174 | index = t_batch
175 | ).reshape(out_shape) # [B x 1 x 1 x 1] if (rank==4) and [B x 1 x 1] if (rank==3)
176 | sqrt_one_minus_alphas_bar = th.gather(
177 | input = th.from_numpy(dc['sqrt_one_minus_alphas_bar']).to(device), # [T]
178 | dim = -1,
179 | index = t_batch
180 | ).reshape(out_shape) # [B x 1 x 1 x 1] if (rank==4) and [B x 1 x 1] if (rank==3)
181 |
182 | # Forward sample
183 | noise = th.randn_like(input=x0_batch) # [B x C x ...]
184 |
185 | # (optional) correlated noise
186 | if M is not None:
187 | B = x0_batch.shape[0]
188 | C = x0_batch.shape[1]
189 | L = x0_batch.shape[2]
190 | if isinstance(M, list): # if M is a list,
191 | M_use = random.choice(M)
192 | else:
193 | M_use = M # [L x L]
194 | M_exp = M_use[None,None,:,:].expand(B,C,L,L) # [L x L] => [B x C x L x L]
195 | noise_exp = noise[:,:,:,None] # [B x C x L x 1]
196 | noise_exp = M_exp @ noise_exp # [B x C x L x 1]
197 | noise = noise_exp.squeeze(dim=3) # [B x C x L]
198 |
199 | # Jump diffusion
200 | xt_batch = sqrt_alphas_bar_t*x0_batch + \
201 | sqrt_one_minus_alphas_bar*noise_scale*noise # [B x C x ...]
202 | return xt_batch,noise
203 |
204 | class DiffusionUNet(nn.Module):
205 | """
206 | U-Net for diffusion models
207 | """
208 | def __init__(
209 | self,
210 | name = 'unet',
211 | dims = 1, # spatial dimension, if dims==1, [B x C x L], if dims==2, [B x C x W x H]
212 | n_in_channels = 128, # input channels
213 | n_model_channels = 64, # base channel size
214 | n_emb_dim = 128, # time embedding size
215 | n_cond_dim = 0, # conditioning vector size (default is 0 indicating an unconditional model)
216 | n_enc_blocks = 2, # number of encoder blocks
217 | n_dec_blocks = 2, # number of decoder blocks
218 | n_groups = 16, # group norm paramter
219 | n_heads = 4, # number of heads
220 | actv = nn.SiLU(),
221 | kernel_size = 3, # kernel size
222 | padding = 1,
223 | use_resblock = True,
224 | use_attention = True,
225 | skip_connection = False,
226 | use_scale_shift_norm = True, # positional embedding handling
227 | device = 'cpu',
228 | ):
229 | super().__init__()
230 | self.name = name
231 | self.dims = dims
232 | self.n_in_channels = n_in_channels
233 | self.n_model_channels = n_model_channels
234 | self.n_emb_dim = n_emb_dim
235 | self.n_cond_dim = n_cond_dim
236 | self.n_enc_blocks = n_enc_blocks
237 | self.n_dec_blocks = n_dec_blocks
238 | self.n_groups = n_groups
239 | self.n_heads = n_heads
240 | self.actv = actv
241 | self.kernel_size = kernel_size
242 | self.padding = padding
243 | self.use_resblock = use_resblock
244 | self.use_attention = use_attention
245 | self.skip_connection = skip_connection
246 | self.use_scale_shift_norm = use_scale_shift_norm
247 | self.device = device
248 |
249 | # Time embedding
250 | self.time_embed = nn.Sequential(
251 | nn.Linear(in_features=self.n_model_channels,out_features=self.n_emb_dim),
252 | nn.SiLU(),
253 | nn.Linear(in_features=self.n_emb_dim,out_features=self.n_emb_dim),
254 | ).to(self.device)
255 |
256 | # Conditional embedding
257 | if self.n_cond_dim > 0:
258 | self.cond_embed = nn.Sequential(
259 | nn.Linear(in_features=self.n_cond_dim,out_features=self.n_emb_dim),
260 | nn.SiLU(),
261 | nn.Linear(in_features=self.n_emb_dim,out_features=self.n_emb_dim),
262 | ).to(self.device)
263 |
264 | # Lifting
265 | self.lift = conv_nd(
266 | dims = self.dims,
267 | in_channels = self.n_in_channels,
268 | out_channels = self.n_model_channels,
269 | kernel_size = 1,
270 | ).to(device)
271 |
272 | # Projection
273 | self.proj = conv_nd(
274 | dims = self.dims,
275 | in_channels = self.n_model_channels,
276 | out_channels = self.n_in_channels,
277 | kernel_size = 1,
278 | ).to(device)
279 |
280 | # Declare U-net
281 | # Encoder
282 | self.enc_layers = []
283 | for e_idx in range(self.n_enc_blocks):
284 | # Residual block in encoder
285 | if self.use_resblock:
286 | self.enc_layers.append(
287 | ResBlock(
288 | name = 'res',
289 | n_channels = self.n_model_channels,
290 | n_emb_channels = self.n_emb_dim,
291 | n_out_channels = self.n_model_channels,
292 | n_groups = self.n_groups,
293 | dims = self.dims,
294 | actv = self.actv,
295 | kernel_size = self.kernel_size,
296 | padding = self.padding,
297 | upsample = False,
298 | downsample = False,
299 | use_scale_shift_norm = self.use_scale_shift_norm,
300 | ).to(device)
301 | )
302 | # Attention block in encoder
303 | if self.use_attention:
304 | self.enc_layers.append(
305 | AttentionBlock(
306 | name = 'att',
307 | n_channels = self.n_model_channels,
308 | n_heads = self.n_heads,
309 | n_groups = self.n_groups,
310 | ).to(device)
311 | )
312 |
313 | # Decoder
314 | self.dec_layers = []
315 | for d_idx in range(self.n_dec_blocks):
316 | # Residual block in decoder
317 | if self.use_resblock:
318 | if d_idx == 0: n_channels = self.n_model_channels*self.n_enc_blocks
319 | else: n_channels = self.n_model_channels
320 | self.dec_layers.append(
321 | ResBlock(
322 | name = 'res',
323 | n_channels = n_channels,
324 | n_emb_channels = self.n_emb_dim,
325 | n_out_channels = self.n_model_channels,
326 | n_groups = self.n_groups,
327 | dims = self.dims,
328 | actv = self.actv,
329 | kernel_size = self.kernel_size,
330 | padding = self.padding,
331 | upsample = False,
332 | downsample = False,
333 | use_scale_shift_norm = self.use_scale_shift_norm,
334 | ).to(device)
335 | )
336 | # Attention block in decoder
337 | if self.use_attention:
338 | self.dec_layers.append(
339 | AttentionBlock(
340 | name = 'att',
341 | n_channels = self.n_model_channels,
342 | n_heads = self.n_heads,
343 | n_groups = self.n_groups,
344 | ).to(device)
345 | )
346 |
347 | # Define U-net
348 | self.enc_net = nn.Sequential()
349 | for l_idx,layer in enumerate(self.enc_layers):
350 | self.enc_net.add_module(
351 | name = 'enc_%02d'%(l_idx),
352 | module = TimestepEmbedSequential(layer)
353 | )
354 | self.dec_net = nn.Sequential()
355 | for l_idx,layer in enumerate(self.dec_layers):
356 | self.dec_net.add_module(
357 | name = 'dec_%02d'%(l_idx),
358 | module = TimestepEmbedSequential(layer)
359 | )
360 |
361 | def forward(self,x,timesteps,c=None):
362 | """
363 | :param x: [B x n_in_channels x ...]
364 | :param timesteps: [B]
365 | :param c: [B]
366 | :return: [B x n_in_channels x ...], same shape as x
367 | """
368 | intermediate_output_dict = {}
369 | intermediate_output_dict['x'] = x
370 |
371 | # time embedding
372 | emb = self.time_embed(
373 | timestep_embedding(timesteps,self.n_model_channels)
374 | ) # [B x n_emb_dim]
375 |
376 | # conditional embedding
377 | if self.n_cond_dim > 0:
378 | cond = self.cond_embed(c)
379 | emb = emb + cond
380 |
381 | # Lift input
382 | h = self.lift(x) # [B x n_model_channels x ...]
383 | intermediate_output_dict['x_lifted'] = h
384 |
385 | # Encoder
386 | self.h_enc_list = []
387 | for m_idx,module in enumerate(self.enc_net):
388 | h = module(h,emb)
389 | if isinstance(h,tuple): h = h[0] # in case of having tuple
390 | # Append
391 | module_name = module[0].name
392 | intermediate_output_dict['h_enc_%s_%02d'%(module_name,m_idx)] = h
393 | # Append encoder output
394 | if self.use_resblock and self.use_attention:
395 | if (m_idx%2) == 1:
396 | self.h_enc_list.append(h)
397 | elif self.use_resblock and not self.use_attention:
398 | self.h_enc_list.append(h)
399 | elif not self.use_resblock and self.use_attention:
400 | self.h_enc_list.append(h)
401 | else:
402 | self.h_enc_list.append(h)
403 |
404 | # Stack encoder outputs
405 | if not self.use_resblock and self.use_attention:
406 | h_enc_stack = h
407 | else:
408 | for h_idx,h_enc in enumerate(self.h_enc_list):
409 | if h_idx == 0: h_enc_stack = h_enc
410 | else: h_enc_stack = th.cat([h_enc_stack,h_enc],dim=1)
411 | intermediate_output_dict['h_enc_stack'] = h_enc_stack
412 |
413 | # Decoder
414 | h = h_enc_stack # [B x n_enc_blocks*n_model_channels x ...]
415 | for m_idx,module in enumerate(self.dec_net):
416 | h = module(h,emb) # [B x n_model_channels x ...]
417 | if isinstance(h,tuple): h = h[0] # in case of having tuple
418 | # Append
419 | module_name = module[0].name
420 | intermediate_output_dict['h_dec_%s_%02d'%(module_name,m_idx)] = h
421 |
422 | # Projection
423 | if self.skip_connection:
424 | out = self.proj(h) + x # [B x n_in_channels x ...]
425 | else:
426 | out = self.proj(h) # [B x n_in_channels x ...]
427 |
428 | # Append
429 | intermediate_output_dict['out'] = out # [B x n_in_channels x ...]
430 |
431 | return out,intermediate_output_dict
432 |
class DiffusionUNetLegacy(nn.Module):
    """
    U-Net for diffusion models (legacy)

    Encoder: per block, a ResBlock whose width follows 'chnnel_multiples'
    and whose (optional) down-sampling follows 'updown_rates', optionally
    followed by an AttentionBlock. The decoder mirrors the encoder and
    concatenates the stored encoder outputs (U-Net skip connections)
    before each block.

    NOTE(review): 'chnnel_multiples' is a misspelling of 'channel_multiples'
    but is kept as-is so existing callers (keyword arguments) keep working.
    """
    def __init__(
        self,
        name = 'unet',
        dims = 1, # spatial dimension, if dims==1, [B x C x L], if dims==2, [B x C x W x H]
        n_in_channels = 128, # input channels
        n_base_channels = 64, # base channel size
        n_emb_dim = 128, # time embedding size
        n_cond_dim = 0, # conditioning vector size (default is 0 indicating an unconditional model)
        n_enc_blocks = 3, # number of encoder blocks
        n_dec_blocks = 3, # number of decoder blocks
        n_groups = 16, # group norm parameter
        n_heads = 4, # number of heads
        actv = nn.SiLU(),
        kernel_size = 3, # kernel size
        padding = 1,
        use_attention = True,
        skip_connection = True, # (optional) additional final skip connection
        use_scale_shift_norm = True, # positional embedding handling
        chnnel_multiples = (1,2,4),
        updown_rates = (2,2,2),
        device = 'cpu',
    ):
        """
        Build the legacy diffusion U-Net.
        'chnnel_multiples' and 'updown_rates' must have length n_enc_blocks
        (== n_dec_blocks); they are consumed forward in the encoder and
        reversed in the decoder.
        """
        super().__init__()
        self.name = name
        self.dims = dims
        self.n_in_channels = n_in_channels
        self.n_base_channels = n_base_channels
        self.n_emb_dim = n_emb_dim
        self.n_cond_dim = n_cond_dim
        self.n_enc_blocks = n_enc_blocks
        self.n_dec_blocks = n_dec_blocks
        self.n_groups = n_groups
        self.n_heads = n_heads
        self.actv = actv
        self.kernel_size = kernel_size
        self.padding = padding
        self.use_attention = use_attention
        self.skip_connection = skip_connection
        self.use_scale_shift_norm = use_scale_shift_norm
        self.chnnel_multiples = chnnel_multiples
        self.updown_rates = updown_rates
        self.device = device

        # Time embedding
        self.time_embed = nn.Sequential(
            nn.Linear(in_features=self.n_base_channels,out_features=self.n_emb_dim),
            nn.SiLU(),
            nn.Linear(in_features=self.n_emb_dim,out_features=self.n_emb_dim),
        ).to(self.device)

        # Conditional embedding
        if self.n_cond_dim > 0:
            self.cond_embed = nn.Sequential(
                nn.Linear(in_features=self.n_cond_dim,out_features=self.n_emb_dim),
                nn.SiLU(),
                nn.Linear(in_features=self.n_emb_dim,out_features=self.n_emb_dim),
            ).to(self.device)

        # Lifting (1x1 conv)
        self.lift = conv_nd(
            dims = self.dims,
            in_channels = self.n_in_channels,
            out_channels = self.n_base_channels,
            kernel_size = 1,
        ).to(device)

        # Encoder
        self.enc_layers = []
        n_channels2cat = [] # channel size to concat for decoder (note that we should use .pop() )
        for e_idx in range(self.n_enc_blocks): # for each encoder block
            if e_idx == 0:
                in_channel = self.n_base_channels
                out_channel = self.n_base_channels*self.chnnel_multiples[e_idx]
            else:
                in_channel = self.n_base_channels*self.chnnel_multiples[e_idx-1]
                out_channel = self.n_base_channels*self.chnnel_multiples[e_idx]
            n_channels2cat.append(out_channel) # append out_channel
            updown_rate = updown_rates[e_idx]

            # Residual block in encoder
            self.enc_layers.append(
                ResBlock(
                    name = 'res',
                    n_channels = in_channel,
                    n_emb_channels = self.n_emb_dim,
                    n_out_channels = out_channel,
                    n_groups = self.n_groups,
                    dims = self.dims,
                    actv = self.actv,
                    kernel_size = self.kernel_size,
                    padding = self.padding,
                    downsample = updown_rate != 1,
                    down_rate = updown_rate,
                    use_scale_shift_norm = self.use_scale_shift_norm,
                ).to(device)
            )
            # Attention block in encoder
            if self.use_attention:
                self.enc_layers.append(
                    AttentionBlock(
                        name = 'att',
                        n_channels = out_channel,
                        n_heads = self.n_heads,
                        n_groups = self.n_groups,
                    ).to(device)
                )

        # Mid (1x1 conv bottleneck; keeps the deepest channel width)
        self.mid = conv_nd(
            dims = self.dims,
            in_channels = self.n_base_channels*self.chnnel_multiples[-1],
            out_channels = self.n_base_channels*self.chnnel_multiples[-1],
            kernel_size = 1,
        ).to(device)

        # Decoder
        self.dec_layers = []
        for d_idx in range(self.n_dec_blocks):
            # Residual block in decoder
            # (input width includes the popped skip-connection channels)
            if d_idx == 0: # first decoder
                in_channel = self.chnnel_multiples[::-1][d_idx]*self.n_base_channels + n_channels2cat.pop()
                out_channel = self.chnnel_multiples[::-1][d_idx]*self.n_base_channels
            else:
                in_channel = self.chnnel_multiples[::-1][d_idx-1]*self.n_base_channels + n_channels2cat.pop()
                out_channel = self.chnnel_multiples[::-1][d_idx]*self.n_base_channels

            updown_rate = updown_rates[::-1][d_idx]

            self.dec_layers.append(
                ResBlock(
                    name = 'res',
                    n_channels = in_channel,
                    n_emb_channels = self.n_emb_dim,
                    n_out_channels = out_channel,
                    n_groups = self.n_groups,
                    dims = self.dims,
                    actv = self.actv,
                    kernel_size = self.kernel_size,
                    padding = self.padding,
                    upsample = updown_rate != 1,
                    up_rate = updown_rate,
                    use_scale_shift_norm = self.use_scale_shift_norm,
                ).to(device)
            )
            # Attention block in decoder
            if self.use_attention:
                self.dec_layers.append(
                    AttentionBlock(
                        name = 'att',
                        n_channels = out_channel,
                        n_heads = self.n_heads,
                        n_groups = self.n_groups,
                    ).to(device)
                )

        # Projection (last skip still concatenates the lifted input, hence 1+multiples[0])
        self.proj = conv_nd(
            dims = self.dims,
            in_channels = (1+self.chnnel_multiples[0])*self.n_base_channels,
            out_channels = self.n_in_channels,
            kernel_size = 1,
        ).to(device)

        # Define U-net
        self.enc_net = nn.Sequential()
        for l_idx,layer in enumerate(self.enc_layers):
            self.enc_net.add_module(
                name = 'enc_%02d'%(l_idx),
                module = TimestepEmbedSequential(layer)
            )
        self.dec_net = nn.Sequential()
        for l_idx,layer in enumerate(self.dec_layers):
            self.dec_net.add_module(
                name = 'dec_%02d'%(l_idx),
                module = TimestepEmbedSequential(layer)
            )

    def forward(self,x,timesteps,c=None):
        """
        :param x: [B x n_in_channels x ...]
        :param timesteps: [B]
        :param c: [B x n_cond_dim] conditioning vector (only used when n_cond_dim > 0)
        :return: [B x n_in_channels x ...], same shape as x
        """
        intermediate_output_dict = {}
        intermediate_output_dict['x'] = x

        # time embedding
        emb = self.time_embed(
            timestep_embedding(timesteps,self.n_base_channels)
        ) # [B x n_emb_dim]

        # conditional embedding
        if self.n_cond_dim > 0:
            cond = self.cond_embed(c)
            emb = emb + cond # [B x n_base_channels]

        # Lift input
        h = self.lift(x) # [B x n_base_channels x ...]
        if isinstance(h,tuple): h = h[0] # in case of having tuple
        intermediate_output_dict['x_lifted'] = h

        # Encoder
        self.h_enc_list = [h] # start with lifted input
        for m_idx,module in enumerate(self.enc_net):
            h = module(h,emb)
            if isinstance(h,tuple): h = h[0] # in case of having tuple
            # Append
            module_name = module[0].name
            intermediate_output_dict['h_enc_%s_%02d'%(module_name,m_idx)] = h
            # Append encoder output
            # (with attention, each (res,att) pair yields one skip tensor)
            if self.use_attention:
                if (m_idx%2) == 1:
                    self.h_enc_list.append(h)
            else:
                self.h_enc_list.append(h)

        # Mid
        h = self.mid(h)
        if isinstance(h,tuple): h = h[0] # in case of having tuple

        # Decoder
        # (concatenate the matching skip tensor before each res block)
        for m_idx,module in enumerate(self.dec_net):
            if self.use_attention:
                if (m_idx%2) == 0:
                    h = th.cat((h,self.h_enc_list.pop()),dim=1)
            else:
                h = th.cat((h,self.h_enc_list.pop()),dim=1)
            h = module(h,emb) # [B x n_base_channels x ...]
            if isinstance(h,tuple): h = h[0] # in case of having tuple
            # Append
            module_name = module[0].name
            intermediate_output_dict['h_dec_%s_%02d'%(module_name,m_idx)] = h

        # Projection (remaining skip is the lifted input itself)
        h = th.cat((h,self.h_enc_list.pop()),dim=1)

        if self.skip_connection:
            out = self.proj(h) + x # [B x n_in_channels x ...]
        else:
            out = self.proj(h) # [B x n_in_channels x ...]

        # Append
        intermediate_output_dict['out'] = out # [B x n_in_channels x ...]

        return out,intermediate_output_dict
683 |
684 |
685 |
686 |
687 |
688 |
689 |
690 |
691 |
692 |
693 |
694 |
695 |
696 |
697 |
698 |
699 |
700 |
701 |
702 |
703 |
704 |
705 |
706 |
707 |
708 |
709 |
710 |
711 |
712 |
def get_param_groups_and_shapes(named_model_params):
    """
    Split named parameters into two groups: scalars/vectors (ndim <= 1)
    and matrices (ndim > 1), each paired with the flat shape used when the
    group is packed into a single master parameter.
    """
    params = list(named_model_params)
    low_rank = [(name, param) for name, param in params if param.ndim <= 1]
    high_rank = [(name, param) for name, param in params if param.ndim > 1]
    return [(low_rank, -1), (high_rank, (1, -1))]
724 |
def make_master_params(param_groups_and_shapes):
    """
    Copy model parameters into a (differently-shaped) list of full-precision
    parameters: one flat fp32 master parameter per group.
    """
    master_params = []
    for named_params, shape in param_groups_and_shapes:
        detached = [param.detach().float() for _, param in named_params]
        flat = _flatten_dense_tensors(detached).view(shape)
        master = nn.Parameter(flat)
        master.requires_grad = True
        master_params.append(master)
    return master_params
740 |
def unflatten_master_params(param_group, master_param):
    """Split a flat master parameter back into tensors shaped like the params in *param_group*."""
    shaped_like = [param for _, param in param_group]
    return _unflatten_dense_tensors(master_param, shaped_like)
743 |
def master_params_to_state_dict(
    model, param_groups_and_shapes, master_params, use_fp16
):
    """
    Build a state dict for *model* whose parameter values are taken from
    *master_params*.

    :param model: the module whose state dict layout is used
    :param param_groups_and_shapes: grouping from get_param_groups_and_shapes (fp16 path)
    :param master_params: flat master params (fp16) or per-parameter list (fp32)
    :param use_fp16: whether master_params are the flat fp16-training copies
    :return: a state dict mapping parameter names to master values
    """
    # Hoisted out of both branches (was duplicated in each).
    state_dict = model.state_dict()
    if use_fp16:
        # Unflatten each master parameter and scatter back by name.
        for master_param, (param_group, _) in zip(
            master_params, param_groups_and_shapes
        ):
            for (name, _), unflat_master_param in zip(
                param_group, unflatten_master_params(param_group, master_param.view(-1))
            ):
                assert name in state_dict
                state_dict[name] = unflat_master_param
    else:
        # fp32: master_params is simply one tensor per named parameter, in order.
        for i, (name, _value) in enumerate(model.named_parameters()):
            assert name in state_dict
            state_dict[name] = master_params[i]
    return state_dict
763 |
def state_dict_to_master_params(model, state_dict, use_fp16):
    """Inverse of master_params_to_state_dict: rebuild master params from a state dict."""
    if not use_fp16:
        # fp32: one master tensor per model parameter, in declaration order.
        return [state_dict[name] for name, _ in model.named_parameters()]
    # fp16: regroup the state-dict tensors and pack them into flat masters.
    named = [(name, state_dict[name]) for name, _ in model.named_parameters()]
    groups = get_param_groups_and_shapes(named)
    return make_master_params(groups)
774 |
def zero_grad(model_params):
    """Detach and zero the gradient of every parameter that has one."""
    # Mirrors the grad handling in torch.optim.Optimizer.add_param_group:
    # https://pytorch.org/docs/stable/_modules/torch/optim/optimizer.html#Optimizer.add_param_group
    for param in model_params:
        if param.grad is None:
            continue
        param.grad.detach_()
        param.grad.zero_()
781 |
def param_grad_or_zeros(param):
    """Return the detached gradient of *param*, or a zero tensor of the same shape if it has none."""
    grad = param.grad
    return th.zeros_like(param) if grad is None else grad.data.detach()
787 |
def model_grads_to_master_grads(param_groups_and_shapes, master_params):
    """
    Copy the gradients from the model parameters into the master parameters
    from make_master_params(): each group's gradients are flattened into the
    matching master parameter's .grad.
    """
    pairs = zip(master_params, param_groups_and_shapes)
    for master_param, (param_group, shape) in pairs:
        grads = [param_grad_or_zeros(param) for _, param in param_group]
        master_param.grad = _flatten_dense_tensors(grads).view(shape)
799 |
def check_overflow(value):
    """Return True when *value* is +inf, -inf, or NaN (i.e. the scaled gradients overflowed)."""
    inf = float("inf")
    # value != value is the classic NaN test.
    return value != value or value == inf or value == -inf
802 |
def zero_master_grads(master_params):
    """Drop (set to None) the gradient of every master parameter."""
    for master_param in master_params:
        master_param.grad = None
806 |
def master_params_to_model_params(param_groups_and_shapes, master_params):
    """
    Copy the master parameter data back into the model parameters.
    """
    # Without copying to a list, if a generator is passed, this will
    # silently not copy any parameters.
    for master_param, (param_group, _) in zip(master_params, param_groups_and_shapes):
        unflat = unflatten_master_params(param_group, master_param.view(-1))
        for (_, param), unflat_master_param in zip(param_group, unflat):
            param.detach().copy_(unflat_master_param)
818 |
# Initial log2 loss scale for fp16 training (the loss is multiplied by 2**scale).
INITIAL_LOG_LOSS_SCALE = 20.0
class MixedPrecisionTrainer:
    """
    Training helper supporting optional fp16 with dynamic loss scaling.

    With use_fp16=False this is a thin pass-through around ordinary fp32
    training. With use_fp16=True the model's weights are converted to fp16
    while flat fp32 "master" copies are kept for the optimizer; the loss is
    scaled by 2**lg_loss_scale, and the scale shrinks on overflow and grows
    by fp16_scale_growth per successful step.
    """
    def __init__(
        self,
        *,
        model,
        use_fp16=False,
        fp16_scale_growth=1e-3,
        initial_lg_loss_scale=INITIAL_LOG_LOSS_SCALE,
    ):
        self.model = model
        self.use_fp16 = use_fp16
        self.fp16_scale_growth = fp16_scale_growth

        self.model_params = list(self.model.parameters())
        self.master_params = self.model_params  # fp32: masters are the model params
        self.param_groups_and_shapes = None
        self.lg_loss_scale = initial_lg_loss_scale

        if self.use_fp16:
            self.param_groups_and_shapes = get_param_groups_and_shapes(
                self.model.named_parameters()
            )
            self.master_params = make_master_params(self.param_groups_and_shapes)
            self.model.convert_to_fp16()

    def zero_grad(self):
        """Zero the gradients of the (fp16 or fp32) model parameters."""
        zero_grad(self.model_params)

    def backward(self, loss: th.Tensor):
        """Backpropagate *loss*, applying the current loss scale in fp16 mode."""
        if self.use_fp16:
            loss_scale = 2 ** self.lg_loss_scale
            (loss * loss_scale).backward()
        else:
            loss.backward()

    def optimize(self, opt: th.optim.Optimizer):
        """Take one optimizer step; returns False if the step was skipped (fp16 overflow)."""
        if self.use_fp16:
            return self._optimize_fp16(opt)
        else:
            return self._optimize_normal(opt)

    def _optimize_fp16(self, opt: th.optim.Optimizer):
        """fp16 step: copy grads to masters, check overflow, unscale, step, copy back."""
        model_grads_to_master_grads(self.param_groups_and_shapes, self.master_params)
        grad_norm, param_norm = self._compute_norms(grad_scale=2 ** self.lg_loss_scale)
        if check_overflow(grad_norm):
            # Overflow: shrink the loss scale and skip this step.
            self.lg_loss_scale -= 1
            zero_master_grads(self.master_params)
            return False

        # BUGFIX: undo the loss scaling on EVERY master gradient before
        # stepping. The previous code rescaled only master_params[0], so the
        # matrix-group gradients stayed scaled by 2**lg_loss_scale.
        for master_param in self.master_params:
            master_param.grad.mul_(1.0 / (2 ** self.lg_loss_scale))
        opt.step()
        zero_master_grads(self.master_params)
        master_params_to_model_params(self.param_groups_and_shapes, self.master_params)
        self.lg_loss_scale += self.fp16_scale_growth
        return True

    def _optimize_normal(self, opt: th.optim.Optimizer):
        """fp32 step. Norms are computed for parity with the fp16 path (values unused here)."""
        grad_norm, param_norm = self._compute_norms()
        opt.step()
        return True

    def _compute_norms(self, grad_scale=1.0):
        """Return (grad_norm / grad_scale, param_norm) as Euclidean norms over all master params."""
        grad_norm = 0.0
        param_norm = 0.0
        for p in self.master_params:
            with th.no_grad():
                param_norm += th.norm(p, p=2, dtype=th.float32).item() ** 2
                if p.grad is not None:
                    grad_norm += th.norm(p.grad, p=2, dtype=th.float32).item() ** 2
        return np.sqrt(grad_norm) / grad_scale, np.sqrt(param_norm)

    def master_params_to_state_dict(self, master_params):
        """Convert *master_params* into a state dict for self.model."""
        return master_params_to_state_dict(
            self.model, self.param_groups_and_shapes, master_params, self.use_fp16
        )

    def state_dict_to_master_params(self, state_dict):
        """Convert a state dict back into master parameters."""
        return state_dict_to_master_params(self.model, state_dict, self.use_fp16)
898 |
def eval_ddpm_1d(
    model,
    dc,
    n_sample,
    x_0,
    step_list_to_append,
    device,
    cond = None,
    M = None,
    noise_scale = 1.0
):
    """
    Evaluate DDPM in 1D case (ancestral sampling from noise down to t=0)
    :param model: score function
    :param dc: dictionary of diffusion coefficients
    :param n_sample: integer of how many trajectories to sample
    :param x_0: [N x C x L] tensor (only its C and L are used)
    :param step_list_to_append: an ndarray of diffusion steps to append x_t
    :param device: torch device to run on
    :param cond: optional conditioning vector (enables classifier-free guidance)
    :param M: optional argument forwarded to forward_sample
    :param noise_scale: scale applied to the noise added at each reverse step
    :return: list of length dc['T']; entries for steps in step_list_to_append
             hold [n_sample x C x L] tensors, all other entries stay ''
    """
    def gather_coef(key):
        # Gather the per-step coefficient dc[key][t] for the current 'step'
        # and reshape it for broadcasting over [n_sample x C x L].
        return th.gather(
            input = th.from_numpy(dc[key]).to(device), # [T]
            dim   = -1,
            index = step
        ).reshape((-1,1,1)) # [n_sample x 1 x 1]
    model.eval()
    n_data,C,L = x_0.shape
    x_dummy = th.zeros(n_sample,C,L,device=device)
    step_dummy = th.zeros(n_sample).type(th.long).to(device)
    _,x_T = forward_sample(x_dummy,step_dummy,dc,M) # [n_sample x C x L]
    x_t = x_T.clone() # [n_sample x C x L]
    x_t_list = ['']*dc['T'] # empty list
    for t in range(0,dc['T'])[::-1]: # T-1 down to 0
        # Score function
        step = th.full(
            size = (n_sample,),
            fill_value = t,
            device = device,
            dtype = th.long) # [n_sample]
        with th.no_grad():
            if cond is None: # unconditioned model
                eps_t,_ = model(x_t,step) # [n_sample x C x L]
            else:
                # Classifier-free guidance: mix conditional and unconditional scores
                cond_weight = 0.5
                eps_cont_d,_ = model(x_t,step,cond.repeat(n_sample,1))
                eps_uncond_d,_ = model(x_t,step,0.0*cond.repeat(n_sample,1))
                # Addup
                eps_t = (1+cond_weight)*eps_cont_d - cond_weight*eps_uncond_d # [n_sample x C x L]
        betas_t = gather_coef('betas') # [n_sample x 1 x 1]
        sqrt_one_minus_alphas_bar_t = gather_coef('sqrt_one_minus_alphas_bar') # [n_sample x 1 x 1]
        sqrt_recip_alphas_t = gather_coef('sqrt_recip_alphas') # [n_sample x 1 x 1]
        # Compute posterior mean
        mean_t = sqrt_recip_alphas_t * (
            x_t - betas_t*eps_t/sqrt_one_minus_alphas_bar_t
        ) # [n_sample x C x L]
        # Compute posterior variance
        posterior_variance_t = gather_coef('posterior_variance') # [n_sample x 1 x 1]
        # Sample
        if t == 0: # last sampling, use mean
            x_t = mean_t
        else:
            _,noise_t = forward_sample(x_dummy,step_dummy,dc,M) # [n_sample x C x L]
            x_t = mean_t + noise_scale*th.sqrt(posterior_variance_t)*noise_t
        # Append
        if t in step_list_to_append:
            x_t_list[t] = x_t
    model.train()
    return x_t_list # list of [n_sample x C x L]
977 |
def eval_ddpm_2d(
    model,
    dc,
    n_sample,
    x_0,
    step_list_to_append,
    device,
    cond=None,
    M=None,
    noise_scale=1.0
):
    """
    Evaluate DDPM in 2D case (ancestral sampling from noise down to t=0)
    :param model: score function
    :param dc: dictionary of diffusion coefficients
    :param n_sample: integer of how many trajectories to sample
    :param x_0: [N x C x W x H] tensor (only its C, W, and H are used)
    :param step_list_to_append: an ndarray of diffusion steps to append x_t
    :param device: torch device to run on
    :param cond: optional conditioning vector (enables classifier-free guidance)
    :param M: optional argument forwarded to forward_sample
    :param noise_scale: scale applied to the noise added at each reverse step
    :return: list of length dc['T']; entries for steps in step_list_to_append
             hold [n_sample x C x W x H] tensors, all other entries stay ''
    """
    def gather_coef(key):
        # Gather the per-step coefficient dc[key][t] for the current 'step'
        # and reshape it for broadcasting over [n_sample x C x W x H].
        return th.gather(
            input = th.from_numpy(dc[key]).to(device), # [T]
            dim   = -1,
            index = step
        ).reshape((-1,1,1,1)) # [n_sample x 1 x 1 x 1]
    model.eval()
    n_data,C,W,H = x_0.shape
    x_dummy = th.zeros(n_sample,C,W,H,device=device)
    step_dummy = th.zeros(n_sample).type(th.long).to(device)
    _,x_T = forward_sample(x_dummy,step_dummy,dc,M) # [n_sample x C x W x H]
    x_t = x_T.clone() # [n_sample x C x W x H]
    x_t_list = ['']*dc['T'] # empty list
    for t in range(0,dc['T'])[::-1]: # T-1 down to 0
        # Score function
        step = th.full(
            size = (n_sample,),
            fill_value = t,
            device = device,
            dtype = th.long) # [n_sample]
        with th.no_grad():
            if cond is None: # unconditioned model
                eps_t,_ = model(x_t,step) # [n_sample x C x W x H]
            else:
                # Classifier-free guidance: mix conditional and unconditional scores
                cond_weight = 0.5
                eps_cont_d,_ = model(x_t,step,cond.repeat(n_sample,1))
                eps_uncond_d,_ = model(x_t,step,0.0*cond.repeat(n_sample,1))
                # Addup
                eps_t = (1+cond_weight)*eps_cont_d - cond_weight*eps_uncond_d # [n_sample x C x W x H]
        betas_t = gather_coef('betas') # [n_sample x 1 x 1 x 1]
        sqrt_one_minus_alphas_bar_t = gather_coef('sqrt_one_minus_alphas_bar') # [n_sample x 1 x 1 x 1]
        sqrt_recip_alphas_t = gather_coef('sqrt_recip_alphas') # [n_sample x 1 x 1 x 1]
        # Compute posterior mean
        mean_t = sqrt_recip_alphas_t * (
            x_t - betas_t*eps_t/sqrt_one_minus_alphas_bar_t
        ) # [n_sample x C x W x H]
        # Compute posterior variance
        posterior_variance_t = gather_coef('posterior_variance') # [n_sample x 1 x 1 x 1]
        # Sample
        if t == 0: # last sampling, use mean
            x_t = mean_t
        else:
            _,noise_t = forward_sample(x_dummy,step_dummy,dc,M) # [n_sample x C x W x H]
            x_t = mean_t + noise_scale*th.sqrt(posterior_variance_t)*noise_t
        # Append
        if t in step_list_to_append:
            x_t_list[t] = x_t
    model.train()
    return x_t_list # list of [n_sample x C x W x H]
--------------------------------------------------------------------------------
/code/diffusion_resblock.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "00d7ebaa",
6 | "metadata": {},
7 | "source": [
    "### Residual block for diffusion models"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "id": "c37dccb5",
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "PyTorch version:[2.0.1].\n"
22 | ]
23 | }
24 | ],
25 | "source": [
26 | "import numpy as np\n",
27 | "import matplotlib.pyplot as plt\n",
28 | "import torch as th\n",
29 | "import torch.nn as nn\n",
30 | "import torch.nn.functional as F\n",
31 | "from module import (\n",
32 | " ResBlock\n",
33 | ")\n",
34 | "from dataset import mnist\n",
35 | "from util import get_torch_size_string,plot_4x4_torch_tensor\n",
36 | "np.set_printoptions(precision=3)\n",
37 | "th.set_printoptions(precision=3)\n",
38 | "%matplotlib inline\n",
39 | "%config InlineBackend.figure_format='retina'\n",
40 | "print (\"PyTorch version:[%s].\"%(th.__version__))"
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "id": "257ff26d",
46 | "metadata": {},
47 | "source": [
48 | "### 1-D case `[B x C x L]`"
49 | ]
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": 5,
54 | "id": "abd8843c",
55 | "metadata": {
56 | "scrolled": true
57 | },
58 | "outputs": [
59 | {
60 | "name": "stdout",
61 | "output_type": "stream",
62 | "text": [
63 | "1. No upsample nor downsample\n",
64 | " Shape x:[16x32x200] => out:[16x32x400]\n",
65 | "2. Upsample\n",
66 | " Shape x:[16x32x200] => out:[16x32x400]\n",
67 | "3. Downsample\n",
68 | " Shape x:[16x32x200] => out:[16x32x100]\n"
69 | ]
70 | }
71 | ],
72 | "source": [
73 | "# Input\n",
74 | "x = th.randn(16,32,200) # [B x C x L]\n",
75 | "emb = th.randn(16,128) # [B x n_emb_channels]\n",
76 | "\n",
77 | "print (\"1. No upsample nor downsample\")\n",
78 | "resblock = ResBlock(\n",
79 | " n_channels = 32,\n",
80 | " n_emb_channels = 128,\n",
81 | " n_out_channels = 32,\n",
82 | " n_groups = 16,\n",
83 | " dims = 1,\n",
84 | " upsample = True,\n",
85 | " downsample = False,\n",
86 | " down_rate = 1\n",
87 | ")\n",
88 | "out = resblock(x,emb)\n",
89 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
90 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
91 | "\n",
92 | "print (\"2. Upsample\")\n",
93 | "resblock = ResBlock(\n",
94 | " n_channels = 32,\n",
95 | " n_emb_channels = 128,\n",
96 | " n_out_channels = 32,\n",
97 | " n_groups = 16,\n",
98 | " dims = 1,\n",
99 | " upsample = True,\n",
100 | " downsample = False,\n",
101 | " down_rate = 2\n",
102 | ")\n",
103 | "out = resblock(x,emb)\n",
104 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
105 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
106 | "\n",
107 | "print (\"3. Downsample\")\n",
108 | "resblock = ResBlock(\n",
109 | " n_channels = 32,\n",
110 | " n_emb_channels = 128,\n",
111 | " n_out_channels = 32,\n",
112 | " n_groups = 16,\n",
113 | " dims = 1,\n",
114 | " upsample = False,\n",
115 | " downsample = True,\n",
116 | " down_rate = 2\n",
117 | ")\n",
118 | "out = resblock(x,emb)\n",
119 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
120 | " (get_torch_size_string(x),get_torch_size_string(out)))"
121 | ]
122 | },
123 | {
124 | "cell_type": "markdown",
125 | "id": "01d77ea8",
126 | "metadata": {},
127 | "source": [
128 | "### 2-D case `[B x C x W x H]`"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": 3,
134 | "id": "890df0a2",
135 | "metadata": {},
136 | "outputs": [
137 | {
138 | "name": "stdout",
139 | "output_type": "stream",
140 | "text": [
141 | "1. No upsample nor downsample\n",
142 | " Shape x:[16x32x28x28] => out:[16x32x28x28]\n",
143 | "2. Upsample\n",
144 | " Shape x:[16x32x28x28] => out:[16x32x56x56]\n",
145 | "3. Downsample\n",
146 | " Shape x:[16x32x28x28] => out:[16x32x14x14]\n",
147 | "4. (uneven) Upsample\n",
148 | " Shape x:[16x32x28x28] => out:[16x32x56x28]\n",
149 | "5. (uneven) Downsample\n",
150 | " Shape x:[16x32x28x28] => out:[16x32x14x28]\n",
151 | "6. (fake) Downsample\n",
152 | " Shape x:[16x32x28x28] => out:[16x32x28x28]\n"
153 | ]
154 | }
155 | ],
156 | "source": [
157 | "# Input\n",
158 | "x = th.randn(16,32,28,28) # [B x C x W x H]\n",
159 | "emb = th.randn(16,128) # [B x n_emb_channels]\n",
160 | "\n",
161 | "print (\"1. No upsample nor downsample\")\n",
162 | "resblock = ResBlock(\n",
163 | " n_channels = 32,\n",
164 | " n_emb_channels = 128,\n",
165 | " n_out_channels = 32,\n",
166 | " n_groups = 16,\n",
167 | " dims = 2,\n",
168 | " upsample = False,\n",
169 | " downsample = False\n",
170 | ")\n",
171 | "out = resblock(x,emb)\n",
172 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
173 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
174 | "\n",
175 | "print (\"2. Upsample\")\n",
176 | "resblock = ResBlock(\n",
177 | " n_channels = 32,\n",
178 | " n_emb_channels = 128,\n",
179 | " n_out_channels = 32,\n",
180 | " n_groups = 16,\n",
181 | " dims = 2,\n",
182 | " upsample = True,\n",
183 | " downsample = False\n",
184 | ")\n",
185 | "out = resblock(x,emb)\n",
186 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
187 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
188 | "\n",
189 | "print (\"3. Downsample\")\n",
190 | "resblock = ResBlock(\n",
191 | " n_channels = 32,\n",
192 | " n_emb_channels = 128,\n",
193 | " n_out_channels = 32,\n",
194 | " n_groups = 16,\n",
195 | " dims = 2,\n",
196 | " upsample = False,\n",
197 | " downsample = True\n",
198 | ")\n",
199 | "out = resblock(x,emb)\n",
200 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
201 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
202 | "\n",
203 | "print (\"4. (uneven) Upsample\")\n",
204 | "resblock = ResBlock(\n",
205 | " n_channels = 32,\n",
206 | " n_emb_channels = 128,\n",
207 | " n_out_channels = 32,\n",
208 | " n_groups = 16,\n",
209 | " dims = 2,\n",
210 | " upsample = True,\n",
211 | " downsample = False,\n",
212 | " up_rate = (2,1)\n",
213 | ")\n",
214 | "out = resblock(x,emb)\n",
215 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
216 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
217 | "\n",
218 | "print (\"5. (uneven) Downsample\")\n",
219 | "resblock = ResBlock(\n",
220 | " n_channels = 32,\n",
221 | " n_emb_channels = 128,\n",
222 | " n_out_channels = 32,\n",
223 | " n_groups = 16,\n",
224 | " dims = 2,\n",
225 | " upsample = False,\n",
226 | " downsample = True,\n",
227 | " down_rate = (2,1)\n",
228 | ")\n",
229 | "out = resblock(x,emb)\n",
230 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
231 | " (get_torch_size_string(x),get_torch_size_string(out)))\n",
232 | "\n",
233 | "print (\"6. (fake) Downsample\")\n",
234 | "resblock = ResBlock(\n",
235 | " n_channels = 32,\n",
236 | " n_emb_channels = 128,\n",
237 | " n_out_channels = 32,\n",
238 | " n_groups = 16,\n",
239 | " dims = 2,\n",
240 | " upsample = False,\n",
241 | " downsample = True,\n",
242 | " down_rate = (1,1)\n",
243 | ")\n",
244 | "out = resblock(x,emb)\n",
245 | "print (\" Shape x:[%s] => out:[%s]\"%\n",
246 | " (get_torch_size_string(x),get_torch_size_string(out)))"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": null,
252 | "id": "184ebe32",
253 | "metadata": {},
254 | "outputs": [],
255 | "source": []
256 | }
257 | ],
258 | "metadata": {
259 | "kernelspec": {
260 | "display_name": "Python 3 (ipykernel)",
261 | "language": "python",
262 | "name": "python3"
263 | },
264 | "language_info": {
265 | "codemirror_mode": {
266 | "name": "ipython",
267 | "version": 3
268 | },
269 | "file_extension": ".py",
270 | "mimetype": "text/x-python",
271 | "name": "python",
272 | "nbconvert_exporter": "python",
273 | "pygments_lexer": "ipython3",
274 | "version": "3.9.16"
275 | }
276 | },
277 | "nbformat": 4,
278 | "nbformat_minor": 5
279 | }
280 |
--------------------------------------------------------------------------------
/code/diffusion_unet_legacy.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "e331df79-2400-4f60-aca2-15debac3e5de",
6 | "metadata": {},
7 | "source": [
8 | "### U-net Legacy"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "id": "a1021c53-7ada-41a7-86fc-ce2ec7faada5",
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "PyTorch version:[2.0.1].\n"
22 | ]
23 | }
24 | ],
25 | "source": [
26 | "import numpy as np\n",
27 | "import matplotlib.pyplot as plt\n",
28 | "import torch as th\n",
29 | "import torch.nn as nn\n",
30 | "from util import (\n",
31 | " get_torch_size_string\n",
32 | ")\n",
33 | "from diffusion import (\n",
34 | " get_ddpm_constants,\n",
35 | " plot_ddpm_constants,\n",
36 | " DiffusionUNet,\n",
37 | " DiffusionUNetLegacy\n",
38 | ")\n",
39 | "from dataset import mnist\n",
40 | "np.set_printoptions(precision=3)\n",
41 | "th.set_printoptions(precision=3)\n",
42 | "%matplotlib inline\n",
43 | "%config InlineBackend.figure_format='retina'\n",
44 | "print (\"PyTorch version:[%s].\"%(th.__version__))"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": 2,
50 | "id": "bc376285-0fa0-4a0b-aacb-f5b49b0e2f5e",
51 | "metadata": {},
52 | "outputs": [
53 | {
54 | "name": "stdout",
55 | "output_type": "stream",
56 | "text": [
57 | "device:[mps]\n"
58 | ]
59 | }
60 | ],
61 | "source": [
62 | "device = 'mps'\n",
63 | "print (\"device:[%s]\"%(device))"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": 3,
69 | "id": "7c84b758-2535-439c-936b-1a20397d7598",
70 | "metadata": {},
71 | "outputs": [
72 | {
73 | "name": "stdout",
74 | "output_type": "stream",
75 | "text": [
76 | "Ready.\n"
77 | ]
78 | }
79 | ],
80 | "source": [
81 | "dc = get_ddpm_constants(\n",
82 | " schedule_name = 'cosine', # 'linear', 'cosine'\n",
83 | " T = 1000,\n",
84 | " np_type = np.float32)\n",
85 | "print(\"Ready.\") "
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "id": "d8641679-0ee9-4f4b-9248-23e0c2e0de47",
91 | "metadata": {},
92 | "source": [
93 | "### Guided U-net\n",
94 | "![unet](../img/unet.jpg)"
95 | ]
96 | },
97 | {
98 | "cell_type": "markdown",
99 | "id": "08f6f417-81a6-49fe-817c-663acd47414a",
100 | "metadata": {},
101 | "source": [
102 | "### 1-D case: `[B x C x L]` with attention"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 4,
108 | "id": "57655fad-1bcb-4a5c-9626-ded7373e2b9e",
109 | "metadata": {},
110 | "outputs": [
111 | {
112 | "name": "stdout",
113 | "output_type": "stream",
114 | "text": [
115 | "Input: x:[2x3x256] timesteps:[2]\n",
116 | "Output: out:[2x3x256]\n",
117 | "[ 0] key:[ x] shape:[ 2x3x256]\n",
118 | "[ 1] key:[ x_lifted] shape:[ 2x32x256]\n",
119 | "[ 2] key:[h_enc_res_00] shape:[ 2x32x128]\n",
120 | "[ 3] key:[h_enc_att_01] shape:[ 2x32x128]\n",
121 | "[ 4] key:[h_enc_res_02] shape:[ 2x64x64]\n",
122 | "[ 5] key:[h_enc_att_03] shape:[ 2x64x64]\n",
123 | "[ 6] key:[h_enc_res_04] shape:[ 2x128x32]\n",
124 | "[ 7] key:[h_enc_att_05] shape:[ 2x128x32]\n",
125 | "[ 8] key:[h_enc_res_06] shape:[ 2x256x16]\n",
126 | "[ 9] key:[h_enc_att_07] shape:[ 2x256x16]\n",
127 | "[10] key:[h_dec_res_00] shape:[ 2x256x32]\n",
128 | "[11] key:[h_dec_att_01] shape:[ 2x256x32]\n",
129 | "[12] key:[h_dec_res_02] shape:[ 2x128x64]\n",
130 | "[13] key:[h_dec_att_03] shape:[ 2x128x64]\n",
131 | "[14] key:[h_dec_res_04] shape:[ 2x64x128]\n",
132 | "[15] key:[h_dec_att_05] shape:[ 2x64x128]\n",
133 | "[16] key:[h_dec_res_06] shape:[ 2x32x256]\n",
134 | "[17] key:[h_dec_att_07] shape:[ 2x32x256]\n",
135 | "[18] key:[ out] shape:[ 2x3x256]\n"
136 | ]
137 | }
138 | ],
139 | "source": [
140 | "unet = DiffusionUNetLegacy(\n",
141 | " name = 'unet',\n",
142 | " dims = 1,\n",
143 | " n_in_channels = 3,\n",
144 | " n_base_channels = 32,\n",
145 | " n_emb_dim = 128,\n",
146 | " n_enc_blocks = 4, # number of encoder blocks\n",
147 | " n_dec_blocks = 4, # number of decoder blocks\n",
148 | "    n_groups         = 16,   # group norm parameter\n",
149 | " use_attention = True,\n",
150 | " skip_connection = True, # additional skip connection\n",
151 | " chnnel_multiples = (1,2,4,8),\n",
152 | " updown_rates = (2,2,2,2),\n",
153 | " device = device,\n",
154 | ")\n",
155 | "# Inputs, timesteps:[B] and x:[B x C x L]\n",
156 | "batch_size = 2\n",
157 | "x = th.randn(batch_size,3,256).to(device) # [B x C x L]\n",
158 | "timesteps = th.linspace(1,dc['T'],batch_size).to(th.int64).to(device) # [B]\n",
159 | "out,intermediate_output_dict = unet(x,timesteps)\n",
160 | "print (\"Input: x:[%s] timesteps:[%s]\"%(\n",
161 | " get_torch_size_string(x),get_torch_size_string(timesteps)\n",
162 | "))\n",
163 | "print (\"Output: out:[%s]\"%(get_torch_size_string(out)))\n",
164 | "# Print intermediate layers\n",
165 | "for k_idx,key in enumerate(intermediate_output_dict.keys()):\n",
166 | " z = intermediate_output_dict[key]\n",
167 | " print (\"[%2d] key:[%12s] shape:[%12s]\"%(k_idx,key,get_torch_size_string(z)))"
168 | ]
169 | },
170 | {
171 | "cell_type": "markdown",
172 | "id": "05572674-6d35-46a6-a880-d41e4b4e5111",
173 | "metadata": {},
174 | "source": [
175 | "### 1-D case: `[B x C x L]` without attention"
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": 5,
181 | "id": "6962a40a-0c14-4c4a-a024-06b64b076df5",
182 | "metadata": {},
183 | "outputs": [
184 | {
185 | "name": "stdout",
186 | "output_type": "stream",
187 | "text": [
188 | "Input: x:[2x3x256] timesteps:[2]\n",
189 | "Output: out:[2x3x256]\n",
190 | "[ 0] key:[ x] shape:[ 2x3x256]\n",
191 | "[ 1] key:[ x_lifted] shape:[ 2x32x256]\n",
192 | "[ 2] key:[h_enc_res_00] shape:[ 2x32x128]\n",
193 | "[ 3] key:[h_enc_res_01] shape:[ 2x64x64]\n",
194 | "[ 4] key:[h_enc_res_02] shape:[ 2x128x32]\n",
195 | "[ 5] key:[h_enc_res_03] shape:[ 2x256x16]\n",
196 | "[ 6] key:[h_dec_res_00] shape:[ 2x256x32]\n",
197 | "[ 7] key:[h_dec_res_01] shape:[ 2x128x64]\n",
198 | "[ 8] key:[h_dec_res_02] shape:[ 2x64x128]\n",
199 | "[ 9] key:[h_dec_res_03] shape:[ 2x32x256]\n",
200 | "[10] key:[ out] shape:[ 2x3x256]\n"
201 | ]
202 | }
203 | ],
204 | "source": [
205 | "unet = DiffusionUNetLegacy(\n",
206 | " name = 'unet',\n",
207 | " dims = 1,\n",
208 | " n_in_channels = 3,\n",
209 | " n_base_channels = 32,\n",
210 | " n_emb_dim = 128,\n",
211 | " n_enc_blocks = 4, # number of encoder blocks\n",
212 | " n_dec_blocks = 4, # number of decoder blocks\n",
213 | "    n_groups         = 16,   # group norm parameter\n",
214 | " use_attention = False,\n",
215 | " skip_connection = True, # additional skip connection\n",
216 | " chnnel_multiples = (1,2,4,8),\n",
217 | " updown_rates = (2,2,2,2),\n",
218 | " device = device,\n",
219 | ")\n",
220 | "# Inputs, timesteps:[B] and x:[B x C x L]\n",
221 | "batch_size = 2\n",
222 | "x = th.randn(batch_size,3,256).to(device) # [B x C x L]\n",
223 | "timesteps = th.linspace(1,dc['T'],batch_size).to(th.int64).to(device) # [B]\n",
224 | "out,intermediate_output_dict = unet(x,timesteps)\n",
225 | "print (\"Input: x:[%s] timesteps:[%s]\"%(\n",
226 | " get_torch_size_string(x),get_torch_size_string(timesteps)\n",
227 | "))\n",
228 | "print (\"Output: out:[%s]\"%(get_torch_size_string(out)))\n",
229 | "# Print intermediate layers\n",
230 | "for k_idx,key in enumerate(intermediate_output_dict.keys()):\n",
231 | " z = intermediate_output_dict[key]\n",
232 | " print (\"[%2d] key:[%12s] shape:[%12s]\"%(k_idx,key,get_torch_size_string(z)))"
233 | ]
234 | },
235 | {
236 | "cell_type": "code",
237 | "execution_count": null,
238 | "id": "1881fc4e-d713-4c24-955c-85e3c0ece8ff",
239 | "metadata": {},
240 | "outputs": [],
241 | "source": []
242 | },
243 | {
244 | "cell_type": "markdown",
245 | "id": "c7143a40-7d2f-46e6-a6b6-a089a1227bd7",
246 | "metadata": {},
247 | "source": [
248 | "### 2-D case: `[B x C x W x H]` without attention"
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": 6,
254 | "id": "0c3da514-b189-4c01-ba6f-1f2e8daa24b0",
255 | "metadata": {},
256 | "outputs": [
257 | {
258 | "name": "stdout",
259 | "output_type": "stream",
260 | "text": [
261 | "Input: x:[2x3x256x256] timesteps:[2]\n",
262 | "Output: out:[2x3x256x256]\n",
263 | "[ 0] key:[ x] shape:[ 2x3x256x256]\n",
264 | "[ 1] key:[ x_lifted] shape:[2x32x256x256]\n",
265 | "[ 2] key:[h_enc_res_00] shape:[2x32x256x256]\n",
266 | "[ 3] key:[h_enc_res_01] shape:[2x64x256x256]\n",
267 | "[ 4] key:[h_enc_res_02] shape:[2x128x256x256]\n",
268 | "[ 5] key:[h_dec_res_00] shape:[2x128x256x256]\n",
269 | "[ 6] key:[h_dec_res_01] shape:[2x64x256x256]\n",
270 | "[ 7] key:[h_dec_res_02] shape:[2x32x256x256]\n",
271 | "[ 8] key:[ out] shape:[ 2x3x256x256]\n"
272 | ]
273 | }
274 | ],
275 | "source": [
276 | "unet = DiffusionUNetLegacy(\n",
277 | " name = 'unet',\n",
278 | " dims = 2,\n",
279 | " n_in_channels = 3,\n",
280 | " n_base_channels = 32,\n",
281 | " n_emb_dim = 128,\n",
282 | " n_enc_blocks = 3, # number of encoder blocks\n",
283 | " n_dec_blocks = 3, # number of decoder blocks\n",
284 | "    n_groups         = 16,   # group norm parameter\n",
285 | " use_attention = False,\n",
286 | " skip_connection = True, # additional skip connection\n",
287 | " chnnel_multiples = (1,2,4),\n",
288 | " updown_rates = (1,1,1),\n",
289 | " device = device,\n",
290 | ")\n",
291 | "# Inputs, timesteps:[B] and x:[B x C x W x H]\n",
292 | "batch_size = 2\n",
293 | "x = th.randn(batch_size,3,256,256).to(device) # [B x C x W x H]\n",
294 | "timesteps = th.linspace(1,dc['T'],batch_size).to(th.int64).to(device) # [B]\n",
295 | "out,intermediate_output_dict = unet(x,timesteps)\n",
296 | "print (\"Input: x:[%s] timesteps:[%s]\"%(\n",
297 | " get_torch_size_string(x),get_torch_size_string(timesteps)\n",
298 | "))\n",
299 | "print (\"Output: out:[%s]\"%(get_torch_size_string(out)))\n",
300 | "# Print intermediate layers\n",
301 | "for k_idx,key in enumerate(intermediate_output_dict.keys()):\n",
302 | " z = intermediate_output_dict[key]\n",
303 | " print (\"[%2d] key:[%12s] shape:[%12s]\"%(k_idx,key,get_torch_size_string(z)))"
304 | ]
305 | },
306 | {
307 | "cell_type": "markdown",
308 | "id": "66e7d820-fc72-41c3-88da-09e70147c3e0",
309 | "metadata": {},
310 | "source": [
311 | "### 2-D case: `[B x C x W x H]` without attention + updown pooling"
312 | ]
313 | },
314 | {
315 | "cell_type": "code",
316 | "execution_count": 7,
317 | "id": "12680faa-9bf0-4ef9-b65f-08952641dfa3",
318 | "metadata": {},
319 | "outputs": [
320 | {
321 | "name": "stdout",
322 | "output_type": "stream",
323 | "text": [
324 | "Input: x:[2x3x256x256] timesteps:[2]\n",
325 | "Output: out:[2x3x256x256]\n",
326 | "[ 0] key:[ x] shape:[ 2x3x256x256]\n",
327 | "[ 1] key:[ x_lifted] shape:[2x32x256x256]\n",
328 | "[ 2] key:[h_enc_res_00] shape:[2x32x256x256]\n",
329 | "[ 3] key:[h_enc_res_01] shape:[2x64x128x128]\n",
330 | "[ 4] key:[h_enc_res_02] shape:[ 2x128x64x64]\n",
331 | "[ 5] key:[h_dec_res_00] shape:[2x128x128x128]\n",
332 | "[ 6] key:[h_dec_res_01] shape:[2x64x256x256]\n",
333 | "[ 7] key:[h_dec_res_02] shape:[2x32x256x256]\n",
334 | "[ 8] key:[ out] shape:[ 2x3x256x256]\n"
335 | ]
336 | }
337 | ],
338 | "source": [
339 | "unet = DiffusionUNetLegacy(\n",
340 | " name = 'unet',\n",
341 | " dims = 2,\n",
342 | " n_in_channels = 3,\n",
343 | " n_base_channels = 32,\n",
344 | " n_emb_dim = 128,\n",
345 | " n_enc_blocks = 3, # number of encoder blocks\n",
346 | " n_dec_blocks = 3, # number of decoder blocks\n",
347 | "    n_groups         = 16,   # group norm parameter\n",
348 | " use_attention = False,\n",
349 | " skip_connection = True, # additional skip connection\n",
350 | " chnnel_multiples = (1,2,4),\n",
351 | " updown_rates = (1,2,2),\n",
352 | " device = device,\n",
353 | ")\n",
354 | "# Inputs, timesteps:[B] and x:[B x C x W x H]\n",
355 | "batch_size = 2\n",
356 | "x = th.randn(batch_size,3,256,256).to(device) # [B x C x W x H]\n",
357 | "timesteps = th.linspace(1,dc['T'],batch_size).to(th.int64).to(device) # [B]\n",
358 | "out,intermediate_output_dict = unet(x,timesteps)\n",
359 | "print (\"Input: x:[%s] timesteps:[%s]\"%(\n",
360 | " get_torch_size_string(x),get_torch_size_string(timesteps)\n",
361 | "))\n",
362 | "print (\"Output: out:[%s]\"%(get_torch_size_string(out)))\n",
363 | "# Print intermediate layers\n",
364 | "for k_idx,key in enumerate(intermediate_output_dict.keys()):\n",
365 | " z = intermediate_output_dict[key]\n",
366 | " print (\"[%2d] key:[%12s] shape:[%12s]\"%(k_idx,key,get_torch_size_string(z)))"
367 | ]
368 | },
369 | {
370 | "cell_type": "code",
371 | "execution_count": null,
372 | "id": "b9bf9fc1-90f1-4700-a4b1-360f730d0e51",
373 | "metadata": {},
374 | "outputs": [],
375 | "source": []
376 | }
377 | ],
378 | "metadata": {
379 | "kernelspec": {
380 | "display_name": "Python 3 (ipykernel)",
381 | "language": "python",
382 | "name": "python3"
383 | },
384 | "language_info": {
385 | "codemirror_mode": {
386 | "name": "ipython",
387 | "version": 3
388 | },
389 | "file_extension": ".py",
390 | "mimetype": "text/x-python",
391 | "name": "python",
392 | "nbconvert_exporter": "python",
393 | "pygments_lexer": "ipython3",
394 | "version": "3.9.16"
395 | }
396 | },
397 | "nbformat": 4,
398 | "nbformat_minor": 5
399 | }
400 |
--------------------------------------------------------------------------------
/code/mdn.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch as th
3 | import torch.nn as nn
4 | import matplotlib.pyplot as plt
5 | from util import th2np
6 |
def get_argmax_mu(pi, mu):
    """
    Select, for each sample and output dimension, the mean of the most
    probable mixture component.

    :param pi: mixture weights [N x K x D]
    :param mu: component means [N x K x D]
    :return: means of the argmax components [N x D]
    """
    best_k = pi.argmax(dim=1, keepdim=True)   # [N x 1 x D] component index per dim
    picked = mu.gather(dim=1, index=best_k)   # [N x 1 x D] mean of that component
    return picked.squeeze(dim=1)              # [N x D]
15 |
def gmm_forward(pi, mu, sigma, data):
    """
    Evaluate a per-dimension Gaussian mixture model on `data`.

    Output dimensions are treated as independent: the per-sample
    log-likelihood is the sum of per-dimension mixture log-densities.

    :param pi:    GMM mixture weights [N x K x D]
    :param mu:    GMM means [N x K x D]
    :param sigma: GMM stds [N x K x D]
    :param data:  data [N x D]
    :return: dict with intermediate tensors, per-dim mixture densities
             ('probs' [N x D]), per-sample log-likelihoods ('log_probs' [N]),
             negative log-likelihoods ('nlls' [N]), and the means of the most
             probable components ('argmax_mu' [N x D])
    """
    inv_sqrt_2pi = 1 / np.sqrt(2 * np.pi)
    data_usq = data.unsqueeze(1)          # [N x 1 x D]
    data_exp = data_usq.expand_as(sigma)  # [N x K x D]
    z = (data_exp - mu) / sigma           # standardized residuals [N x K x D]
    comp_pdf = inv_sqrt_2pi * th.exp(-0.5 * z ** 2) / sigma  # per-component density
    probs = th.sum(pi * comp_pdf, dim=1)  # mixture density per dim [N x D]
    log_probs = th.log(probs).sum(dim=1)  # independent dims -> sum of logs [N]
    nlls = -log_probs                     # [N]

    # Mean of the most probable component per (sample, dim) [N x D]
    best_k = pi.argmax(dim=1, keepdim=True)
    argmax_mu = mu.gather(dim=1, index=best_k).squeeze(dim=1)

    return {
        'data_usq': data_usq, 'data_exp': data_exp,
        'probs': probs, 'log_probs': log_probs, 'nlls': nlls,
        'argmax_mu': argmax_mu,
    }
43 |
def gmm_uncertainties(pi, mu, sigma):
    """
    Decompose the GMM predictive uncertainty into two parts.

    Epistemic: weighted variance of the component means around the mixture
    mean (disagreement between components). Aleatoric: weighted average of
    the component stds (noise within components). Both are sqrt-ed for scale.

    :param pi:    [N x K x D]
    :param mu:    [N x K x D]
    :param sigma: [N x K x D]
    :return: (epis_unct, alea_unct), each [N x D]
    """
    # Epistemic uncertainty: pi-weighted spread of component means
    mixture_mean = th.sum(pi * mu, dim=1, keepdim=True)          # [N x 1 x D]
    epis_unct = th.sum(pi * th.square(mu - mixture_mean), dim=1)  # [N x D]

    # Aleatoric uncertainty: pi-weighted average of component stds
    alea_unct = th.sum(pi * sigma, dim=1)  # [N x D]

    # (Optional) sqrt operation helps scaling
    return th.sqrt(epis_unct), th.sqrt(alea_unct)
62 |
class MixturesOfGaussianLayer(nn.Module):
    """
    Output head producing the parameters of a per-dimension Gaussian mixture:
    weights `pi`, means `mu`, and stds `sigma`, each shaped [N x K x D].
    """
    def __init__(
        self,
        in_dim,
        out_dim,
        k,
        sig_max=None
    ):
        """
        :param in_dim:  input feature dimension
        :param out_dim: output dimension D
        :param k:       number of mixture components K
        :param sig_max: if given, sigma = sig_max * sigmoid(raw) in (0, sig_max);
                        otherwise sigma = exp(raw) (unbounded positive)
        """
        super(MixturesOfGaussianLayer, self).__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.k = k
        self.sig_max = sig_max

        # One linear head per mixture parameter, each mapping to K*D logits
        self.fc_pi = nn.Linear(self.in_dim, self.k * self.out_dim)
        self.fc_mu = nn.Linear(self.in_dim, self.k * self.out_dim)
        self.fc_sigma = nn.Linear(self.in_dim, self.k * self.out_dim)

    def forward(self, x):
        """
        :param x: input features [N x in_dim]
        :return: (pi, mu, sigma), each [N x K x D]; pi sums to 1 over K
        """
        kd_shape = (-1, self.k, self.out_dim)
        pi = th.softmax(self.fc_pi(x).reshape(kd_shape), dim=1)  # weights over K
        mu = self.fc_mu(x).reshape(kd_shape)
        raw_sigma = self.fc_sigma(x).reshape(kd_shape)
        if self.sig_max is None:
            sigma = th.exp(raw_sigma)                      # positive, unbounded
        else:
            sigma = self.sig_max * th.sigmoid(raw_sigma)   # bounded in (0, sig_max)
        return pi, mu, sigma
95 |
class MixtureDensityNetwork(nn.Module):
    """
    Mixture density network: an MLP trunk (dense -> [batchnorm] -> activation
    -> dropout per hidden layer) followed by a MixturesOfGaussianLayer head
    that outputs (pi, mu, sigma), each shaped [N x K x D].
    """
    def __init__(
        self,
        name       = 'mdn',
        x_dim      = 1,
        y_dim      = 1,
        k          = 5,
        h_dim_list = [32,32],
        actv       = nn.ReLU(),
        sig_max    = 1.0,
        mu_min     = -3.0,
        mu_max     = +3.0,
        p_drop     = 0.1,
        use_bn     = False,
    ):
        """
        :param name:       model name
        :param x_dim:      input dimension
        :param y_dim:      output dimension D
        :param k:          number of mixture components K
        :param h_dim_list: hidden-layer widths of the MLP trunk
        :param actv:       activation module (one instance shared by all layers)
        :param sig_max:    sigma bound passed to MixturesOfGaussianLayer
        :param mu_min:     lower bound for the mu-bias init heuristic
        :param mu_max:     upper bound for the mu-bias init heuristic
        :param p_drop:     dropout probability
        :param use_bn:     insert BatchNorm1d after each dense layer
        """
        super(MixtureDensityNetwork,self).__init__()
        self.name       = name
        self.x_dim      = x_dim
        self.y_dim      = y_dim
        self.k          = k
        self.h_dim_list = h_dim_list
        self.actv       = actv
        self.sig_max    = sig_max
        self.mu_min     = mu_min
        self.mu_max     = mu_max
        self.p_drop     = p_drop
        self.use_bn     = use_bn

        # Declare layers
        self.layer_list = []
        h_dim_prev = self.x_dim
        for h_dim in self.h_dim_list:
            # dense -> batchnorm -> actv -> dropout
            self.layer_list.append(nn.Linear(h_dim_prev,h_dim))
            if self.use_bn: self.layer_list.append(nn.BatchNorm1d(num_features=h_dim)) # (optional) batchnorm
            self.layer_list.append(self.actv)
            # NOTE(review): input here is 2-D [N x h_dim]; nn.Dropout1d expects a
            # channel dimension ([N x C x L]) — confirm intended vs plain nn.Dropout.
            self.layer_list.append(nn.Dropout1d(p=self.p_drop))
            h_dim_prev = h_dim
        self.layer_list.append(
            MixturesOfGaussianLayer(
                in_dim  = h_dim_prev,
                out_dim = self.y_dim,
                k       = self.k,
                sig_max = self.sig_max
            )
        )

        # Define network
        self.net = nn.Sequential()
        self.layer_names = []
        for l_idx,layer in enumerate(self.layer_list):
            layer_name = "%s_%02d"%(type(layer).__name__.lower(),l_idx)
            self.layer_names.append(layer_name)
            self.net.add_module(layer_name,layer)

        # Initialize parameters
        self.init_param(VERBOSE=False)

    def init_param(self,VERBOSE=False):
        """
        Initialize parameters: Kaiming for conv/dense weights, constant for
        batchnorm, and a uniform [mu_min, mu_max] heuristic for the mu bias.
        """
        for m_idx,m in enumerate(self.modules()):
            if VERBOSE:
                print ("[%02d]"%(m_idx))
            if isinstance(m,nn.Conv2d): # init conv
                nn.init.kaiming_normal_(m.weight)
                nn.init.zeros_(m.bias)
            elif isinstance(m,nn.BatchNorm1d): # init BN
                nn.init.constant_(m.weight,1.0)
                nn.init.constant_(m.bias,0.0)
            elif isinstance(m,nn.Linear): # init dense
                nn.init.kaiming_normal_(m.weight,nonlinearity='relu')
                nn.init.zeros_(m.bias)
        # (Heuristic) spread the mu biases over [mu_min, mu_max] so components
        # start at distinct locations
        self.layer_list[-1].fc_mu.bias.data.uniform_(self.mu_min,self.mu_max)

    def forward(self, x):
        """
        Forward propagate.

        :param x: input [N x x_dim]
        :return: (pi, mu, sigma) from the mixture head, each [N x K x D]
        """
        # Dropped the per-call intermediate_output_list the original built:
        # it was never read or returned, so it only wasted work each forward.
        for layer in self.net:
            x = layer(x)
        return x
184 |
def eval_mdn_1d(
    mdn,
    x_train_np,
    y_train_np,
    figsize=(12,3),
    device='cpu',
):
    """
    Qualitatively evaluate a trained MDN on 1-D input data with matplotlib.

    Plots, per output dimension: the training data, each mixture component's
    mean and +/-2-sigma band (colored where its weight exceeds a threshold),
    the most probable mean, and a second figure with epistemic/aleatoric
    uncertainties. Switches the model to eval mode and back to train mode.

    :param mdn:        trained MixtureDensityNetwork (forward returns pi,mu,sigma)
    :param x_train_np: training inputs; assumes shape [N x 1] — TODO confirm
    :param y_train_np: training targets [N x D]
    :param figsize:    matplotlib figure size per figure
    :param device:     device the test grid is moved to
    """
    # Eval
    mdn.eval()
    x_margin = 0.2
    # Dense 1-D test grid spanning the training range plus a margin
    x_test = th.linspace(
        start = x_train_np.min()-x_margin,
        end   = x_train_np.max()+x_margin,
        steps = 300
    ).reshape((-1,1)).to(device)
    pi_test,mu_test,sigma_test = mdn.forward(x_test)

    # Get the most probable mu
    argmax_mu_test = get_argmax_mu(pi_test,mu_test) # [N x D]

    # To numpy array
    x_test_np,pi_np,mu_np,sigma_np = th2np(x_test),th2np(pi_test),th2np(mu_test),th2np(sigma_test)
    argmax_mu_test_np = th2np(argmax_mu_test) # [N x D]

    # Uncertainties
    epis_unct,alea_unct = gmm_uncertainties(pi_test,mu_test,sigma_test) # [N x D]
    epis_unct_np,alea_unct_np = th2np(epis_unct),th2np(alea_unct)

    # Plot fitted results
    y_dim = y_train_np.shape[1]
    plt.figure(figsize=figsize)
    cmap = plt.get_cmap('gist_rainbow')
    colors = [cmap(ii) for ii in np.linspace(0, 1, mdn.k)] # one color per mixture
    pi_th = 0.1 # weight threshold below which a component is drawn de-emphasized
    for d_idx in range(y_dim): # for each output dimension
        plt.subplot(1,y_dim,d_idx+1)
        # Plot training data
        plt.plot(x_train_np,y_train_np[:,d_idx],'.',color=(0,0,0,0.2),markersize=3,
                 label="Training Data")
        # Plot mixture standard deviations (+/-2 sigma bands, only where active)
        for k_idx in range(mdn.k): # for each mixture
            pi_high_idx = np.where(pi_np[:,k_idx,d_idx] > pi_th)[0]
            mu_k        = mu_np[:,k_idx,d_idx]
            sigma_k     = sigma_np[:,k_idx,d_idx]
            upper_bound = mu_k + 2*sigma_k
            lower_bound = mu_k - 2*sigma_k
            plt.fill_between(x_test_np[pi_high_idx,0].squeeze(),
                             lower_bound[pi_high_idx],
                             upper_bound[pi_high_idx],
                             facecolor=colors[k_idx], interpolate=False, alpha=0.3)
        # Plot mixture means (colored when active, gray when weight is low)
        for k_idx in range(mdn.k): # for each mixture
            pi_high_idx = np.where(pi_np[:,k_idx,d_idx] > pi_th)[0] # [?,]
            pi_low_idx  = np.where(pi_np[:,k_idx,d_idx] <= pi_th)[0] # [?,]
            plt.plot(x_test_np[pi_high_idx,0],mu_np[pi_high_idx,k_idx,d_idx],'-',
                     color=colors[k_idx],linewidth=1/2)
            plt.plot(x_test_np[pi_low_idx,0],mu_np[pi_low_idx,k_idx,d_idx],'-',
                     color=(0,0,0,0.3),linewidth=1/2)

        # Plot most probable mu
        plt.plot(x_test_np[:,0],argmax_mu_test_np[:,d_idx],'-',color='b',linewidth=2,
                 label="Argmax Mu")
        plt.xlim(x_test_np.min(),x_test_np.max())
        plt.legend(loc='lower right',fontsize=8)
        plt.title("y_dim:[%d]"%(d_idx),fontsize=10)
    plt.show()

    # Plot uncertainties
    plt.figure(figsize=figsize)
    for d_idx in range(y_dim): # for each output dimension
        plt.subplot(1,y_dim,d_idx+1)
        plt.plot(x_test_np[:,0],epis_unct_np[:,d_idx],'-',color='r',linewidth=2,
                 label="Epistemic Uncertainty")
        plt.plot(x_test_np[:,0],alea_unct_np[:,d_idx],'-',color='b',linewidth=2,
                 label="Aleatoric Uncertainty")
        plt.xlim(x_test_np.min(),x_test_np.max())
        plt.legend(loc='lower right',fontsize=8)
        plt.title("y_dim:[%d]"%(d_idx),fontsize=10)
    plt.show()

    # Back to train
    mdn.train()
--------------------------------------------------------------------------------
/code/mlp.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "95fb33ad",
6 | "metadata": {},
7 | "source": [
8 | "### Multi-Layer Perceptron"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "id": "f1e30a24",
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "PyTorch version:[2.0.1].\n"
22 | ]
23 | }
24 | ],
25 | "source": [
26 | "import numpy as np\n",
27 | "import matplotlib.pyplot as plt\n",
28 | "import torch as th\n",
29 | "import torch.nn as nn\n",
30 | "from dataset import mnist\n",
31 | "from util import (\n",
32 | " get_torch_size_string,\n",
33 | " print_model_parameters,\n",
34 | " print_model_layers,\n",
35 | " model_train,\n",
36 | " model_eval,\n",
37 | " model_test\n",
38 | ")\n",
39 | "np.set_printoptions(precision=3)\n",
40 | "th.set_printoptions(precision=3)\n",
41 | "%matplotlib inline\n",
42 | "%config InlineBackend.figure_format='retina'\n",
43 | "print (\"PyTorch version:[%s].\"%(th.__version__))"
44 | ]
45 | },
46 | {
47 | "cell_type": "markdown",
48 | "id": "bd42312f",
49 | "metadata": {},
50 | "source": [
51 | "### Hyperparameters"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 2,
57 | "id": "339cebfd",
58 | "metadata": {},
59 | "outputs": [
60 | {
61 | "name": "stdout",
62 | "output_type": "stream",
63 | "text": [
64 | "Ready.\n"
65 | ]
66 | }
67 | ],
68 | "source": [
69 | "device = 'cpu'\n",
70 | "n_epoch = 20\n",
71 | "batch_size = 128\n",
72 | "print_every = 1\n",
73 | "print (\"Ready.\")"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "id": "2aac096e",
79 | "metadata": {},
80 | "source": [
81 | "### Dataset"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 3,
87 | "id": "95ebd370",
88 | "metadata": {
89 | "scrolled": true
90 | },
91 | "outputs": [
92 | {
93 | "name": "stdout",
94 | "output_type": "stream",
95 | "text": [
96 | "MNIST ready.\n"
97 | ]
98 | }
99 | ],
100 | "source": [
101 | "train_iter,test_iter,train_data,train_label,test_data,test_label = \\\n",
102 | " mnist(root_path='../data',batch_size=128)\n",
103 | "print (\"MNIST ready.\")"
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "id": "72dc751b",
109 | "metadata": {},
110 | "source": [
111 | "### Model"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 4,
117 | "id": "dfacec51",
118 | "metadata": {},
119 | "outputs": [
120 | {
121 | "name": "stdout",
122 | "output_type": "stream",
123 | "text": [
124 | "Ready.\n"
125 | ]
126 | }
127 | ],
128 | "source": [
129 | "class MultiLayerPerceptronClass(th.nn.Module):\n",
130 | " def __init__(\n",
131 | " self,\n",
132 | " name = 'mlp',\n",
133 | " x_dim = 784,\n",
134 | " h_dim_list = [256,256],\n",
135 | " y_dim = 10,\n",
136 | " actv = nn.ReLU(),\n",
137 | " p_drop = 0.2\n",
138 | " ):\n",
139 | " \"\"\"\n",
140 | " Initialize MLP\n",
141 | " \"\"\"\n",
142 | " super(MultiLayerPerceptronClass,self).__init__()\n",
143 | " self.name = name\n",
144 | " self.x_dim = x_dim\n",
145 | " self.h_dim_list = h_dim_list\n",
146 | " self.y_dim = y_dim\n",
147 | " self.actv = actv\n",
148 | " self.p_drop = p_drop\n",
149 | " \n",
150 | " # Declare layers\n",
151 | " self.layer_list = []\n",
152 | " h_dim_prev = self.x_dim\n",
153 | " for h_dim in self.h_dim_list:\n",
154 | " # dense -> batchnorm -> actv -> dropout\n",
155 | " self.layer_list.append(nn.Linear(h_dim_prev,h_dim))\n",
156 | " self.layer_list.append(nn.BatchNorm1d(num_features=h_dim))\n",
157 | " self.layer_list.append(self.actv)\n",
158 | " self.layer_list.append(nn.Dropout1d(p=self.p_drop))\n",
159 | " h_dim_prev = h_dim\n",
160 | " self.layer_list.append(nn.Linear(h_dim_prev,self.y_dim))\n",
161 | " \n",
162 | " # Define net\n",
163 | " self.net = nn.Sequential()\n",
164 | " self.layer_names = []\n",
165 | " for l_idx,layer in enumerate(self.layer_list):\n",
166 | " layer_name = \"%s_%02d\"%(type(layer).__name__.lower(),l_idx)\n",
167 | " self.layer_names.append(layer_name)\n",
168 | " self.net.add_module(layer_name,layer)\n",
169 | " \n",
170 | " # Initialize parameters\n",
171 | " self.init_param(VERBOSE=False)\n",
172 | " \n",
173 | " def init_param(self,VERBOSE=False):\n",
174 | " \"\"\"\n",
175 | " Initialize parameters\n",
176 | " \"\"\"\n",
177 | " for m_idx,m in enumerate(self.modules()):\n",
178 | " if VERBOSE:\n",
179 | " print (\"[%02d]\"%(m_idx))\n",
180 | " if isinstance(m,nn.Conv2d): # init conv\n",
181 | " nn.init.kaiming_normal_(m.weight)\n",
182 | " nn.init.zeros_(m.bias)\n",
183 | " elif isinstance(m,nn.BatchNorm1d): # init BN\n",
184 | " nn.init.constant_(m.weight,1.0)\n",
185 | " nn.init.constant_(m.bias,0.0)\n",
186 | "            elif isinstance(m,nn.Linear): # init dense\n",
187 | " nn.init.kaiming_normal_(m.weight,nonlinearity='relu')\n",
188 | " nn.init.zeros_(m.bias)\n",
189 | " \n",
190 | " def forward(self,x):\n",
191 | " \"\"\"\n",
192 | " Forward propagate\n",
193 | " \"\"\"\n",
194 | " intermediate_output_list = []\n",
195 | " for idx,layer in enumerate(self.net):\n",
196 | " x = layer(x)\n",
197 | " intermediate_output_list.append(x)\n",
198 | " # Final output\n",
199 | " final_output = x\n",
200 | " return final_output,intermediate_output_list\n",
201 | " \n",
202 | "print (\"Ready.\") "
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 5,
208 | "id": "fd93b90d",
209 | "metadata": {},
210 | "outputs": [
211 | {
212 | "name": "stdout",
213 | "output_type": "stream",
214 | "text": [
215 | "Ready.\n"
216 | ]
217 | }
218 | ],
219 | "source": [
220 | "mlp = MultiLayerPerceptronClass(\n",
221 | " name = 'mlp',\n",
222 | " x_dim = 784,\n",
223 | " h_dim_list = [512,256],\n",
224 | " y_dim = 10,\n",
225 | " actv = nn.ReLU(),\n",
226 | " p_drop = 0.25\n",
227 | ").to(device)\n",
228 | "loss = nn.CrossEntropyLoss()\n",
229 | "optm = th.optim.Adam(mlp.parameters(),lr=1e-3)\n",
230 | "print (\"Ready.\")"
231 | ]
232 | },
233 | {
234 | "cell_type": "markdown",
235 | "id": "f6c04e91",
236 | "metadata": {},
237 | "source": [
238 | "### Print model parameters"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": 6,
244 | "id": "2eafd659",
245 | "metadata": {},
246 | "outputs": [
247 | {
248 | "name": "stdout",
249 | "output_type": "stream",
250 | "text": [
251 | "[ 0] parameter:[ net.linear_00.weight] shape:[ 512x784] numel:[ 401408]\n",
252 | "[ 1] parameter:[ net.linear_00.bias] shape:[ 512] numel:[ 512]\n",
253 | "[ 2] parameter:[ net.batchnorm1d_01.weight] shape:[ 512] numel:[ 512]\n",
254 | "[ 3] parameter:[ net.batchnorm1d_01.bias] shape:[ 512] numel:[ 512]\n",
255 | "[ 4] parameter:[ net.linear_04.weight] shape:[ 256x512] numel:[ 131072]\n",
256 | "[ 5] parameter:[ net.linear_04.bias] shape:[ 256] numel:[ 256]\n",
257 | "[ 6] parameter:[ net.batchnorm1d_05.weight] shape:[ 256] numel:[ 256]\n",
258 | "[ 7] parameter:[ net.batchnorm1d_05.bias] shape:[ 256] numel:[ 256]\n",
259 | "[ 8] parameter:[ net.linear_08.weight] shape:[ 10x256] numel:[ 2560]\n",
260 | "[ 9] parameter:[ net.linear_08.bias] shape:[ 10] numel:[ 10]\n"
261 | ]
262 | }
263 | ],
264 | "source": [
265 | "print_model_parameters(mlp)"
266 | ]
267 | },
268 | {
269 | "cell_type": "markdown",
270 | "id": "96310c96",
271 | "metadata": {},
272 | "source": [
273 | "### Print model layers"
274 | ]
275 | },
276 | {
277 | "cell_type": "code",
278 | "execution_count": 7,
279 | "id": "9161988d",
280 | "metadata": {},
281 | "outputs": [
282 | {
283 | "name": "stdout",
284 | "output_type": "stream",
285 | "text": [
286 | "batch_size:[16]\n",
287 | "[ ] layer:[ input] size:[ 16x784]\n",
288 | "[ 0] layer:[ linear_00] size:[ 16x512] numel:[ 8192]\n",
289 | "[ 1] layer:[ batchnorm1d_01] size:[ 16x512] numel:[ 8192]\n",
290 | "[ 2] layer:[ relu_02] size:[ 16x512] numel:[ 8192]\n",
291 | "[ 3] layer:[ dropout1d_03] size:[ 16x512] numel:[ 8192]\n",
292 | "[ 4] layer:[ linear_04] size:[ 16x256] numel:[ 4096]\n",
293 | "[ 5] layer:[ batchnorm1d_05] size:[ 16x256] numel:[ 4096]\n",
294 | "[ 6] layer:[ relu_06] size:[ 16x256] numel:[ 4096]\n",
295 | "[ 7] layer:[ dropout1d_07] size:[ 16x256] numel:[ 4096]\n",
296 | "[ 8] layer:[ linear_08] size:[ 16x10] numel:[ 160]\n"
297 | ]
298 | }
299 | ],
300 | "source": [
301 | "x_torch = th.randn(16,mlp.x_dim).to(device)\n",
302 | "print_model_layers(mlp,x_torch)"
303 | ]
304 | },
305 | {
306 | "cell_type": "markdown",
307 | "id": "5da9aaf1",
308 | "metadata": {},
309 | "source": [
310 | "### Train MLP"
311 | ]
312 | },
313 | {
314 | "cell_type": "code",
315 | "execution_count": 8,
316 | "id": "c73c3242",
317 | "metadata": {},
318 | "outputs": [
319 | {
320 | "name": "stdout",
321 | "output_type": "stream",
322 | "text": [
323 | "epoch:[ 0/20] loss:[1.137] train_accr:[0.9704] test_accr:[0.9631].\n",
324 | "epoch:[ 1/20] loss:[1.073] train_accr:[0.9839] test_accr:[0.9727].\n",
325 | "epoch:[ 2/20] loss:[1.050] train_accr:[0.9869] test_accr:[0.9746].\n",
326 | "epoch:[ 3/20] loss:[1.041] train_accr:[0.9889] test_accr:[0.9783].\n",
327 | "epoch:[ 4/20] loss:[1.037] train_accr:[0.9880] test_accr:[0.9772].\n",
328 | "epoch:[ 5/20] loss:[1.032] train_accr:[0.9929] test_accr:[0.9798].\n",
329 | "epoch:[ 6/20] loss:[1.024] train_accr:[0.9918] test_accr:[0.9754].\n",
330 | "epoch:[ 7/20] loss:[1.031] train_accr:[0.9926] test_accr:[0.9779].\n",
331 | "epoch:[ 8/20] loss:[1.028] train_accr:[0.9950] test_accr:[0.9779].\n",
332 | "epoch:[ 9/20] loss:[1.030] train_accr:[0.9955] test_accr:[0.9807].\n",
333 | "epoch:[10/20] loss:[1.017] train_accr:[0.9961] test_accr:[0.9812].\n",
334 | "epoch:[11/20] loss:[1.014] train_accr:[0.9968] test_accr:[0.9817].\n",
335 | "epoch:[12/20] loss:[1.016] train_accr:[0.9935] test_accr:[0.9752].\n",
336 | "epoch:[13/20] loss:[1.023] train_accr:[0.9964] test_accr:[0.9790].\n",
337 | "epoch:[14/20] loss:[1.022] train_accr:[0.9974] test_accr:[0.9805].\n",
338 | "epoch:[15/20] loss:[1.018] train_accr:[0.9972] test_accr:[0.9800].\n",
339 | "epoch:[16/20] loss:[1.005] train_accr:[0.9965] test_accr:[0.9794].\n",
340 | "epoch:[17/20] loss:[1.014] train_accr:[0.9973] test_accr:[0.9792].\n",
341 | "epoch:[18/20] loss:[1.012] train_accr:[0.9970] test_accr:[0.9794].\n",
342 | "epoch:[19/20] loss:[1.015] train_accr:[0.9984] test_accr:[0.9821].\n"
343 | ]
344 | }
345 | ],
346 | "source": [
347 | "model_train(mlp,optm,loss,train_iter,test_iter,n_epoch,print_every,device)"
348 | ]
349 | },
350 | {
351 | "cell_type": "markdown",
352 | "id": "0b8ec7e0",
353 | "metadata": {},
354 | "source": [
355 | "### Test MLP"
356 | ]
357 | },
358 | {
359 | "cell_type": "code",
360 | "execution_count": 9,
361 | "id": "619bd29c",
362 | "metadata": {},
363 | "outputs": [
364 | {
365 | "data": {
366 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAA9AAAARcCAYAAABye3nFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAB7CAAAewgFu0HU+AADfrElEQVR4nOzdd3RU1fr/8WcC6QkECBhAivSudCEgGAxdulKiFOGCBUVBihVQQQWUH6KCoMJFrl+KgohcpRkCCCpFwEsElCZNIHABgQAx2b8/vBmJZx/Yk8xkkpn3ay3WYn1mz5mdk71n5skkz3EopZQAAAAAAIAbCvD2BAAAAAAAyA8ooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABjxXQ5cuXF4fDkeVfcHCwlC1bVnr27CkbNmzw1EO7bNy4ceJwOGTcuHHG90lLS5O1a9fKyJEjpWHDhhIVFSWBgYESExMjnTp1khUrVrh9nv379xeHwyH9+/d3+7Gv17JlS3E4HLJu3TqPPk6mzLVy6NAh4/ucP39eFi9eLAMHDpQaNWpIWFiYhISESIUKFeShhx6SH3/80XMTNsQeYA+Y8sU94Ovr/9ChQ5avz+7f+vXr3TJP1v9f5s6da3TuAwK88zmBr69/EZG9e/fK9OnTpX///lK7dm0pWLCgOBwOeeWVVzwyT9Z/VuvXr5eJEydK9+7ds6y3jRs3emaiLvKHPZDp2rVr8tZbb0mzZs2kaNGiEhISIrfeequ0a9dOFi5c6LZ5sgf0Tp06JWPGjJFatWpJRESEhIeHS8WKFaV3796ybds29072fwp65KjXiY2NlUqVKomIyLlz52Tr1q2yaNEiWbx4sUyZMkWGDx/u6Sl4RFJSksTHx4uISExMjDRr1kzCw8MlOTlZli9fLsuXL5fBgwfLzJkzxeFweHm2vmfy5MkyYcIEERGpUqWKtGvXTtLT02Xbtm0yZ84cmT9/vsyePVv69evn5ZmyB9gDnpFf9oCvrv+IiIgbntvk5GTZsmWLREZGSv369XNxZv6hUqVKNzz/X3/9tRw5ckTuvvvuXJyVla+ufxGRGTNmyLRp07w9Db/1xBNPyM6dO709jZvy5T0gInL06FFp06aNJCcnS3R0tMTGxkp4eLgcOXJE1q9fL+Hh4dKzZ09vT9NnJSYmSrdu3eTcuXNSqVIladu2rWRkZMihQ4dk0aJFEhsb65HXYI8X0IMGDcryk5IrV67IkCFDZN68eTJq1Cjp2LGjVKlSxdPTcLuAgADp3r27DBs2TJo3b57ltoULF0pCQoLMmjVLYmNjpW/fvl6ape8KDw+X4cOHy8MPPyyVK1d25mlpaTJ69GiZOnWqDB48OMsTt7ewB9gDnpBf9oCvrv/o6GiZO3eu7e3t27cXEZFevXpJeHh4Ls3KfzRr1kyaNWumve3KlStSqlQpEREZOHBgbk7LwlfXv4hIrVq15Omnn5a6detKvXr1ZOLEifLRRx95e1p+Iz4+Xrp27Sr16tWTevXqSWxsrBw+fNjb07Lw5T2Qmpoq8fHxsmfPHhk3bpw8++yzEhgY6Lz98uXLsm/fPi/O0LclJydLhw4dpECBArJkyRLp2rVrltt/++03uXz5skceO9d/tykkJETeeecdCQ8Pl/T0dFmyZEluT8Et4uLi5JNPPrEUDiIiPXv2dD5ZzJs3L5dn5h+eeeYZeeONN7IUDiIigYGBMmXKFKlSpYpcu3ZNFixY4KUZ2mMPwB3y6x7wlfV/I8eOHZOVK1eKiPcLOH+0dOlS+e9//ytRUVHSrVs3b08nC19a/4MGDZLJkydL
nz59pFq1al77dXl/NXnyZBk7dqzce++9Urp0aW9Px5gv7YFXX31V9uzZI4MHD5axY8dmKZ5FRMLCwuSOO+7wzuT8wMMPPyypqakya9YsS/Es8udvR1aoUMEjj+2VZ7uIiAipWrWqiEiW33fP/BsJEZE5c+ZIkyZNpHDhwpbfiz9+/LgMHz5cqlevLmFhYRIZGSkNGzaUt99+W/744w/tY6ampsq4ceOkcuXKEhwcLCVLlpR+/frJr7/+6pGvsW7duiIicuTIEY8c30RaWprMnz9fEhISpFq1alKoUCEJDQ2VqlWryhNPPCHHjx+/6TGSkpKkdevWUrRoUQkLC5NGjRrd9CfMa9eulW7duknJkiUlKChISpQoIV27dpXNmze760u7oYCAAKlTp46IePf83wh7IHewB/LmHvD19T937lzJyMiQmjVrSuPGjd1+fFP+uv4//PBDERFJSEiQkJCQXHlMV/j6+s8r/HX95we+sAfS0tJkxowZIiIycuTIbB3D03x5D+zYsUM2bNggZcqUkV69erntuKa89uPCCxcuiIhIcHCw5bbHH39cBg0aJAULFpQOHTpI48aNnRtq/fr1UqtWLZk6dapcuXJF4uPjJTY2Vvbv3y+PP/64dOjQQdLS0rIc7/LlyxIXFyfjx4+XEydOSOvWraV58+aycuVKqVevnhw8eNB2npl/2H6jX9XT+fnnn0VEpGTJki7dz51OnjwpDz74oKxYsUKKFCkibdu2lbi4OLl48aJMnz5d7rjjDvnll19s77906VKJi4uTY8eOSZs2baRhw4aybds26du3r4wYMUJ7n6efflruueceWbZsmZQtW1a6dOkiFSpUkGXLlknz5s1lzpw5xvO/vkmPq40F8sL5vxn2gOexB/LuHvDl9Z851tufPvvj+v/111/l66+/FhHvn/8b8eX1n1f44/rPT/L7Hti+fbukpKRIqVKlpFKlSvLjjz/K+PHjZciQITJmzBhZsWKFZGRk5PAs5Ywv74HM3/Jq3ry5OBwOWblypYwaNUqGDBkiL7/8sseahzkpDylXrpwSETVnzhzLbTt37lQBAQFKRNSHH37ozEVEiYgqVKiQ2rx5s+V+J06cUMWKFVMOh0O9++67Kj093XlbSkqKiouLUyKixo8fn+V+Tz/9tBIRVa1aNXXs2DFnfunSJdW5c2fn444dO9alr8POiRMnVOHChZWIqLfeesv4fjfTr18/JSKqX79+RuMvXLigli1bpq5evZolv3btmnrmmWeUiKj27dtb7teiRQvnOZk4cWKW29atW6dCQ0OViKivvvoqy22zZs1SIqIqVaqkdu7cmeW2pKQkFRkZqYKCgtS+ffuy3JZ5jg8ePJglP3jwoHMef7/tRr788kslIsrhcFjmkZvYA+yB6/nbHvDX9b9u3TolIiooKEidPn3a6D6mWP83N27cOCUiqm7dukbjPcUf13/m+nz55ZdvOjY7WP83lnmcDRs23HRsbvD1PZD5/W7UqJEaPXq0cjgczuNk/qtbt646fPiw4Rm7OfbAX/r06aNERD366KPqnnvusZx7EVEJCQnqypUrJqfKZblaQJ87d06tWLFCVaxYUYmIKlWqlLp48eJfk/nfF/zSSy9pjzl69GglImro0KHa248ePaoCAwNV8eLFVUZGhlJKqcuXL6vIyEglIurLL7+03OfEiRMqJCTEduPExcWpqlWrqiVLlhh93WlpaapVq1ZKRFTt2rUtizYnXN04N1OqVCkVEBCgLly4kCXP3Dh2b0BGjBihRETFx8c7s/T0dFWqVCklImrr1q3a+02aNEmJiBoxYkSW3G7jHD16VFWtWlVVrVpVHT161OhrOnbsmHMegwcPNrqPp7AH2AN/5097wF/Xf9++fZWIqB49ehiNdwXr/8YyMjJU+fLllYiot99++6bjPckf139eK6BvxtfWf34ooH1pD7z66qtKRFRgYKASEfXYY4+pvXv3qvPnz6vVq1erKlWqKBFR
tWrVUteuXbvZ6TLCHvhLmzZtnOc/LCxMTZ8+XR07dkydOnVKffjhh6pQoUIefR/k8QLa7l/FihXVjh07sk7mf7clJydrj1mrVq2bPjnUqFFDiYjau3evUkqpjRs3KhFR0dHRtvfp1KmT7cZx1cCBA5WIqGLFijnn4C7Z3Tg7duxQb7zxhho6dKgaMGCA6tevn+rXr58qUaKEEhG1ffv2LOMzN87UqVO1x9u1a5cSERUaGqr++OMPpZRSW7dudX5f7Xz//fdKRFSTJk2y5HYbx1Xnz59XdevWdf5EMDU1NUfHyyn2AHvg7/xpD/jj+j9//rwKCwtTIqL+/e9/5+hYOqz/G1u9erUSERUSEqLOnj2bo2PllD+u/7xaQPvL+s+rBbSv7oGJEyc659u7d2/L7YcPH3YW5vPmzTM+7o2wB/7SunVr5/l/7733LLcvXLhQiYgKCAjI8d7SydXrQGf+Ifmdd94pbdu2lYIF9Q9fvnx5bX7gwAEREW3X3787ffq0VKlSRY4ePXrDY4qI3HbbbTc9nolhw4bJBx98IEWKFJHVq1d7vS3/pUuX5MEHH5SlS5fecFzm36H8nd15ycxTU1PlzJkzUqJECef3Zv/+/Te95u/p06dvNnWXXbx4Udq1ayc//PCD1K1bV7766qs80zyGPeA97AHv7wF/Wv8LFiyQy5cvy6233ipt2rRxyzFzwp/Wv8hfzcO6du0qRYoU8chjuMqf1n9e42/rP6/y1T0QGRnp/P+QIUMst5ctW1Y6dOggn376qaxZs0YefPBBlx8jp3x5D2Se/8DAQHnooYcst99///3y2GOPSUpKiiQmJsqAAQNy/JjXy/XrQJsIDQ3V5pl/jN+jR4+bXlezWLFiLj1mTo0YMULeeustiYqKklWrVjk7EHvTM888I0uXLpVq1arJa6+9Jg0bNpTo6GgJCgoSEZGmTZvK5s2bRSmV7cfIvG/m9yYmJuambxyjo6Oz/Xg6ly5dkg4dOsimTZukTp06snr16jzz5kmEPeBN7AHv85f1L/JXAde/f/88cUkff1n/IiLnzp1zvknMS83D/Gn95zX+tP7zMl/dA9dfHsnuUkmZ+YkTJzw6Fzu+vAcyz22ZMmVsfxBz2223SUpKikfOv8cLaHcqU6aM/PzzzzJ69Ghp0KCB0X0yr413ow6GOe1uOGrUKHnzzTelcOHCsmrVKuO5edqiRYtERGThwoXOS9pcL7NLrx27roSZ5yskJMT5BFWmTBkR+fMJKze7dV6+fFk6dOgg69evlzp16sjatWt9+o0De8A17AHfklfXv4hIcnKyfPfdd+JwONz+k+7s8of1n+njjz+WK1euyG233SZxcXG5/vi5IS+v/7zIn9a/v8hLe6BevXricDhEKSUpKSnONXC9lJQUEfnzsl3e4Mt7oH79+iIicubMGdsxnjz/3v8RuQvatWsnIn8tCBP169eXiIgISUlJkVWrVlluP3nypDY3NWbMGJk8ebIULlxYVq9eLQ0bNsz2sdzt7NmzIiJSrlw5y20rV650Liw78+fP1+bz5s0TEZFmzZo5f+qT+VOt5ORk2b17d06mbSw1NVU6duwoSUlJzsLB13+yyx5wDXvAt+TF9Z/pgw8+EBGRu+++2/bTiNzm6+v/epnnf8CAATf99cH8Ki+v/7zIn9a/v8hLeyAmJkaaNWsmIiJr1qyx3J6WliZJSUkiItKoUSOXj+8OvrwH2rdvL2FhYXL+/HnZsmWL5fZ9+/bJ4cOHRcQz5z9fFdAjR46UqKgoefPNN+WNN96Qa9euWcYcPHgwyzc8NDRUBg8eLCIiTz31VJaP8VNTU+WRRx6R1NRU28ds1aqVVKtWTfv3A88//7y8/vrrEhUV5VLhsG7duiwXi/eU6tWri4jI9OnTs+R79+6Vhx9++Kb337Ztm0yaNClLtnHjRnnnnXdE5M/zmSkwMFDGjh0rSinp2rWrbNy40XK89PR0+frrr+Xbb781mv+xY8ekWrVq
Uq1aNTl27FiW265cuSKdOnWSxMREvygcMrEHXMMe8C15bf1nSktLcz6mya8Ps/5zvv6vt2vXLtm+fbsEBAS4/Kui+UleXf+uYv27d/37k7y2B8aOHSsiIq+++mqW7+sff/whI0aMkAMHDkhkZKTlt5LYAznfA5GRkc5rUT/yyCNy/Phx521nzpyRQYMGSUZGhjRq1EjuvPNOo8dzidvbkv1Pdq4dK//rpnYjSUlJKjo6WomIKlGihIqLi1MJCQmqY8eOzrb4jRs3znKfixcvqkaNGikRUREREeree+9V9913n4qJiVHFihVzXnbEleu/LVu2zDnfBg0aODva/f3f31u1K6XU2rVrlYioggULGp8bpf7qvhcdHa0aN25s+2/27NlKKaU+/fRT53XpateurXr16qXi4uJUYGCgiouLU02bNlUiohITE7M8Tmb3vSeeeEIFBASomjVrqt69e6sWLVo4r9s3bNgw7RxHjhzpPC81a9ZUnTt3Vr169VItW7ZUUVFRSkTUjBkztOfYleu/PfXUU87bOnbsaHv+M8+FN7AH2AP+vAd8ff1fb8mSJUpEVFRUlFHnc9Z/ztf/9Z544gklIqpt27YunU9P8of1v23btizrLnNet956a5b8+PHjWe7H+nfP+p89e3aWrzsoKEiJiKpRo4Yz69Kli0vn2J38YQ8opdTLL7/sXM9NmzZV3bp1c15OLzQ0VH3xxReW+7AH3LMHrl69qtq1a6dE/rx2eOvWrVX79u1V0aJFlYiocuXKqQMHDrh0jk3luwJaKaVOnjypXnjhBVWvXj3nRblvvfVW1bRpUzV27Fi1a9cuy30uXbqkXnjhBVWxYkUVFBSkbrnlFpWQkKAOHjyoxo4d6/LGmTNnjnO+N/pXrlw5yzEzr4M2ZMgQ01OjlPpr49zs3/Vfx/r161WrVq1UdHS0CgsLU7Vq1VITJkxQV69edW4Qu42TmJio1q5dq1q1aqUKFy6sQkNDVYMGDdTcuXNvOM9vvvlGJSQkqHLlyqng4GAVGRmpqlSporp06aLef/99y+VFsrNxTM+Fu66Vlx3sAfaAP+8BX1//1+vYsaMSEfXoo48afZ2s/5yv/0xXr15VxYoVUyKiFi1aZHoqPc4f1n9iYqLRevz794717571n/l9c/X1N7f4wx7ItHLlStWuXTtVtGhRFRgYqMqUKaP69++vfvrpJ+149oD7XgPS09PVu+++qxo1aqQiIiJUSEiIql69unr22WfVmTNnTE+tyxxK5aD1GrIlPj5eNm3aJPv375eYmBhvTwfIdewB+DPWP/wZ6x/+jj2Q/+Wrv4H2BampqbJx40Z56qmn2DTwS+wB+DPWP/wZ6x/+jj3gG/gEGgAAAAAAA3wCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABgo6MmDOxwOTx4euCmllFcfnz0Ab/PmHmD9w9t4DYC/4zUA/sxT659PoAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwEBBb08A7vPRRx9p84SEBEtWvHhx7dgzZ864dU4AAAAARIKCgrT5E088Ycmef/557dj169dr8969e2vzS5cuGc4OpvgEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMOBQSimPHdzh8NSh/dodd9yhzb/99lttfu3aNUtWtmxZ7dhz585ld1p5kgeXtxF/3gMtW7bU5mPHjnVpvDuMHz/eko0bN85jj5eXeHMP+PP6R97Aa0De
U7VqVW3+9ddfW7KrV69qx7700kvafO7cudmel6/iNcA7ChQooM0HDBigzd977z3jY9ud186dO2vz5cuXGx/b13hq/fMJNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIAu3HlYWFiYNt+4caM2t+vO/eOPP1qypk2basdeunTJbHL5BB1YPS8xMVGbe7KrtjusW7dOm9999925OxEPowMr/BmvAXmPXafgpUuXWjK783fo0CFtbvf8bTfeH/Aa4B2NGzfW5ps2bTI+xu7du7X5pEmTtPnnn3+uzS9cuGD8mL6GLtwAAAAAAHgRBTQAAAAAAAYooAEAAAAAMEABDQAAAACAgYLengD+pGu49PXXX7vl2LVr17ZkgwcP1o6dOnWqWx4T+Vt+bQzmCruvxS63azoGAPAMuwZA5cqV0+ZVq1bV5v7cRAyedeedd2rzL7/8MsfH7tatmzb/5Zdfcnxs5AyfQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAbowp3LSpcurc0///zzXJ1HgwYNcvXx4F12naXtum3nFePHj3dp/NixY3P8mHbnxOFw5PjYyN/Cw8Mt2bhx47Rj7boHu4Pdfq5fv77HHnP9+vXaXPfaxdUccCN2HbFPnz5tyYoXL+7h2QA5Y/d8V6hQIZeOk5ycbMkuX76crTnB8/gEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEAX7lzWqVMnbR4REWF8jPT0dG3+yiuvaPPevXtbsi5dumjHVqlSRZvv27fPbHLIk+y69nrSunXrtHlSUpI2t+tm7I7HdEe3cbv5uWPeyFuKFCmizT/77DNL1qxZM+1YT3bhtusI78nHvOuuu7R54cKFLRlduHEjJUuW1OZRUVG5OxHARd27d7dkNWvWdOkYBw4c0OaxsbGW7MKFCy4dG7mHT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADdOHOZT169MjxMZ577jltPmnSJG1eo0YNS2bXbTs0NDT7E4NfGj9+vDb3Rndquy7cgCs6duyozXVdUgG4Jjg4WJsHBgYaH+P06dPa/MSJE9maE3A9uysd9OzZ05KFh4drx16+fFmbDxgwQJvTcTt/4RNoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCJWC5LSkrS5jVr1rRkr7/+unbs//t//8+dU4IfcFdDL280BnNFy5YtvT0F+IC+ffvm+mMeOHDAkp07d0479qefftLmSilt3qpVK0tWsmRJ88kBecy+ffu0+a5du3J5JvBFJUqU0Obdu3c3Pobde/WNGzdmZ0rIY/gEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEAX7lz29ttva/NZs2ZZst9++82lYwcE6H8eEhYW5tJx4D/yeldtV40dO9Zjx163bp3Hjg3vKF++vDavVq2axx7T7koMXbt2tWTp6enasZcuXXLpMb/66itL5q4u3DNnznTLcQBX3HHHHdo8NjZWm3/zzTcenA0Af8Mn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cOeys2fPeuzYNWrU0OYdOnSwZL/88ot27P79+906JyA3tWzZMsfHsOu2TRdu31OlShVtXqpUKeNj2F39ICMjQ5ufO3dOm1+4cMH4Me3cfvvt2jw+Pj7Hxz516pQ2111BAvC0M2fOaHNXr14CANnBJ9AAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABunDnspCQEG3et29fSxYbG6sdq5TS5u3btzeex8qVK7X5xYsXjY8BeEtiYqLHjn333Xd77NjIH+yeY3Xsum3bHWPmzJnZmpOJdu3auTQXV7jjGIC7lC1bVptXqlRJ
m3OFEbiDw+Hw9hREROSOO+7Q5i+88II279q1q/Gx7TrZt23bVpvv2rXL+Ni+hE+gAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIAmYjlUunRpbf7cc89p886dO2vzkiVLWjK7ZgXuaOZy4MCBHB8D8LRx48Zp85YtW+b42OvWrcvxMQA7ds+x27dvz/GxixQpos0fe+yxHB/bzhtvvOGxYwOu+vXXX7X5L7/8ksszgT9xx/vviIgIbd64cWNL1r9/f+3YhIQEbW43P1fmfcstt2jz0aNHuzQXX8cn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cGsEBOh/rvDiiy9asscff1w71q5Lalpamja/dOmSJQsPD7ebYo7Zzduuc+yyZcs8NhdARN9xe+zYsW45tq7j9t133+2WYyP/2rx5szb/+eeftXnlypWNj/3uu+9q8zNnzhgfw84//vEPba67moOr/vnPf2pzunAjLylbtqw2r1Spkjbfv3+/J6cDWAwePFib9+zZU5tXrFgxx495/Phxbf7ll19asn79+mnHFiyoLw3btWunzYsWLWrJzp49azdFn8En0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cGu89tpr2vzpp582PsaPP/6ozR955BFtvmnTJkuWkZFh/HiuKl++vDZfsGCBNn/22We1+dSpU901JfgJXbdtEfd13NYZP368x46N/Ov333/X5nZdru+9915Ltn79eu3YtWvXZn9iNxEdHe2xY6ekpHjs2ICrHA6HNldKafOYmBhPTgd+4tSpU9r8k08+sWQ9evTQjrV7nnbl+dvuqg1du3bV5tu2bdPmV65csWRffPGFdqxdHVC4cGFtbte129fxCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGDAoexaGbrj4DbdE/OKXr16afP58+drc1130gkTJmjHzp49W5uHhIRo87lz51qyTp06acdevXpVm8fHx2vzevXqWbLnnntOO7Z48eLafN++fdq8RYsWluzkyZPasd7gweVtJK/vAU9KTEzU5i1btvTYY959993afN26dR57zLzOm3vAn9e/O7Rr106b//vf/9bmrly5Yfv27drcbn9eunTJ+Nh5Ca8BeU+rVq20ua4rcHBwsEvH3rp1qzZv1KiRS8fxJbwGuM+bb75pyYYNG+aWY+vqiUmTJmnHXrx4McePFxgYqM3tunPXrVtXm1etWtWS/fe//83+xNzMU+ufT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABgwC+aiNk17jp+/Lg2j4iI0OZxcXGWbOPGjdqx5cuX1+Yvv/yyNk9ISLBkf/zxh/E8bjQXnc6dO2vzIUOGuPSYK1assGTdu3c3noen0UDG87zRLMxVuiZiSUlJufp4N8o9iQYy+Zfda9Qtt9yizV35Xutec0REFi5caHyM/IDXgPxjy5Ytlqx+/fouHYMmYla8BriPrj7YvHmzdmyNGjVcOrauOWTv3r21Y93RRGzQoEHa/L333tPmCxYs0OZ2ryV5BU3EAAAAAADwIgpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGCjo7QnkhgoVKmjzqKgobf7mm29qc1e6XK9cuVKbV65c2fgYAwYMyPE87CxbtsylvEGDBtr89OnTOZ4L8g9dx+281G3bjm6Onpz32LFjtfn48eO1+bhx4zw2F+QPpUqVsmRBQUFuOfb27dstma7jK+BNn332mSVztQs34Em67terV6/Wjq1evbo2t+tM3r59e0u2dOlS7Vi7K3pkZGRo81q1almyXr16acfaSUtLc2m8r+MTaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwIBfdOF+7733XBrfuHFjbZ6UlGTJ7LpT
h4SEaPN9+/Zp86FDh1qyNWvW2E0x123dutXbU0Au0nXbFskfHbeB/Ej3GmB3pQhXbdiwwZL9/vvvbjk24C66bvGuKleunDavU6eOJdu1a1eOHw8YPny4Nr/lllu0uSvdr+Pi4rT5Pffco83tunC7Yvbs2dr8xRdfzPGxfQmfQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAb8ogv3li1btHlsbKxLuSt++OEHbd69e3dtfujQoRw/JuAqX+u2PX78eI8du0WLFto8v54reIfderHr5OqK06dPa/MZM2bk+NhAflC8eHFtXr58eUtGF2540nPPPafNP/vsM20+YsQIS9awYUPtWKWUS3P59ddfLdmQIUO0Y3fu3KnNT5065dJj+jo+gQYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABv2giNmbMGG3epUsXba5rNiEisnv3bks2adIk7diPP/5Ym6enp2tzwBvyUgMsVxqAjRs3znMTAdwgMDBQm7dv396l8ToBAfqfff/yyy8u5UBekpGRYcnsmiU5HA5PTwfIEbvmwHb5ihUrLNn//d//accWLlxYm9s1MJ45c6Yl27t3r3YszPAJNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYMCh7FocuuPgdEmEl3lweRvJ63vArpv12LFjjY9h1z2bTtl5gzf3QF5f/550zz33aPOvvvoqx8e2u5pDt27dtLmuu6u/4DUgf/vmm2+0eZMmTbT5sWPHtHmrVq0s2b59+7I/sXyE1wD4M0+tfz6BBgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADNCFGz6NDqzwd3Rg9Y5//vOf2jwhISHHx96yZYs2t+tM7M94DYC/4zUA/owu3AAAAAAAeBEFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAwU9PYEAACAuU8//dTbUwAAwG/xCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAM0EQMAAA327p1qzZPSEjI8TFmzJiRrTkBAICc4xNoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAgEMppTx2cIfDU4cGjHhweRthD8DbvLkHWP/wNl4D4O94DYA/89T65xNoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAgEe7cAMAAAAA4Cv4BBoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGPFZAly9fXhwOR5Z/wcHBUrZsWenZs6ds2LDBUw/tsnHjxonD4ZBx48Zl6/6LFy+Wli1bSpEiRSQ8PFxuv/12mTRpkqSlpXlkni1btnTrcf+uf//+4nA4ZO7cuR59nEwtW7YUh8Mh69atc/m+GRkZ8t5770njxo0lMjJSIiMjpXHjxjJr1ixRSrl/soZY/6x/U764/kX8Yw/861//kr59+8rtt98uJUqUkMDAQClcuLA0atRIXn31Vbl48aJb55m5Nvv37+/W4/5dTtZkdmSulUOHDhnfRyklL7/8snTt2lWqVq0qRYsWlcDAQClRooS0bt1a5s+fz2uAoeys/7S0NFm7dq2MHDlSGjZsKFFRURIYGCgxMTHSqVMnWbFihdvnyfr/y/nz52Xx4sUycOBA
qVGjhoSFhUlISIhUqFBBHnroIfnxxx89N2FDvr4HMv36668ydOhQqVq1qoSGhkpISIjcdttt0q9fP9m5c6db58kesNq0aZP07NlTypQpI0FBQRIeHi61a9eW0aNHy6lTp9w/2f8p6LEj/09sbKxUqlRJRETOnTsnW7dulUWLFsnixYtlypQpMnz4cE9PwaOefPJJmTZtmhQsWFDi4uIkIiJCvv76axk9erQsX75cVq1aJaGhod6epk9KT0+X+++/X5YsWSJhYWHSqlUrERFZs2aNDBkyRNasWSMLFiyQgADv/aIF65/17yn5Yf2L+PYemDFjhmzatEmqV68u9erVk6JFi8rJkydl8+bNsmXLFvnwww8lKSlJSpUq5e2p+pz09HR58cUXJTg4WGrXru1883r48GFZs2aNrF69WhYtWiRLly6VAgUKeG2evrr+k5KSJD4+XkREYmJipFmzZhIeHi7JycmyfPlyWb58uQwePFhmzpwpDofDy7P1PZMnT5YJEyaIiEiVKlWkXbt2kp6eLtu2bZM5c+bI/PnzZfbs2dKvXz8vz9R394CIyHfffSfx8fHy+++/S+nSpaV169ZSoEAB2bFjh8ybN08+/vhj+fjjj+W+++7z9lR90rvvvitDhw4VpZTUqlVLmjRpIhcvXpRvv/1WJk2aJHPnzpWkpCSpVq2a+x9ceUi5cuWUiKg5c+ZkyVNTU1Xfvn2ViKgCBQqovXv3emoKxsaOHatERI0dO9al+y1dulSJiIqIiFDbtm1z5qdPn1a1a9dWIqJGjBjh9nm2aNHCbcfU6devn/Z75yktWrRQIqISExNdut/UqVOViKjSpUurAwcOOPMDBw6oUqVKKRFR06dPd/NszbD+Wf+mfHH9K+Ufe+Dbb79VZ86cseQpKSmqWbNmSkRUr1693DTLv9Zmv3793HZMneyuyezKXCsHDx40vk9GRoZKTExUqampltt27dqlSpQooUREzZw5040zNefr63/t2rWqe/fuav369ZbbFixYoAoUKKBERP3zn/902zxZ/3+ZOHGiGj58uNq3b1+W/Nq1a+qpp55SIqKCgoLUzz//7ObZmvP1PaCUUnXq1FEiogYPHqyuXbvmzNPT09Xzzz+vRERFRUVpn6eygz3wl99++00FBQUpEVFz587NctuFCxdU69atlYioNm3auHm2f8r1jyZCQkLknXfekfDwcElPT5clS5bk9hTcZuLEiSIiMmbMGKlXr54zj46OlnfffVdERN5++205f/68V+bnyzIyMuT1118XEZHXX39dbrvtNudtt912m/O2V199VTIyMrwyRx3WP9whv65/Ed/aA40bN5aiRYta8mLFijn3x6pVq3J7Wn4h8885QkJCLLfVrl1bhg4dKiJ57/z7yvqPi4uTTz75RJo3b265rWfPns5fMZ03b14uz8w/PPPMM/LGG29I5cqVs+SBgYEyZcoUqVKlily7dk0WLFjgpRna85U9cObMGdm1a5eIiLzyyisSGBjovC0gIEDGjRsnoaGhcu7cOfnpp5+8NU2ftXHjRrl27ZrUqFHD8psWkZGRMnbsWBER2bx5s0ce3yu/2xcRESFVq1YVEcny++6ZfyMhIjJnzhxp0qSJFC5c2PJ78cePH5fhw4dL9erVJSwsTCIjI6Vhw4by9ttvyx9//KF9zNTUVBk3bpxUrlxZgoODpWTJktKvXz/59ddfs/U1HDt2TLZs2SIiIn369LHc3qxZMylTpoxcvXpV/v3vf2frMdxhyZIlMmjQIKlVq5YUKVLE+bcZDz30kOzdu/em99+5c6d069ZNihcvLqGhoVKnTh2ZNm2apKen295n27ZtkpCQIGXLlpXg4GApWrSotGnTxq3nYfPmzfLbb79JcHCwdO/e3XJ79+7dJSgoSI4fPy7fffed2x7XHVj/uYf1n/fWv4hv7IGbKVjwz7+QCg4O9sjxTaSlpcn8+fMlISFBqlWrJoUKFZLQ0FCpWrWqPPHEE3L8
+PGbHiMpKUlat24tRYsWlbCwMGnUqJF89NFHN7zP2rVrpVu3blKyZEkJCgqSEiVKSNeuXT32RkYnL5x/O/6w/uvWrSsiIkeOHPHI8U346/oPCAiQOnXqiIh3z/+N+MIecOW5JTo6OluPkVO+vAd0PzzV8dS599ofx124cEFE9Avw8ccfl0GDBknBggWlQ4cO0rhxY+eGWr9+vdSqVUumTp0qV65ckfj4eImNjZX9+/fL448/Lh06dLA0L7p8+bLExcXJ+PHj5cSJE9K6dWtp3ry5rFy5UurVqycHDx60nWfmH7b/vaHQDz/8ICIiRYsWzfLpz/UaNGiQZaw33H///fJ///d/EhoaKnFxcdKmTRsJCAiQOXPmSP369WXTpk229/3+++/lzjvvlB9++EFatWold911l+zdu1eefPJJ6dWrl7ZBy7Rp06RRo0by8ccfS7FixaRTp05Ss2ZNWbdunXTo0EFeeukll+af+WT690YGmee0Zs2a2k0UGhoqNWvWzDI2L2H95w7Wf95c/yL5fw/cyO+//+5sRtOpUyfj+7nbyZMn5cEHH5QVK1ZIkSJFpG3bthIXFycXL16U6dOnyx133CG//PKL7f2XLl0qcXFxcuzYMWnTpo00bNhQtm3bJn379pURI0Zo7/P000/LPffcI8uWLZOyZctKly5dpEKFCrJs2TJp3ry5zJkzx3j+hw4dcu4BV5rL7N+/3/lbMN48/zfiy+tfROTnn38WEZGSJUu6dD938tf1L5I3zv/N5Pc9EBER4fwNjOeffz7LY2ZkZMi4ceMkNTVV2rVrJ2XKlMnWOcopX94DmT9cSU5Oln/+859Zbrt48aKMHz9eRESGDBli/Hgu8cgvhiv7v31QSqmdO3eqgIAAJSLqww8/dOYiokREFSpUSG3evNlyvxMnTqhixYoph8Oh3n33XZWenu68LSUlRcXFxSkRUePHj89yv6efflqJiKpWrZo6duyYM7906ZLq3Lmz83F1f/tg93W89dZbSkTUHXfcYXsOnnjiCSUiqkePHrZjXJGdvwFdsGCBunjxYpYsIyNDvfPOO0pEVM2aNVVGRkaW2zP/xkJE1KOPPqrS0tKct/3nP/9RxYsX1/5t2VdffaUcDoeKjo5WSUlJWW7btWuXuvXWW5WIqHXr1mW57UZ/a5E5j7/fNnz4cCUiqkuXLrZfe6dOnZSIqKefftp2jKew/ln/1/O39a+U7++B661cuVL169dPPfjgg6p169YqMjJSiYhq27atOnfu3A3Okmtc/fu3CxcuqGXLlqmrV69mya9du6aeeeYZJSKqffv2lvtlrkkRURMnTsxy27p161RoaKgSEfXVV19luW3WrFlKRFSlSpXUzp07s9yWlJSkIiMjVVBQkOXvNu3+/u3gwYPOedzob+OmTZum+vXrp3r37q2aNWumChQooAICAtSYMWNs7+Np/rT+dfMsXLiwEhH11ltvGd/vZlj/Zr788kslIsrhcFjmkZv8YQ/s2bNHVahQQcn/+pF07txZdevWTd12220qKChIPfjgg+r8+fMGZ8sMeyCrzz77TIWHhysRUbVq1VL33Xefat++vSpSpIgqUqSImjBhguU9nrvkagF97tw5tWLFClWxYkUlIqpUqVJZ3txmnqSXXnpJe8zRo0crEVFDhw7V3n706FEVGBioihcv7jxhly9fdr6Z+fLLLy33OXHihAoJCbHdOHFxcapq1apqyZIlWfIJEyYoEVGxsbG25+DZZ59VIqJat25tO8YV7m6i1KRJEyUiavfu3VnyzA1asmRJbeOD6dOnKxFRlStXzpI3btxYiYj65JNPtI+3aNEiJSKqe/fuWfIbFRBVq1ZVVatWVd99912W/B//+IcSEZWQkGD79fXp00fJ/5o75DbWP+v/7/xp/Svl+3vgepkN3a7/16dPH/Xbb7/Z3ic73N1AplSpUiogIEBduHAhS565JuvWrau934gRI5SIqPj4eGeWnp7ubF63detW
7f0mTZqkRKzNBe3ePB09etS5B44ePWr7dVz/BlhEVGBgoJo4caK6fPnyjb58j/Kn9X+9tLQ01apVKyUiqnbt2pY37jnB+r+5Y8eOOefhref+TP6yB06ePOlsWHX9vxo1aqgPPvjA7vRkC3vAauvWrc71dP2/1q1bWz6wcCePF9B2/ypWrKh27NiRdTL/uy05OVl7zFq1aikRURs2bLB93Bo1aigRcXb127hxoxIRFR0dbXufzE9qXOm+l58KiJ9//llNnz5dDRs2TD300EOqX79+ql+/fs6fmv39SSFzgw4bNkx7vAsXLji/V5k/yTt9+rRyOBwqNDTU9gXz1KlTzsLketnp9pfXCwjWP+v/7/xp/Svl+3tA59q1a+qXX35Rb7zxhipSpIgqWrSo5bcRciK7b5527Nih3njjDTV06FA1YMAA5x7I7FS9ffv2LOMz1+TUqVO1x9u1a5cSERUaGqr++OMPpdSfb2Iyv692vv/+eyUiqkmTJlny7HRg1bl06ZLatWuXeuyxx1RAQIC6/fbbs3zalJv8cf0rpdTAgQOViKhixYq5vbsy6//Gzp8/r+rWratERDVq1MhtnZ+zyx/2wMaNG1WJEiVUqVKl1Mcff6x+++03dfbsWbV8+XJVuXJlJSLqoYcecumYN8IeyOrdd99VgYGBqlGjRmrdunXq/Pnz6siRI+qdd95RhQoVUgEBAR67okquXgc68w/J77zzTmnbtq2zycfflS9fXpsfOHBARETb9fHvTp8+LVWqVJGjR4/e8JgiYvs3nDcSGRkpIiKXLl2yHXPx4kURESlUqJDLx3eH9PR0GTp0qLz33nvav9fMlPl3KH9nd14iIyOlWLFicubMGTl69KiUKlVKDh48KEopSU1NvWljhdOnT5t/ETbyw/kXYf2LsP7/zp/Wv4jv7gGdwMBAqVixogwfPlxiY2OlSZMm8sADD8jevXu9cj30S5cuyYMPPihLly694ThX90BmnpqaKmfOnJESJUo4vzf79++/6XV/3bEHdMLCwqR27dry9ttvS7ly5WTUqFHyxBNPyCeffOKRxzPhT+t/2LBh8sEHH0iRIkVk9erVUqVKFbccN7v8af1fvHhR2rVrJz/88IPUrVtXvvrqK+MmS57mq3vg3Llz0rVrV0lJSZHNmzdL48aNnbd17NhRatSoIbVr15YPP/xQHnjgAbn77rtdfoyc8uU98M0338ijjz4qJUuWlFWrVknhwoVF5M/3PI8++qgULVpUevfuLU8++aR06dJFoqKicvyY1/N4AT1o0CDn5QxM2b3RyLwcS48ePSQ8PPyGxyhWrJhLj+mqzI14ow6HmbfdaNN60rRp02TmzJkSExMjb775pjRt2lRuueUW55Nqnz595P/+7/9uWFzcTOZ9M783ERER2q7A7pZ5Tm/UPdHb51+E9X/92NzG+vf++hfx3T1wM40bN5YaNWrI7t27ZevWrUZv+NztmWeekaVLl0q1atXktddek4YNG0p0dLQEBQWJiEjTpk1l8+bNbt0DMTEx0qZNmxveJzc60g4YMEBGjRoly5cvl/T0dClQoIDHH1PHX9b/iBEj5K233pKoqChZtWqVswu3N/nL+r906ZJ06NBBNm3aJHXq1JHVq1dLkSJF3PoYOeGre2DFihVy+vRpqVixYpbiOVOFChWkcePGkpiYKGvWrPFKAe3LeyCzqVv79u2dxfP1evToIf369ZPz58/Lli1bJD4+PsePeT2PF9DuVKZMGfn5559l9OjRzg6/N1O6dGkRkRt2MHS1u6HIX5doOHPmjBw8eFD7U5qtW7eKiGS5Rm5uWrRokYiIvPfee9pOpJldGu3YdSX8/fff5cyZMyIicuutt4qIODsMOhwO+fDDDyUgwLMN3jPP6e7du+XKlSuWn7SmpqbK7t27s4zN71j/rmH9+9b6F8lbe8BE5hu8U6dOeeT4N5O5BxYuXOi8rM31srsHMs9XSEiI801q5h4oVqyYyx2bPSHz
3F+7dk3OnTvn9R+ouENeXf+jRo2SN998UwoXLiyrVq0ynpun+cP6v3z5snTo0EHWr18vderUkbVr1/rEWreTl/ZA5g+wb/RbXpmF3dmzZ10+vjv48h642fkvWLCghIeHy7Vr1zxy/r12GavsaNeunYj8tSBM1K9fXyIiIiQlJUVWrVpluf3kyZPa/GZuvfVWadiwoYiIfPzxx5bbN27cKEeOHJHg4GBp3769y8d3h8wFU65cOcttu3fvlh07dtzw/osXL5arV69a8szrv1WqVMn5xFSqVCmpU6eO/P777/LVV1/lcOY316RJE4mJiZGrV6/Kp59+arn9008/lWvXrkmpUqW0PxnMj1j/rmH9+9b6F8lbe+BmUlJSZOfOnSIiXvtV1hvtgZUrV0pKSsoN7z9//nxtPm/ePBH583rvmb+CmfnJRnJysvOHN960du1aEfnzzVzRokW9PBv3yIvrf8yYMTJ58mQpXLiwrF692vm6kBf4+vpPTU2Vjh07SlJSkrN49tb1hnNLXtoDma//e/bskfPnz1tuT0tLk+3bt4uI+/5MwlW+vAcyz/93332nvX3v3r3y3//+V0Q8dP498pfVKnuXPpD/NQ+wc+TIERUVFaUKFCigpkyZom3Wc+DAAfXRRx9lyTIv+VKjRg11/PhxZ3758mXVtWtX5+O62n1v6dKlSkRURESE2rZtmzNPSUlRtWvXViLWTnNKZf/SBK42UcpsivCPf/wjS6v/48ePq3r16jnn8Pfv0fWX8Rk6dKizQYBSSiUnJ6tbbrlFiYh65513stzv888/VyKiSpQooT7//HPLfDIyMtS3336rVq5cmSXPThdipf7qfFu6dGl14MABZ37gwAFVunRpJSJq+vTpNzxHnsL6Z/3/nT+tf6V8fw/s3r1bzZ8/X9uoZ+/evaply5ZKRNSdd95puT0xMfGmX6uOqw1k6tSpo0REvfzyy1nyPXv2qPLlyzvn8Pe1d/0lTF5//fUst23YsEGFhYUpEVErVqzIctv1Hep1TX7++OMPtXbtWsvlabLTgXX58uUqMTFRe4mStWvXOi8b561LWfn6+ldKqeeee06JiIqKilLff/+98dfJ+s/5+k9NTVX33HOPEhFVp04ddfr0aZNTkqt8fQ+cOnXKeQml++67T/3+++/O265evaoee+wxJfLnVQH279+f5b7sgZzvgRUrVjjnOGnSpCyvBSdPnlSxsbFKRFSVKlWyvAd0l3xVQCv153XEoqOjnW9U4+LiVEJCgurYsaOzjXnjxo2z3OfixYuqUaNGzjf79957r7rvvvtUTEyMKlasmOrbt6/txrnZ15F5rdvAwEDVtm1b1b17dxUVFaVE/uxQrLuMxv79+51fq+mlCZT6q4CIjIxUjRs3tv2X2f7/22+/VUFBQUrkz2uy3X///apt27YqNDRU1axZ0/mkYVdAPPzwwyokJETddtttqlevXqpNmzbO43Xt2lX7xmXatGmqYMGCzsfs0KGD6tOnj4qPj3d2+xs9enSW+2TnOrhK/bkRM7+GsLAw1alTJ9WpUyfnxu7Ro4dHNo0J1j/r35/Xv1K+vwcy3wCFh4erZs2aqV69eqlu3bqpBg0aOK9vWr16dXX48GHLMdeuXatERBUsWND43Cj119qMjo6+4R6YPXu2UkqpTz/9VDkcDiXy5yWFevXqpeLi4lRgYKCKi4tTTZs2veGbpyeeeEIFBASomjVrqt69e6sWLVo4vza7LvUjR450fh9r1qypOnfurHr16qVatmzpfG6YMWOG9hy7cg3QzOeD4sWLq9atW6uEhATVoUMHVaVKFed9unbtqq5cueLSOXYXX1//y5Ytc863QYMGzq6+f/+n+yEq6z/n6/+pp55y3taxY0fb8595LrzB1/eAUkp99NFHztf84sWLq/bt26vOnTs7f4gdEBBg+X4rxR5wxx5QSqkhQ4Y4b69UqZLq1q2bio+PV4UKFVIif/5wT/cBhDvkuwJaqT9/svDC
Cy+oevXqOS/Kfeutt6qmTZuqsWPHql27dlnuc+nSJfXCCy+oihUrqqCgIHXLLbeohIQEdfDgQecLcXYKCKWUWrhwobrrrrtUoUKFVGhoqKpVq5Z67bXXbC9nk3k92DZt2tz0a71e5jxv9u/6n0zt2rVLderUSZUsWVKFhISoypUrq1GjRqkLFy44N6JdATFnzhy1fft2de+996pixYqp4OBgVbNmTfXmm2+qtLQ023n++OOPavDgwapy5coqJCREhYWFqQoVKqg2bdqot956y3JZkewWEEr9ed25mTNnqgYNGqjw8HAVHh6uGjZsqGbOnOmxi6ebYP2z/v15/Svl+3vg1KlTasKECapt27aqfPnyKjw8XAUFBamYmBgVHx+vZsyYYVu8ZV4Lc8iQIUbnJdP1vx1xo3/Xfx3r169XrVq1UtHR0SosLEzVqlVLTZgwQV29etV27V2fr127VrVq1UoVLlxYhYaGqgYNGqi5c+fecJ7ffPONSkhIUOXKlVPBwcEqMjJSValSRXXp0kW9//776uzZs1nGZ+fN065du9SoUaNU06ZNVenSpVVwcLDzB17333+/Wr58uUvn1t18ff3PmTPHaC2WK1fOckzWf87Xv+m5cNf1grPD1/dAph07dqj+/furChUqqODgYBUUFKTKlSunEhISbIs39kDO90Cmzz77TN17772qZMmSKjAwUIWGhqoaNWqop556Sh05csT01LrMoVQOWq8hW/7xj3/IBx98INu3b5c77rjD29MBchXrH/4uPj5eNm3aJPv375eYmBhvTwfIVax/+Dv2QP6Xr5qI+YrVq1dLnz59KB7gl1j/8GepqamyceNGeeqpp3jjBL/D+oe/Yw/4Bj6BBgAAAADAAJ9AAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYKevLgDofDk4cHbkop5dXHZw/A27y5B1j/8DZeA+DveA2AP/PU+ucTaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAQEFvT8BXBQYGavMePXpo8zFjxliy2rVra8c6HA5tnpqaqs1ff/11SzZx4kTt2LS0NG0O/9KiRQtt/vXXX3vsMZ966ilLdujQIe3Yzz//3GPzAAAAAOzwCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGDAoZRSHju4TbdoX2LXbXvatGnafMiQIcbHPnfunDZPTk7W5rfffrs2Dw8Pt2Rly5bVjj127JjZ5PIJDy5vI3l9D+jWhojI/Pnztfm9997rsbnoztWlS5e0Y0eOHKnN33vvPbfOyRd4cw/k9fUP38drgG+Kjo7W5gEB+s+FatasackWLFigHWu3Zh555BFtvnTpUm2eV/AaAFfUqlVLmw8bNkybr1y50pJ98sknbp1TTnhq/fMJNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAzQRCyH3n//fW0+YMAAl47z8ssvW7K33npLO/bs2bPavF69etp8y5Ytlmz16tXasW3btrWbYr5EA5kbq1Spkjbfs2dPjo997do1bX748GFtrmv+Ehoaqh1buHBhbb5x40Zt3qFDB23uDlFRUZbs4Ycf1o6dPn26NrdrluYONJDJHwoVKmTJ7Jr8Pfvss9p827Zt2jw1NdWSRUREaMempKRo802bNmnz06dPa/O8gteA
/EO33l955RXtWLuGrEFBQcaPZ/e9sVszV69e1eatWrWyZN9++63xPDyN1wDfU7JkSW1ep04dbd6jRw9LFhMTox3bpk0bbV6wYEFtrltfSUlJ2rFxcXHavECBAtpc19Bs586d2rF2aCIGAAAAAIAXUUADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAgL6lmp8LDg7W5tOmTbNkDz30kHasXSfTSZMmafM333zTkmVkZNhNUeunn37S5rruqXad8Bo1aqTNv//+e5fmArzxxhva/IUXXjA+Rrly5bT5F198oc3tuke6g66LpYjIY489ZsmaN2+uHfvDDz9o85UrV2Z/Ysi24sWLa/MGDRpo8xIlSliyu+++Wzu2aNGi2jw+Pl6b67rV2nWwDQwM1OaetHz5cm3euXPnXJ4JfFX58uUt2eOPP577E7Fh995Qd/WSvNSFG/nD7bffbsmGDRumHXvfffdpc7srN+icP39em9tdcUG3P0VEypYta8kuXLigHWv32vX8889rc12NVaZMGe3Y3MYn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cGu0bt1am//jH/8wPka3bt20+caNG7M1JxOpqanaXNcl9ZdffjEeK0IXbrjOHWv98OHD2rx27dravHv37jl+TDvNmjXT5nfddZclU0p5bB74k65jqYhI3bp1LZnd965x48bavGbNmtmfmI+69957tfnAgQMt2QcffODp6cBP2HWiz0vHPn78uFuOA//wzDPPaPOxY8dasqCgIO3YM2fOaPPVq1dr88WLFxuPtbuK0LvvvqvNdVcoGTVqlHbsAw88oM3HjBmjzevXr6/N8wI+gQYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAw4lAfbxXqye6I73HHHHdp806ZN2jwkJMSSVatWTTt237592Z5Xbjh9+rQ2L1asmDa3+17OmTPHkj300EPZn5ibebsbcl7ZA8WLF9fmc+fO1eZt2rQxPnZSUpI2t+vae/nyZeNj5yX/7//9P23++OOPWzK7dbd582Zt3rx582zP62a8uQc8uf7tnqfvvPNOjz2mO5w6dUqbHzx40JKdPHlSO/bNN9/U5j/++KM21z0nT5482W6KLnnkkUcs2XvvveeWY7sDrwH5R3h4uCVbv369dqxdF347+/fvt2SVKlXSjnV1zSQnJ1syu87/Fy5ccOnY7uCrrwF5nd17oEWLFmnz4OBgS2bXKbtv377a3O41wx22bt2qzd9//31L1rBhQ+1Yu3mPHDlSm9u973KFp9Y/n0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAQEFvTyA3BATof07QtWtXba77Q34RkbFjx1qyvN4szM7333+vzdu2bavNr1y5os0feOABS7Zz507t2GnTphnODu7WoEEDbe5KszA7dk3B8muzME+Kjo729hR8RmhoaI6Pce7cOW1u11zvk08+sWQ///yzS4959epVba7bL3/88Yd2bEJCgjafNWuWNq9QoYLh7OzZNXPSNZIEsuPSpUuWrFWrVtqxTZo00ealS5fW5suWLbNkv/32mwuzs1e5cmVLFhMTox3rjSZi8KyoqChtPnHiRG1uV2N88cUXlqxPnz7asRcvXjSbnBvpXv9ERGJjYy2Z3WuUXYNJdzQLy218Ag0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABjwiy7c999/vzZ//vnntfnRo0e1+TvvvOO2OXnbiBEjtHn16tW1+T//+U9t/uKLL1qyFi1aaMfShds3LVmyxNtTgB8aPny4Np8wYYIls7vqwKuvvqrNT548mf2JZVO7du0sWffu3bVj+/btq80LFsz5S/pHH32kzYcNG6bNr127luPHBOzYdcpfu3atNr/vvvu0+ZdffumuKVls2rTJkuXXK7TAdcWKFdPm
rl79YOnSpZbMG922y5cvr807d+6szevXr2/JVqxYoR07atSobM8rr+ETaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwIBfdOHu1q2bS+PXrVunzc+ePeuG2eRtdh2VJ06cqM0rVapkyXr06KEdW7RoUW3uD+fV2+w6oDscjhwfe/369Tk+Rn5gd64CAqw/h8zIyDAei+xJTEzU5k2bNs3lmeiVKVNGm9s9P44dO9aSFSpUyK1zMtGxY0dtvnjxYm3+n//8x5IdOnTInVOCH+vSpYs2t3tf16dPHw/ORk+3B+A/AgMDtXlQUJBLxylXrpw7pmOsd+/e2nzSpEnaPCYmRpvrOmtPnTo1+xPLJ3g3BwAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGPCLLty1atVyafyrr77qoZnkHYcPH9bmL730kjZPS0vT5j/++KMls+uC6Y6Oz8ie3bt3a/MKFSrk8kzyvvDwcG1etmxZba7ruK2U0o61+z7A99x9993a/I033sjxsa9du6bNf/rpJ22+ceNGS3b16lXt2LCwMG3+4osvavPKlStbsjFjxmjHvvfee9ocN1ayZElt/tBDD2nzTp06WbIGDRpox9p1kLa78sYXX3xhyS5duqQda/dcatfp/bnnnrNkNWvW1I51B7urIthdRWH27NnafNiwYW6bE/KfPXv2aPN///vf2vzee+/V5gkJCZZszZo12rG653QR+/d0PXv2tGTjxo3Tjj116pQ279ChgzZftWqVNvd1fAINAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAA37RRMyuedXnn3+uze0aAviS1NRUl3I7ukYGdufbruHJ5MmTXXpMuC4lJcXbU8g37Br22DX+cAVNlPzHokWLtLldc7GQkBBLZtdwzG4/Hzp0yGxy2VC4cGFt3rt3b0v2zDPPaMfaNa/86quvsj8xP/D9999rc7vnKh27xoZ2Tbo+/vhjbb5jxw5Ldv78ee3YqKgobV6nTh1trnvvYDdvd1i4cKE2X7ZsmTb/5JNPPDYX+J65c+dq89jYWG1esWJFS/bPf/5TO9auQVmXLl20eenSpS3ZzJkztWOHDx+uza9cuaLN/RWfQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAb8ogu3XRdHT3Z39Bf79u2zZHbntVu3btqcLtyet2TJEm0+YMCAHB+7R48e2vz111/P8bG9we7rAVxh17HUHXvOG+w6Les6uZYpU0Y71u65ni7cN1aqVClt7o33MLfffnuuP6aO3de+a9cuba7rFq97/wK4y9KlS7W53dp9//33LVmFChW0Yx977DFtvm3bNm3+4IMPWrJ169Zpx8IMn0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGfK4Ld7Vq1SxZ6dKltWP37Nnj6engOlu2bPH2FPzW4cOHtfn8+fO1ua5jo50HHnhAmy9YsMClueS28uXLa/OEhARt7nA4tHlAgPXnkAcPHtSOzStfO+AukZGRlqxkyZLasVFRUR6eDfzFhQsXtPnAgQO1ua7jdnh4uHasXaf8L774QpsfOnRIm8O/VaxY0aXxaWlplszVTvvvvvuuNqfjtvvxCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGDA57pwFytWzJJFRER4YSb+oWHDhsZj//Of/3hwJriR5ORkbf7ss89q8+bNm2vzcuXKWbLq1atrx+o64ovknU7U//rXv7S53ddj1w0zIyPDkn3yySfasXT+R36l6zYvIjJ48GBL1r9/f+3YXbt2uXNKfuP555/X5i+//HIuzyTvKFy4sDZfv369Nte97oSEhGjH3nbbbdr8rrvu0ub333+/Nod/uOOOO7T5v//9b20eExOjzXVduP/73/9qxxYp
UkSb//jjj9oc7scn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADDgc03EkLtGjx5tyeyaRH388ceeng5cdPz4cW0+d+5cbT527FjjYxcvXjw7U/KIgQMHWrLatWu75djnzp2zZG+//bZbjg336NWrlzbv2rWrNn/88ce1+alTp9w2p7yqcePG2tyukVWHDh2Mj/37779na07+7tVXX9XmuqZDIiL9+vXL8WNu2LBBm+uaFG3ZskU79sSJE9q8U6dO2nz48OGWrEKFCnZT1AoPD9fmNWrUsGR2jfF0jSFFRLp3767Njxw5YsnatGmjHWvX0BN5S2RkpDZ//fXXLVlCQoJ2bGBgoDafNWuWNv9//+//WbIffvjBZoZ6Fy9edGk8so9PoAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMOpZTy2MEdDk8d2tYtt9xiyey62MXExGjzhx9+WJvbdc7zJXXq1NHmixcv1uaVK1e2ZHbnye68epIHl7cRb+wBd7Cbt13XVx1dd2oRkW7dumnz9evXGx/bVenp6ZbMXWtD13H7ySefdMux3cGbeyCvrH+75553331Xmz/66KPafObMmW6bkycULVpUm+s6At95553asX379tXmhQsXNp5HamqqNh8/frw2nzRpkvGxXcVrQP6h66D98ssva8fec8892lzXbduO3ffGHWvG7goXZcuWzfGxXcVrgL3HHntMm48ZM0ably5d2pKdPHlSOzY+Pl6b/+c//9HmkydPtmQjRozQjv3iiy+0uV2He3/mqfXPJ9AAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAICBgt6egLvpuuF98MEH2rHPPvusNh8+fLg2X7JkiSVLSUlxYXbeUahQIUtm1yV45MiR2jwoKEibr1ixwpI9/fTT5pNDnmTXtVC3N6ZOnaodW6RIEW2emJiozQsUKGA4O5GBAwdqc7sO8AEB1p8VZmRkGD/ejWzcuNEtx4Hn2F1FwK4jvN3zY0hIiCU7duyYduzatWu1eVRUlDavXr26JatZs6Z2bNu2bbV5gwYNtHlERIQ2d4fz589bsvvuu087ds2aNR6bB/K/S5cuWTK792O6jt0iIpUqVdLmnTt3tmR23fZd7dqr67i9bNkyl44Bz7LriD1gwABtruu2LSKyevVqS2bXEbtgQX2JNW/ePG3etWtXS7Z//37t2Pnz52tz5B4+gQYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAw4lKvtBl05uMPhqUO7pFatWtp89uzZ2rxRo0bGx16+fLk2/7//+z9tvnDhQuNj27njjju0eceOHbX5I488YsliYmK0Yw8fPqzNX375ZW0+Z84cbZ5XeHB5G8kre8BdChcubMmio6O1Y+3211133aXNDxw4YMnsvn8lS5bU5mFhYdpc932wO/ahQ4e0eZs2bbS5XZfMvMKbeyCvr/9p06Zp88cffzyXZ5L70tLStPnu3bu1+ZtvvqnNv/rqK0uWl65OwWsA/B2vAfbv1Tt06KDNP/vsM20+bNgwS2ZXY/Tp00ebJyQkaPP//ve/luz555/Xjp0xY4Y2h5Wn1j+fQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMCAXzQRsxMUFKTNR48erc0ffvhhS2bXzCg9PV2b65oEuCoiIkKbBwcHa/Pff//dki1btkw7dvz48dpc1+ApP6CBjPfYNRHr2rWrNi9SpIglc9f3T/d9OHjwoHbspEmTtPmsWbPcMpfcRgMZe3YN8FavXq3Nb7/9duNj233t7vh+2B1j586d2nzx4sWWzO41IDk5OfsTy4N4DYC/4zVAZMuWLdq8fv362vz8+fPaXPe+wa6xb2pqqjZfsGCBNn/11Vct2S+//KIdC3M0EQMAAAAAwIsooAEAAAAAMEABDQAAAACAAQpoAAAA
AAAMUEADAAAAAGDAr7twu+qWW26xZK+88op2bPfu3bV54cKFczyPU6dOafPXXntNm69cudKS7dmzJ8fzyA/owJr33HXXXdp86NChlqxbt24uHfuTTz7R5hs3brRkH330kXasXffN/IoOrK6z68597733WrK4uDiPzUN3BQURkU8//VSbr1271mNzya94DYC/4zVAJDExUZu3aNHCpeNcuXLFkp07d0479rHHHtPmS5cudekxkTN04QYAAAAAwIsooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCALtzwaXRghb+jAyv8Ga8B8He8Bog0aNBAm5cuXdql4+iuaqPrzI28gy7cAAAAAAB4EQU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADNCFGz6NDqzwd3RghT/jNQD+jtcA+DO6cAMAAAAA4EUU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYMChlFLengQAAAAAAHkdn0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAgMcK6PLly4vD4cjyLzg4WMqWLSs9e/aUDRs2eOqhXTZu3DhxOBwybtw4l+63d+9emT59uvTv319q164tBQsWFIfDIa+88opH5tm/f39xOBzSv39/jxw/U8uWLcXhcMi6des8+jiZMtfKoUOHXLrf+vXrZeLEidK9e/cs623jxo2emaiL/GEPiIgcOXJEhg4dKhUrVpTg4GCJjo6WNm3ayIoVK9w+T/aA1XfffSc9evSQmJgYCQoKktKlS8sDDzwgycnJ7p+oC3x9/aelpcnatWtl5MiR0rBhQ4mKipLAwECJiYmRTp06sf5dkJ31f/78eVm8eLEMHDhQatSoIWFhYRISEiIVKlSQhx56SH788UfPTdiAr6//6127dk3eeustadasmRQtWlRCQkLk1ltvlXbt2snChQvdNk/Wv96pU6dkzJgxUqtWLYmIiJDw8HCpWLGi9O7dW7Zt2+beybqAPcAeMJUf90BBjxz1OrGxsVKpUiURETl37pxs3bpVFi1aJIsXL5YpU6bI8OHDPT0Fj5kxY4ZMmzbN29PwW0888YTs3LnT29O4KV/eA1u2bJG2bdvK2bNnpWTJktKuXTs5c+aMJCYmyqpVq+TFF1+U8ePHe3uaPmvWrFny6KOPSnp6ulSvXl2aNWsmP//8s/zrX/+STz75RD777DNp27atV+foq+s/KSlJ4uPjRUQkJiZGmjVrJuHh4ZKcnCzLly+X5cuXy+DBg2XmzJnicDi8PFvfM3nyZJkwYYKIiFSpUkXatWsn6enpsm3bNpkzZ47Mnz9fZs+eLf369fPqPH11/Wc6evSotGnTRpKTkyU6OlpiY2MlPDxcjhw5IuvXr5fw8HDp2bOnt6fpsxITE6Vbt25y7tw5qVSpkrRt21YyMjLk0KFDsmjRIomNjZX69et7dY7sAfaAJ3lrD3i8gB40aFCWn5RcuXJFhgwZIvPmzZNRo0ZJx44dpUqVKp6ehkfUqlVLnn76aalbt67Uq1dPJk6cKB999JG3p+U34uPjpWvXrlKvXj2pV6+exMbGyuHDh709LQtf
3QNXrlyR7t27y9mzZ6Vnz54yZ84cCQ0NFZE/C+t27drJSy+9JM2aNXMWGnCfXbt2OYvnN998U5566innbXPmzJGHHnpIevXqJb/88otER0d7bZ6+uv4DAgKke/fuMmzYMGnevHmW2xYuXCgJCQkya9YsiY2Nlb59+3pplr4rPDxchg8fLg8//LBUrlzZmaelpcno0aNl6tSpMnjw4Cxv3r3BV9e/iEhqaqrEx8fLnj17ZNy4cfLss89KYGCg8/bLly/Lvn37vDhD35acnCwdOnSQAgUKyJIlS6Rr165Zbv/tt9/k8uXLXprdX9gD7AFP8eYeyPW/gQ4JCZF33nlHwsPDJT09XZYsWZLbU3CbQYMGyeTJk6VPnz5SrVo1CQjgT8pz0+TJk2Xs2LFy7733SunSpb09HWO+sgeWLl0qR44ckaioKJk5c6azeBYRadiwobz44osiIvLSSy95a4o+7Z133pH09HRp0aJFluJZRGTAgAHStWtXOX/+fJ77LRlfWf9xcXHyySefWIpnEZGePXs63zDOmzcvl2fmH5555hl54403shTPIiKBgYEyZcoUqVKlily7dk0WLFjgpRnq+cr6FxF59dVXZc+ePTJ48GAZO3ZslsJBRCQsLEzuuOMO70zODzz88MOSmpoqs2bNshQOIn/+ZkyFChW8MLMbYw/AXby5B7xS8UVEREjVqlVFRLL8vnvm30iI/PkJSpMmTaRw4cKW34s/fvy4DB8+XKpXry5hYWESGRkpDRs2lLffflv++OMP7WOmpqbKuHHjpHLlyhIcHCwlS5aUfv36ya+//uqxr9Pb0tLSZP78+ZKQkCDVqlWTQoUKSWhoqFStWlWeeOIJOX78+E2PkZSUJK1bt5aiRYtKWFiYNGrU6Kafsq9du1a6desmJUuWlKCgIClRooR07dpVNm/e7K4vLd/zhT2wZcsWERGpX7++REVFWW6/5557RETkm2++kd9++y1bj5FTvrwHMs9/5nn+u8z8k08+cdtjuosvrP+bqVu3roj82SPAW3x5/d9IQECA1KlTR0S8e/7t+ML6T0tLkxkzZoiIyMiRI7N1DE/z5fW/Y8cO2bBhg5QpU0Z69erltuPmFvZA7mAPeI7XPjK9cOGCiIgEBwdbbnv88cdl0KBBUrBgQenQoYM0btzYuaHWr18vtWrVkqlTp8qVK1ckPj5eYmNjZf/+/fL4449Lhw4dJC0tLcvxLl++LHFxcTJ+/Hg5ceKEtG7dWpo3by4rV66UevXqycGDB23nmfmH7XPnznXfF59LTp48KQ8++KCsWLFCihQpIm3btpW4uDi5ePGiTJ8+Xe644w755ZdfbO+/dOlSiYuLk2PHjkmbNm2kYcOGsm3bNunbt6+MGDFCe5+nn35a7rnnHlm2bJmULVtWunTpIhUqVJBly5ZJ8+bNZc6cOcbzP3TokPPJNLuNBfKy/L4HLl68KCIixYoV094v89eGlVKyfft2s5PiZr68B0zP/969e+XSpUvGj5lb8vv6v5mff/5ZRERKlizp0v3cyZfX/83khfN/I/l9/W/fvl1SUlKkVKlSUqlSJfnxxx9l/PjxMmTIEBkzZoysWLFCMjIycniWcsaX1//KlStFRKR58+bicDhk5cqVMmrUKBkyZIi8/PLLXm0eZoo94HnsAQ9SHlKuXDklImrOnDmW23bu3KkCAgKUiKgPP/zQmYuIEhFVqFAhtXnzZsv9Tpw4oYoVK6YcDod69913VXp6uvO2lJQUFRcXp0REjR8/Psv9nn76aSUiqlq1aurYsWPO/NKlS6pz587Oxx07dqxLX8ff9evXT4mIevnll286Njsyj9+vXz+j8RcuXFDLli1TV69ezZJfu3ZNPfPMM0pEVPv27S33a9GihfOcTJw4Mctt69atU6GhoUpE1FdffZXltlmzZikRUZUqVVI7d+7McltSUpKKjIxUQUFBat++fVluyzzHBw8ezJIfPHjQOY+/36aTeZwNGzbcdGxu
8PU98NxzzykRUQ0aNNB+/Zs3b3Ye9+2339aOcRV74C+xsbFKRNTTTz+t/dpfffVV533/85//aMd4kq+v/xs5ceKEKly4sBIR9dZbbxnf72ZY/2a+/PJLJSLK4XBY5pFbfH39Z36vGzVqpEaPHq0cDofzOJn/6tatqw4fPmx4xm6O9f+XPn36KBFRjz76qLrnnnss515EVEJCgrpy5YrJqfII9gB74Hq+tgdytYA+d+6cWrFihapYsaISEVWqVCl18eLFvybzvy/4pZde0h5z9OjRSkTU0KFDtbcfPXpUBQYGquLFi6uMjAyllFKXL19WkZGRSkTUl19+abnPiRMnVEhIiO3GiYuLU1WrVlVLliy56dec1wromylVqpQKCAhQFy5cyJJnbpy6detq7zdixAglIio+Pt6Zpaenq1KlSikRUVu3btXeb9KkSUpE1IgRI7Lkdhvn6NGjqmrVqqpq1arq6NGjN/168kMB7Ut7YO3atc43qdu3b7fcL3O96p6As4s98JcXXnhBiYgqWbKkunTpUpbbrl27pipXruw8/5s2bbrhefAEX1//dtLS0lSrVq2UiKjatWtb3rjkBOv/5o4dO+acx+DBg43u4wm+vv4zf0AXGBioREQ99thjau/ever8+fNq9erVqkqVKkpEVK1atdS1a9dudrqMsP7/0qZNG+f5DwsLU9OnT1fHjh1Tp06dUh9++KEqVKgQe4A9cFPsgezzeAFt969ixYpqx44dWSfzv9uSk5O1x6xVq9ZNC6QaNWooEVF79+5VSim1ceNGJSIqOjra9j6dOnWy3TiuyKsF9I4dO9Qbb7yhhg4dqgYMGKD69eun+vXrp0qUKKFExFL8ZG6cqVOnao+3a9cuJSIqNDRU/fHHH0oppbZu3er8vtr5/vvvlYioJk2aZMntNo6r8moB7ct74K677lIiom699Vb1+eefq3Pnzqn9+/erESNGKIfD4Xxhee2111w6rh32wF9+++03FR0drURENWvWTH3//ffq4sWLaseOHap169bOcy8i6ttvvzU+rrv4w/rXGThwoBIRVaxYMecc3IX1f2Pnz59XdevWVSJ/fiqUmpqao+PlhK+v/4kTJzrn27t3b8vthw8fdhYl8+bNMz7ujbD+/9K6dWvn+X/vvfcsty9cuFCJiAoICMjxvsou9gB74O98aQ/k6nWgM/+Q/M4775S2bdtKwYL6hy9fvrw2P3DggIiItuvp350+fVqqVKkiR48eveExRURuu+22mx4vP7p06ZI8+OCDsnTp0huOy/w7lL+zOy+ZeWpqqpw5c0ZKlCjh/N7s37//ptc8PX369M2m7lN8eQ8sXrxYunXrJt9884106tQpy21PPvmkbNy4UbZu3SpFixbN1vFzypf3wC233CIrVqyQbt26ycaNG6VRo0bO20JDQ2XGjBkyaNAgERGvnX8R317/fzds2DD54IMPpEiRIrJ69WqvX5rFl9f/3128eFHatWsnP/zwg9StW1e++uorCQkJcfvjuMpX139kZKTz/0OGDLHcXrZsWenQoYN8+umnsmbNGnnwwQddfoyc8uX1n3n+AwMD5aGHHrLcfv/998tjjz0mKSkpkpiYKAMGDMjxY2YXe4A98He+sAdy/TrQJq6/HM71Mv8Yv0ePHhIeHn7DY9g11vEnzzzzjCxdulSqVasmr732mjRs2FCio6MlKChIRESaNm0qmzdvFqVUth8j876Z35uYmBhp06bNDe/jzWvSeoMv74ESJUrIhg0bZM2aNfL111/LmTNn5JZbbpHOnTtLgwYNpFSpUiIiUrt2bY/PRcfX90CjRo3k559/lqVLl8qWLVskNTVVKlasKL169ZLU1FQR+fN6ud68lIkvr//rjRgxQt566y2JioqSVatWObtwe5Ovr/9Mly5dkg4dOsimTZukTp06snr1ailSpIhbHyO7fHX9X/+cYvf8kpmfOHHCo3Ox48vrP/PclilTxrYIve222yQlJcVr5z8T
e4A98He+sAc8XkC7U5kyZeTnn3+W0aNHS4MGDYzuk3l94Bt18PTFDs8iIosWLRIRkYULFzov6XG9zC6lduy6Emaer5CQEOcTVJkyZUTkzyes/NixPL/Ii3vA4XBIfHy8xMfHZ8n3798vJ06ckGLFikm9evWyffyc8Ic9EBoaKn369JE+ffpkyT/44AMREbn77rulQIECuTYfT8qL619EZNSoUfLmm29K4cKFZdWqVcZz8zR/WP+XL1+WDh06yPr166VOnTqydu1an/0Bel5a//Xq1ROHwyFKKUlJSXF+/6+XkpIiIn9essgbfHn9169fX0REzpw5YzvG2+ffE9gDrmEPeO78e+0yVtnRrl07EflrQZioX7++RERESEpKiqxatcpy+8mTJ7W5Lzh79qyIiJQrV85y28qVK50Ly878+fO1+bx580REpFmzZs6f+mT+VCs5OVl2796dk2njBvLTHpgyZYqIiAwePNj5087c5q974I8//pBp06aJiMjQoUO9Ohd3yovrf8yYMTJ58mQpXLiwrF69Who2bJjtY7mbr6//1NRU6dixoyQlJTmLZ1/+Dae8tP5jYmKkWbNmIiKyZs0ay+1paWmSlJQkIpLlz0tyky+v//bt20tYWJicP39etmzZYrl93759cvjwYRHx3vn3BPaAa9gDHtwDbv+r6v/JzqU/5H9/DG7nyJEjKioqShUoUEBNmTJF2930wIED6qOPPsqSDR8+XImIqlGjhjp+/Lgzv3z5suratavzcXOrC3diYuJNv9YbHd+0eUCdOnW089mzZ48qX768cw6JiYlZbr++ff3rr7+e5bYNGzaosLAwJSJqxYoVWW6bPn26EhFVuXJlbYOHP/74Q61du9ZyaQJ/6sJ9M/ltD+zevVudP38+S5aWlqYmTJigHA6HqlSpUpYOm5nYA+7ZA1u2bFFpaWlZsjNnzqgePXrYNjbJLf6w/jMv5RYVFaW+//5746+T9Z/z9Z+amuq8dEmdOnXU6dOnTU5JrvGH9b9mzRolIqpIkSJZvqdpaWnq8ccfVyKiIiMj1W+//Zblfqx/9zz/Z16JoX79+lkuzZSSkqKaN2+uRP5spuct7AH2wN/50h7IVwW0Un9eRyyz82yJEiVUXFycSkhIUB07dnS2xW/cuHGW+1y8eFE1atRIiYiKiIhQ9957r7rvvvtUTEyMKlasmOrbt6/txrnR17Ft2zbVuHFj57/Med16661Z8us3q1J/Xf6nYMGCxudGqb82TnR0dJbj//3f7NmzlVJKffrpp87r0tWuXVv16tVLxcXFqcDAQBUXF6eaNm16w43zxBNPqICAAFWzZk3Vu3dv1aJFC+d1+4YNG6ad48iRI53fx5o1a6rOnTurXr16qZYtW6qoqCglImrGjBnac+zqNUBnz56d5esOCgpyPkFmZl26dHHpHLuTP+yBYcOGqZCQEBUbG6vuv/9+1blzZ3XLLbcokT+vA2jX+ZA94J49cPvtt6vo6GgVFxenevfureLj41V4eLgSEdWxY8c80YXYV9f/smXLnPNt0KCBs6vp3//9/XIdSrH+3bH+n3rqKedtHTt2tD3/mecit/n6+s/08ssvO9dy06ZNVbdu3ZxvzENDQ9UXX3xhuQ/r3z3P/1evXlXt2rVTIn9eN7l169aqffv2qmjRokpEVLly5dSBAwdcOsfuxB5gD/jyHsh3BbRSSp08eVK98MILql69es6Lct96662qadOmauzYsWrXrl2W+1y6dEm98MILqmLFiiooKEjdcsstKiEhQR08eFCNHTs2Wxvn+p8g3ejf37/pmddBGzJkiOmpUUplva7ujf5d/3WsX79etWrVSkVHR6uwsDBVq1YtNWHCBHX16lXnBrHbOImJiWrt2rWqVatWqnDhwio0NFQ1aNBAzZ0794bz/Oabb1RCQoIqV66cCg4OVpGRkapKlSqqS5cu6v3331dnz57NMj67Gyfz+3ajf+XKlXPhDLuXP+yBlStXqs6d
O6syZcqo4OBgVahQIdWwYUM1adIkdfnyZdv5swfcswemT5+uWrZsqUqUKKECAwNVdHS0atOmjVqwYIErp9UjfH39z5kzx2gt6p6DWP85X/+m58Jd10t1la+v/+utXLlStWvXThUtWlQFBgaqMmXKqP79+6uffvpJO571757nf6X+vPbuu+++qxo1aqQiIiJUSEiIql69unr22WfVmTNnTE+tR7AH2AO+vAccSuWg9RqyJT4+XjZt2iT79++XmJgYb08HyHXsAfgz1j/8Gesf/o49kP/lqyZiviA1NVU2btwoTz31FJsGfok9AH/G+oc/Y/3D37EHfAOfQAMAAAAAYIBPoAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADBT15cIfD4cnDAzellPLq47MH4G3e3AOsf3gbrwHwd7wGwJ95av3zCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGCnp7AgAAAICnFCtWzJKtXLlSO/bq1avaPDY21q1zApB/8Qk0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggC7cAAAA8FkzZsywZHXr1tWOffvttz09HcDrOnbsqM1HjBihzVu2bKnNMzIyLJldx/pvv/3WbHL5AJ9AAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABujC7aeKFy+uzU+ePKnNlVLavECBAm6bEwAAQHbdd9992rxHjx6W7F//+pd27GuvvebWOQG5pUyZMtpct9btutCHhoZqc123bRF9fWBXM/gSPoEGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAM0IXbTz3zzDPa3K5znl1erVo1S7Znz57sTwwA4DVRUVHa/LnnntPmCxcutGRbt25155QAiyJFimjzd955R5svXrzYkg0cOFA79tq1a9mfGJALevfurc1ffPFFbV65cmWPzeXrr7+2ZLt37/bY4+UVfAINAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAzQR81MbNmzQ5k8++aQ2T0lJ0ebVq1e3ZDQRA4C8o1KlSpbsgQce0I61a6xUtGhRbf7FF19kf2JANiUmJmrzq1evavPZs2dbMpqFIS8pWNBakg0ZMkQ7dtCgQdrcHc3Czp07p80vXbqkzd944w1LdvHixRzPI6/jE2gAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAAXbj9QPHixS3Zm2++qR2rlNLmS5Ys0eZLly7N/sTgEwoXLqzN7bpB9uvXT5v37t3bktl1/rVbp3bmzJljyR577DHtWLsurkBeoevWKiLSrl07bT5//nxLFhERoR3rcDi0eYcOHbT5f/7zH+P5/fHHH9ocsNOsWTNtrrsCiIjIqFGjtPmaNWvcNicgJ8qXL6/Nn3nmGUtmd1UEu+dpV94b/fOf/9Tm77zzjjbfvn278bH9AZ9AAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABujC7QceeOABSxYWFqYde+bMGW3+2WefuXNKyKfq1atnyV599VXt2FatWuX48c6fP6/Nf/nlF21es2ZNbd6/f3/jxxw0aJDxWMAb3n77bW3+j3/8w2OP+cUXXxiPTUpK0uYvvPCCNv/mm2+yNSf4luDgYEv2/vvva8du3rxZm3/44YdunROQXXbPx7pu2yIi5cqVMz52QID+88/33ntPmycnJ1uyt956y/jxYMUn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAG
KKABAAAAADBAAQ0AAAAAgAGHUkp57OAOh6cODRfoOqJWrVpVO3bPnj3avG/fvtr8119/zf7EcoEHl7eR/LoH7rrrLm0+efJkS1a/fn3tWLtz/69//UubT5w40ZKdPn1aO/bKlSvavHTp0tp8w4YNlqxo0aLasSVLltTmZ8+e1eZ5nTf3QH5d/94QGxtryTZu3Kgd68nvqV0X48TERG0eGRlpyXR7WUTkhx9+0Obu6Nhvh9eA/GPRokWWrHv37tqx7du31+YrV65065x8Aa8B7nPHHXdYss8//1w79pZbbtHmBQoUMH68HTt2aPMPPvhAm8+aNUubp6enGz+mr/HU+ucTaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABgo6O0JwHXh4eHavHnz5sa53R/Vt2zZUpvn9WZhcK933nlHm9eoUcOSnTp1SjvWrpnF2LFjsz+xm/jll1+0efXq1S3Ztm3btGP379+vzXVNnkREkpOTDWcHiAQHB2vzN99805LZPU///vvv2nzBggXa/JNPPrFkduvf1WZ5jzzyiCWze42ya1IJiIjcd999luzLL7/UjqVZGDypUaNG2vzTTz+1ZKVKldKOdfX5W9c0cvDgwdqxJ06c0ObIPXwCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKALdz40b948bW7XJfj06dOWbOLEidqxdEn1L3adJitWrGh8DF0XXhGRzz77LDtTypG77rpLm8+ZM8eSlStXTjv23Llz2vzatWvZnheQ6erVq9r8888/t2SXL1/Wjh04cKA2P3DgQPYnlk1PPvmkJStYUP/WYvny5R6eDfKSwMBAbT506FBtrutabPf6AriD3XugRYsWafOSJUsaH9vu+fupp57S5nPnzjU+NryPT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADdOHOw5577jlt3rVrV22u62ApIvLxxx9bsmnTpmV/YvAZYWFh2jw4OFibOxwOo8xdmjVrps3t9kbr1q1z/Jh23Td/+eWXHB8bCAjQ/9xa95w8YcIET0/H2J133qnNb731Vkt2/vx57dhDhw65c0rI44oVK6bNp0yZos2//vprS3bs2DG3zgm43tSpU7W57nnNTlJSkjafPHmyNv/yyy+Nj+2qF198UZsXKlQox8e2e6/30ksvWTK71wBfwifQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAYeya93sjoN7sDuvr9F11p43b552rF3nZLtvZa1atSzZnj17XJhd/uXB5W0kr++BmjVravONGzdqc10nx1OnTmnH/vTTT9mf2P+0aNFCm3vy+1quXDltnl+7wXpzD+T19e8OCxcu1OYRERHaPDAwUJtXr17dkvXt21c7NjEx0XB2rmvQoIE2t3tOuHLliiXr2LGjS8fwJF4DvMeuc/s333yjzXWdj0+cOOHWOfkjXgNEpk+frs0fffTRHB+7QIECOT6GiP55s3Llytqxdu+N7r33XrfMRcfuChIZGRmW7K233tKOnTFjhjbft29f9id2E55a/3wCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMFvT0Bf1O8eHFt/sADD1gyu2Zhdk0Z7BrO+EvDMLhu9+7d2vzZZ5/V5lOmTLFkdmvaLreTmppqyU6fPq0da9dwomnTpsaP9+6772rz/NosDPbCw8O1+V133aXNy5cvr811a8aTDXq+/vprbf7qq69qc7t9GxQUZMlatWqlHfv0008bH0NE5NNPP7Vk3mgWBu+JjIzU5v/617+0ud17GF3DsCpVqmjH2u3RChUqaPPPP//ckh0/flw7FvmbrsHWgw8+qB3r6vP3yy+/bDy2TJky2txuX9StW9eShYaGasfa7SFPvh7pmoXZPebjjz+uHduhQwdtbtd40pPNxXKKT6ABAAAA
ADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADdOHOZVWrVtXmXbp0sWR23fRSUlK0+YYNG7I9L+B6M2bM0Oa6joh2nX/trFq1SpuvXLnSku3YsUM79r777tPmrnThnjlzpvFY5G8DBgzQ5tOmTdPmds+xS5YssWSTJ0/WjnVH91C7eYwYMUKbHzlyRJufO3fOks2fP1871q676yuvvKLNx48fr83hP+w6X9t1yl69erU21+2l/v37a8cWLVrUaG6ZdJ3rH3nkEe3YBQsWuHRs5C1NmjSxZHZXEbBz9epVba67qk2pUqW0YxcvXqzNGzRo4NJcdDZt2qTN7bqEnz9/3vjYnTp10uZ2V62oX7++JbM733bPFe3bt9fmdOEGAAAAACCfo4AGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAbpwe0i5cuW0+YQJE7S5XedTnRdeeEGb//rrr8bHALJj7dq1Rpmn1axZM9cfE/mD7vnxueee046161rduHFjbX7y5MnsTywbdu3apc1r166tzadOnarN09PTjR+zTZs22vzrr7/O8bHhm0JCQlwaf88992hzXZffWbNmacf+8MMP2vy///2vNu/Tp48lmz17tnbslStXtPlnn32mzZG36N5n213Vxq7btt1VB3RXC0lMTNSOtXuetnvMU6dOWbLPP/9cO3bYsGHa3B2+++47l8Z36NDBkk2ZMkU7tnLlyto8KirKpcfMC/gEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEAXbg8ZNGiQNo+NjdXmug6BycnJ2rFLlizJ/sSAfKRkyZLafODAgdrcrpt9UlKSJbPbX8gfqlSpos11a0P3/RcRGTx4sDb3ZLftgAD9z62LFCliyUqVKuXSsQMDA7W57uu57777tGO///57lx4TGDlypEvjDxw4oM179uxpybZv356tOf3dxo0bLdnevXu1Y5s0aaLN6cLte86ePavN//3vf2vz559/3pLVqVNHO9au8/f48eO1+aRJk7R5XrdixQpL9uyzz7p0DLuO5XkZn0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAE3EcsiuCU337t21uV2TI11+5swZ7diUlBTD2QH520MPPaTN7ZqLXblyRZtPnz7dbXNC3vDBBx9o89DQUEs2evRo7djDhw/neB7R0dHavE2bNtq8Zs2a2lw3R7vXC7vmNL///rs2j4uLs2T79+/XjgVcZdcc1a4ZX+XKlT05HS1d0zzdc4WIyG+//ebp6cCDdI0aMzIytGPtGjW60rzuhx9+0OatWrXS5ufPnzc+dl5Sv359bb569WpLVrhwYe3Yo0ePanO7mikv4xNoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAF24XVC8eHFL1qxZM+3YqlWranO77qm6ztrDhw93YXZA/qbriPrss8+6dAy7rq9Lly7N1pyQd127dk2b656n69atqx1r183dTp8+fSxZjx49tGODg4NdOvbWrVst2ccff6wd++qrr2rzAgUKaPOCBXmph+fo1q6ISIsWLbR548aNtfl3331n/JhVqlTR5h06dNDmuis6LF68WDuWqzbkb+PHj7dkzz//vMce7/jx49q8devWLh1n8+bNlsyua7Xd+r/99tuNHy8qKkqb252ryMhIbV6oUCFLZlfr2HUsz4/4BBoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADDgUHat0txxcIfDU4f2ivfee8+SDRo0SDvW7mu3O92PPPKIJZs1a5YLs4OOB5e3EV/bA5702WefWbJ7771XO9bu+/rwww9r8/fffz/b88rvvLkHPLn+R40apc3tOlS7w/nz5y3Znj17tGN///13bf7pp59q8zlz5lgyuy6pP//8szYPCND/TLxhw4aWbO/evdqx
vobXAM+z67b99ddfa/P//ve/2lzXcdju+2fXhTgkJESb69Z7mzZttGOPHDmizfMrX30NsKO7uoLd+2m7K+nYdZzWcfX9vh3d8/rZs2e1Y+2uIFG2bFmXHtMVdl+nrgv/G2+8oR27du1abX7mzJnsT+wmPLX++QQaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQBduF5w8edKSFStWTDvW7mtPTk7W5nfffbclS0lJcWF20KEDa/7x448/WrIaNWpox65cuVKbd+rUSZv/8ccf2Z9YPuerHVjDw8O1uV13Une4fPmyJTt+/LjHHs/ua7TrEhwWFqbNGzdubMl27tyZ/YnlI7wGeJ5dx+KWLVtq8+7du2vzvn37WjJdh18R+/dSdp2/V69ebclOnDihHetrfPU1wB10771FRCpUqKDNn3/+eUtWqFAh7djChQtnf2I3YXde7TrcX7t2zZJdvXrVpcccOXKkNl+xYoUlS01NdenYnkQXbgAAAAAAvIgCGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAATcQ0qlWrps1/+uknS5aRkaEdu2rVKm0+YcIEbb5x40bD2cEVNJDJe+way6xdu9aSBQTof8Y3dOhQbf7OO+9ke16+igYy+Vfp0qW1+VNPPaXN7Rq3vP7665bs4sWL2Z9YPsJrAPwdrwGeVa9ePW1+11135fJMRNavX6/NdQ3zaKKXM3wCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGCjo7QnkRXv27NHmn376qSXr0qWLdqxdJzy6bQN6uk6J586d045NTEz08GwA7zt27Jg2f/rpp3N5JgAAne3bt7uUwzfwCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCALtwu6NGjh7enAOR7oaGhxmN//vlnbZ6cnOyu6QAAAADG+AQaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQBduALmqf//+xmPnzJnjuYkAAAAALuITaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABigiRiAXLV7925t3r17d+OxAAAAgDfwCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGDAoZRSHju4w+GpQwNGPLi8jbAH4G3e3AOsf3gbrwHwd7wGwJ95av3zCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGDAo124AQAAAADwFXwCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMeK6DLly8vDocjy7/g4GApW7as9OzZUzZs2OCph3bZuHHjxOFwyLhx47J1/2vXrslbb70lzZo1k6JFi0pISIjceuut0q5dO1m4cKHb5tm/f39xOBzSv39/tx1Tp2XLluJwOGTdunUefZxMmWvl0KFD2br/qVOnZMyYMVKrVi2JiIiQ8PBwqVixovTu3Vu2bdvm3sm6wNf3QFpamqxdu1ZGjhwpDRs2lKioKAkMDJSYmBjp1KmTrFixwu3zZA/85fz587J48WIZOHCg1KhRQ8LCwiQkJEQqVKggDz30kPz444+em7ABX1//11u8eLG0bNlSihQpIuHh4XL77bfLpEmTJC0tzSPzbNmypVuP+3eZ+2zu3Lke
fZxMOdlvGRkZ8t5770njxo0lMjJSIiMjpXHjxjJr1ixRSrl/soZY/6x/U764/kX8aw9cb9SoUc6v95VXXsn55K7De6AbS09PlyZNmjjP/8aNG90zQY2CHjvy/8TGxkqlSpVEROTcuXOydetWWbRokSxevFimTJkiw4cP9/QUPOro0aPSpk0bSU5OlujoaImNjZXw8HA5cuSIrF+/XsLDw6Vnz57enqbPSkxMlG7dusm5c+ekUqVK0rZtW8nIyJBDhw7JokWLJDY2VurXr+/VOfrqHkhKSpL4+HgREYmJiZFmzZpJeHi4JCcny/Lly2X58uUyePBgmTlzpjgcDi/P1vdMnjxZJkyYICIiVapUkXbt2kl6erps27ZN5syZI/Pnz5fZs2dLv379vDpPX13/mZ588kmZNm2aFCxYUOLi4iQiIkK+/vprGT16tCxfvlxWrVoloaGh3p6mT0pPT5f7779flixZImFhYdKqVSsREVmzZo0MGTJE1qxZIwsWLJCAAO/9sh3rn/XvKflh/Yv4/h643qZNm+SNN94Qh8Ph9R9g+KMpU6bIt99+myvn3+MF9KBBg7L8pOTKlSsyZMgQmTdvnowaNUo6duwoVapU8fQ0PCI1NVXi4+Nlz549Mm7cOHn22WclMDDQefvly5dl3759Xpyhb0tOTpYOHTpIgQIFZMmSJdK1a9cst//2229y+fJlL83uL766BwICAqR79+4ybNgwad68eZbbFi5cKAkJCTJr1iyJjY2Vvn37emmWvis8PFyGDx8uDz/8sFSuXNmZp6WlyejRo2Xq1KkyePDgLG9evMFX17+IyGeffSbTpk2TiIgISUpKknr16omISEpKisTFxcnGjRvlhRdekClTpnh5pr5p+vTpsmTJEildurRs2LBBbrvtNhEROXjwoDRr1uz/t3ff0VFVax/HnwQSUqkBSQTpTYqANCmiQXrHBuZipEhRROlFvQFpAoIXECyIKHq5ogIXFemX3ryAiIoUEVBC0QQFAqEl+/3Dd+YSzj6wJ5nJJDPfz1qu5frNmX12TvbOzJMJz5FPP/1U7r//fhkwYIDX5sj6Z/17Sm5Y/yK+vQdudOnSJXnqqackOjpa6tatK//+97+9PSW/8sMPP0hCQoK0a9dOvvvuOzl+/LhHz5ftv5YKCQmR2bNnS3h4uKSlpcmSJUuyewpuM2nSJDlw4ID06dNHEhISMhTPIiJhYWFSs2ZN70zOD/Tr109SU1PlnXfesRTPIn99Klq2bFkvzOzWfGUPxMbGymeffWYpnkVEHn/8cecL5oIFC7J5Zv5h1KhRMm3atAzFs4hIUFCQvPbaa1KxYkW5evWqfPzxx16aoZ6vrH8RkYkTJ4qIyMiRI53Fg4hIVFSUzJkzR0RE3njjDTl37pxX5ufL0tPTZfLkySIiMnnyZGfxICJSpkwZ52OTJk2S9PR0r8xRh/UPd8it61/Et/bAjUaNGiWHDx+Wd955RwoUKODt6fiV69evS3x8vISGhsrbb7+dLef0yt91RERESKVKlUREMvy9u+Nv1kVE5s+fL/fdd58UKFDA8nfxJ0+elMGDB0uVKlUkLCxMIiMjpW7duvLGG2/I9evXtedMTU2VMWPGSIUKFSRfvnwSHR0t8fHx8ssvv2Tqa7h27Zq8+eabIiIybNiwTI3hadeuXZOPPvpI4uLipHLlypI/f34JDQ2VSpUqycCBA+XkyZO3HWPjxo3SokULKVy4sISFhUm9evXkww8/vOVz1q1bJ126dJHo6GgJDg6WYsWKSefOnWX79u3u+tJk7969snnzZilZsqR07drVbeNmF1/YA7dTq1YtERH59ddfPTK+CV/eA7cSGBgoNWrUEBHvXn87vrD+ExMT5b///a+IiDzxxBOWxxs3biwlS5aUK1euyFdffZWpc7jDkiVLpHfv3lKtWjUpVKiQhISESJkyZaRnz55y8ODB2z7/22+/lS5dukjRokUlNDRUatSoITNmzJC0tDTb
5+zevVvi4uLkrrvuknz58knhwoWlZcuWbr0O27dvl9OnT0u+fPnk4Ycftjz+8MMPS3BwsJw8eVJ27tzptvO6A+s/+7D+c976F/GNPXCjDRs2yKxZs+TJJ5+UNm3aZHk8d/GX90ATJ06U3bt3y/Tp0yUmJsYj57iZ1/5hxPnz50VEJF++fJbHnnvuOendu7fkzZtX2rZtK/Xr13duqE2bNkm1atXk9ddfl8uXL0vz5s2lUaNGcuTIEXnuueekbdu2lsYVly5dktjYWBk7dqycOnVKWrRoIU2aNJFVq1ZJ7dq15ejRo7bzdPzD9pubSezZs0eSkpIkJiZGypcvL999952MHTtW+vbtKyNHjpTly5d7/bd+Z86cke7du8vy5culUKFC0qpVK4mNjZWUlBSZNWuW1KxZU3766Sfb5y9dulRiY2MlMTFRWrZsKXXr1pXdu3fLk08+KUOGDNE+Z+jQofLQQw/JsmXL5K677pJOnTpJ2bJlZdmyZdKkSROZP3++8fyPHTvm/GF6c2OBVatWiYhIkyZNJCAgQFatWiXDhw+Xvn37yrhx47zaPMxUbt8Dt3P48GEREYmOjnbpee7ky3vgdnLC9b+V3L7+v/nmGxERKVy4cIZPf25Up06dDMd6w2OPPSb/+te/JDQ0VGJjY6Vly5YSGBgo8+fPl3vvvVe2bdtm+9yvv/5aGjRoIN988400a9ZM7r//fjl48KC88MIL0rVrV+2/MZsxY4bUq1dPFi5cKEWKFJEOHTpI1apVZcOGDdK2bVt55ZVXXJq/Y/3f3MzGcU2rVq0qISEhlueFhoZK1apVMxybk7D+swfrP2euf5HcvwccUlJSpGfPnnLHHXfIP/7xj8xdDA/xh/dAe/fulfHjx0vLli2lR48exmNnmfKQUqVKKRFR8+fPtzz27bffqsDAQCUi6r333nPmIqJEROXPn19t377d8rxTp06pIkWKqICAADVnzhyVlpbmfCwpKUnFxsYqEVFjx47N8LyhQ4cqEVGVK1dWiYmJzvzixYuqY8eOzvMmJCQYfx3vvPOOEhFVr149NWLECBUQEOAcx/FfrVq11PHjxw2v2O3Fx8crEVHx8fFGx58/f14tW7ZMXblyJUN+9epVNWrUKCUiqk2bNpbnNW3a1Pk1TJw4McNjGzZsUKGhoUpE1MqVKzM85rgm5cuXV99++22GxzZu3KgiIyNVcHCwOnToUIbHHNf46NGjGfKjR48653HzY0888YQSEfXMM8+ohx56yHLtRUTFxcWpy5cvm1wqj/D1PXArp06dUgUKFFAiombOnGn8vNthD5hZsWKFEhEVEBBgmUd28fX1P3PmTCUiqmbNmrbXYODAgUpE1COPPGJ7jCsSEhKUiKimTZsaP+fjjz9WKSkpGbL09HQ1e/ZsJSKqatWqKj09PcPjjn3m+Bl77do152Pff/+9Klq0qBIR9dZbb2V43sqVK1VAQICKiopSGzduzPDYvn37VIkSJZSIqA0bNmR4zLHf1q9fb5m/Yx43PzZ48GAlIqpTp062X3uHDh2UiKihQ4faHuMprH/W/438bf0r5ft7wKFv375KRNTSpUudmWMNjRs3zubqZA7vgTK6cuWKqlGjhoqMjMxQbznG27x5s+21yapsLaD//PNPtXz5clWuXDklIiomJibDDzbHRXrllVe0Y44YMUKJiBowYID28RMnTqigoCBVtGhR5w/ES5cuqcjISCUiasWKFZbnnDp1SoWEhNhunNjYWFWpUiW1ZMmSDPmkSZOUiKigoCAlIurZZ59VBw8eVOfOnVNr1qxRFStWVCKiqlWrpq5evXq7y2XE1Y1zOzExMSowMFCdP38+Q+7YOLVq1dI+b8iQIUpEVPPmzZ1ZWlqaiomJUSKidu3apX3elClTlIioIUOGZMjtNs6JEydUpUqVVKVKldSJEycyPNayZUvn9Q8LC1OzZs1SiYmJ6rffflPvvfeeyp8/vxIR1adPH9PL4Xa+vgfsXLt2
TTVr1kyJiKpevbrlB3dWsAduLzEx0TkP1r/n1v+ECROUiKhGjRrZXoPRo0crEVEtWrSwPcYVmSkgbuW+++5TIqJ++OGHDLljn0VHR6vU1FTL82bNmqVERFWoUCFDXr9+fSUi6rPPPtOe75NPPlEioh5++OEM+a0KCMf637lzZ4b86aefViJ//aLUjuMXrd7YB6x/1v/N/Gn9K+X7e0AppVatWqVERHXt2jVDnlMK6NvJ7e+BHD9jbv5lVnYU0B7vwt2jRw/tR+rlypWTxYsXS3h4uOWxRx55RDuW476ydreFuvPOO6VChQqyf/9+OXz4sFSsWFH27NkjFy5ckKioKGnVqpXlOcWLF5cWLVrI559/rh1z3bp12lz9/5/uXLt2Tbp16yZvvPGG87GHHnpI1qxZI5UqVZLvv/9ePv74Y+nevbt2nOzw7bffyrp16+To0aNy8eJF55+WX79+XdLT0+Wnn35y/nvVG9l1To6Pj5dp06bJli1bJC0tTfLkySPffPONnDx5UsqVK2d72yjHvRtv9SdTN7rzzjvlwIED2sduvP5vvPGG9OnTx/lYjx49nLcPe/fdd2XUqFFSunRpo3N6gq/uATv9+vWTdevWSZEiReSzzz6T4OBgl57vCb64B3TOnz8v7dq1k5MnT0q9evVkxowZxs/1FH9b/znRTz/9JCtXrpSffvpJLly44Pz3m2fOnBERkYMHD8rdd99ted5jjz2m/fPQ+Ph4ee655+Tw4cNy8uRJiYmJkaSkJPn6668lNDRU2rdvr52Hq+tfRFxa/zkR69/7WP/e5at74Ny5c9KrVy8pWrSozJo1S3tMTuGL74G+/vprmTx5ssTGxmaoAbJLtt4H2vEPyRs0aCCtWrWSvHn1p7crdn7++WcREW3X35v9/vvvUrFiRTlx4sQtxxQR23+/cyuRkZHO/+/bt6/l8bvuukvatm0rixcvlrVr13qlgL548aJ0795dli5desvjHP8O5WZ218WRp6amSnJyshQrVsz5vTly5Mht7/n7+++/327qt+W4/kFBQdKzZ0/L44899pg8++yzkpSUJOvXr8/efxdxE1/dAzrPP/+8zJs3TwoVKiRr1qzx+q0pfHkP3CwlJUVat24t33zzjdSqVUtWrlypffOX3Xx1/Tt+Bl28eNH2mJSUFBERyZ8/v8vju0NaWpoMGDBA3n777VveE9PV9R8ZGSlFihSR5ORkOXHihMTExMjRo0dFKSWpqanaf9N4I3e+BuTk6y/C+hdh/d/Mn9a/iO/ugRdeeEFOnDghixYtkqioKJefnx189T3Q5cuX5amnnpKQkBB59913b3s+T8j2+0CbCA0N1eaO35g88sgj2t9Y3ahIkSIundNVN94eye5WSY781KlTHp2LnVGjRsnSpUulcuXK8uqrr0rdunUlKirK+Ylgw4YNZfv27Vm62bjjuY7vTfHixaVly5a3fI47ftA4rm3JkiVtfwCXKVNGkpKSvHb9HXx1D9xsyJAhMnPmTClYsKCsXr1a+9vM7ObLe+BGFy9elLZt28q2bdukRo0asmbNGilUqJBbz5FZvrr+HW/GbtXl3PGYt/4CZsaMGfLWW29J8eLFZfr06dKwYUO54447nL9YeeKJJ+Rf//qXW9d/RESEtiuwuzmu6a066Hr7+ouw/m88Nrux/r2//kV8dw8sXbpU8ubNK3PmzHHets3B8anpvHnzZO3atVK8eHGv3FLSV98DHThwQH788UeJiorSfkB2+vRpEfmrGV2BAgWkVatWMnLkyCyd82YeL6DdqWTJknL48GEZMWKEs7vj7dx5550iIrfsYOtqd1sRkdq1a0tAQIAopSQpKUlKlixpOSYpKUlE/vqB6g2ffPKJiIgsWrTIeUubGzm69Nqx60rouF4hISHOH1COr79IkSIud2vODMefhyQnJ9se4+3r7wk5aQ/caPjw4TJ9+nQpUKCArF692nhunubLe8Dh0qVL0rZtW9m0aZPUqFHD+efz
vignrX/HL4iSk5Pl6NGj2t/U79q1S0Qkwz1ys5Nj/b/99tvSoUMHy+OZXf8XLlxw/uwtUaKEiPxv/QcEBMh7770ngYGevcmH45r+8MMPcvnyZctfW6SmpsoPP/yQ4djcjvXvGta/b61/kZy1B0T++hPojRs33nLcY8eOSalSpTI1flb5+nugpKSkW17/vXv3iohnfonktdtYZUbr1q1F5H8LwsS9994rERERkpSUJKtXr7Y8fubMGW1+O8WLF5fGjRuLiMjatWstj1+7ds35Ta1Xr57L47vD2bNnRUS0G3fVqlXOAtPORx99pM0XLFggIn/d59Hx6a/jt1r79+93/tD2pDZt2khYWJicO3fOeS/KGx06dEiOHz8uIt67/p6Qk/aAw8iRI2Xq1KlSoEABWbNmjdStWzfTY7mbL+8Bkb/eJLVr1042btzoLJ5z6p+SuUNOWv8lSpRwrvWFCxdaHt+yZYv8+uuvki9fPq/dF/RW6/+HH35wvrmw8+mnn8qVK1csueMeoOXLl3e+OY2JiZEaNWrIhQsXZOXKlVmc+e3dd999Urx4cbly5YosXrzY8vjixYvl6tWrEhMTI/Xr1/f4fLID6981rH/fWv8iOWsP/Pnnn6L+asZs+S8+Pl5ERMaNGydKqSx/SJFZvvoeqGbNmrbXXinl/Ho3b94sSimPFPS5qoAeNmyYFCxYUKZPny7Tpk2Tq1evWo45evRohm94aGio8x+XDxo0KMOf86ampkr//v0lNTXV9pzNmjWTypUra//9QEJCgoiITJo0SXbs2OHMr1+/LkOGDJGff/5ZIiMjLX9esGHDhgw3i/eUKlWqiIhYmhscPHhQ+vXrd9vn7969W6ZMmZIh27Jli8yePVtE/rqeDkFBQZKQkCBKKencubNs2bLFMl5aWpr85z//yXCtbiUxMVEqV64slStXlsTExAyPRUZGOu9B179//ww3g09OTpbevXtLenq61KtXTxo0aGB0vtwgp+2Bl156SSZPniwFCxZ0qXhmD2R9D1y+fFk6dOgg69ev94viWSTnrf/Ro0eLiMirr74qe/bscebJycnyzDPPiIjIgAEDpECBAhmel5X7e7vCsf5nz57t/PM6kb/+WdGTTz4p169fv+XzT548KUOHDnU2XRIR+fHHH533sr1x/YuIjB8/XkT+ahr0xRdfWMZTSsnOnTtderPqWP9ff/11hjwwMFBGjBghIiIjRozI8EnJ0aNHnX+uN2rUKI9/GphdWP+uYf371voXyXl7ILN4D5T190Be56n23pm5d6z8f/v6W9m4caOKiopSIqKKFSumYmNjVVxcnGrXrp2zLX79+vUzPCclJUXVq1dPiYiKiIhQ7du3V48++qgqXry4KlKkiHryySdt29ff7usYN26cEhGVN29e1bBhQ9WlSxdVunRpJSIqNDRUffnll5bnrFu3zvkcVzja10dFRan69evb/jd37lyllFKLFy923p+6evXqqmvXrio2NlYFBQWp2NhY1bBhQ+2tExzt6wcOHKgCAwNV1apVVbdu3VTTpk2d9+17/vnntXMcNmyY8/tYtWpV1bFjR9W1a1f1wAMPqIIFCyoRUW+++ab2Gmfm/m+tW7dWIn/dM7BFixaqTZs2qnDhwkpEVKlSpdTPP//s0jV2J1/fA8uWLXPOt06dOio+Pl773823K1CKPeCOPTBo0CDnY+3atbO9/o5rkd18ff07OO51GxQUpFq1aqUefvhh5/e5UaNG6tKlS5bnHDlyxPm1mt6eTKn/3cYnMjLyluvfcQuYHTt2qODgYCXy1305H3vsMdWqVSsVGhqqqlatqjp37qz92hz7rF+/fiokJESVKVNGde3aVbVs2dI5XufOnS33z1VKqRkzZqi8efM6z9m2bVv1xBNPqObNm6tixYopEVEjRozI8JzM3AdXKaWuX7/u/BrCwsJUhw4dVIcOHVRYWJgS+ev+wzfeJzY7sf5Z//68/pXynz2gc7vbWPEeyD11
gB2fuw/0bSdjsHGUUurMmTPq5ZdfVrVr13belLtEiRKqYcOGKiEhQe3bt8/ynIsXL6qXX35ZlStXTgUHB6s77rhDxcXFqaNHjzp/KGd246xatUq1bt1aFS5cWAUFBamSJUuqp556Sv3444/a4x33Qevbt+9tv9YbOTbO7f678evYtGmTatasmYqKilJhYWGqWrVqasKECerKlSu2P7RvzNetW6eaNWumChQooEJDQ1WdOnXU+++/f8t5bt26VcXFxalSpUqpfPnyqcjISFWxYkXVqVMn9e6776qzZ89mOD4rGyctLU3NmTNH1atXT0VERKiQkBBVpUoVNXr0aJWcnGx6aT3C1/fA/PnzjdZjqVKlLGOyB7K+B0yvhbvuF+kqX1//N1q0aJG6//77Vf78+VVoaKiqVq2aevXVV23vge64H2zLli1v+7XeyDFPV77n+/btUx06dFDR0dEqJCREVahQQQ0fPlydP3/euYbsCoj58+erPXv2qPbt26siRYqofPnyqapVq6rp06era9eu2c7zu+++U3369FEVKlRQISEhKiwsTJUtW1a1bNlSzZw5UyUmJmY4PrMFhFJ/vQa89dZbqk6dOio8PFyFh4erunXrqrfeektb4GQX1j/r35/Xv1L+tQdudrsCmvdA7qsDdLKjgA5QKgut15ApzZs3l23btsmRI0ekePHi3p4OkO3YA/BnTz/9tMybN0/27NkjNWvW9PZ0gGzF+oe/4z1Q7uc7/zAil0hNTZUtW7bIoEGD2DTwS+wB+Ls1a9bIE088QfEAv8T6hz/jPZBv4BNoAAAAAAAM8Ak0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYCCvJwcPCAjw5PDAbSmlvHp+9gC8zZt7gPUPb+M1AP6O1wD4M0+tfz6BBgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAA3m9PQF43syZMy3ZgAEDtMe+9dZb2vyZZ55x65yAmwUGWn+fV7ZsWe2x8fHxHpvHvn37tPlnn32mzZVSHpsLAAAAchY+gQYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAzQhduHzJo1S5u70kG7cePG7poO4JImTZpYsvXr13thJnr9+vXT5u+++64lS09P9/R0AMCn3H333ZbshRde0B779NNPa/OFCxdq8127dlmy119/3XxyAHADPoEGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMBCillMcGDwjw1NB+oVixYtr8pZde0ubPPvusNnfl+zB16lRtPmLECOMxchIPLm8j7AGrXr16afMXX3zRkpUuXdrDs8m6mJgYS3b69GkvzETPm3uA9Z81RYoU0eYVKlTQ5g888IA2L168uCWbM2eO9thDhw6ZTS6X4DUg54mPj9fm48aNs2R33nmnx+aRnJyszXfs2KHN4+LitPmFCxfcNidP4DXAO4YNG6bNw8PDtfmYMWM8OBv/5an1zyfQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEATsRxC1+Rl7ty52mPbtm3r0tg///yzJXv66ae1x27evFmbX79+3aVz5hQ0kPG8vHnzavN+/fpp80GDBmnzMmXKGJ/z6tWr2nznzp3a/MCBA5bMbg/Y2bp1qzZv2bKlJbt06ZJLY3sSDWRylrCwMG3eu3dvS9a/f3/tsXZNxOyut24NnD17VnusXcPI+fPna/OcjtcAzwsKCtLm
up+NIiKLFy/W5navJTnFkSNHtPkjjzxiyfbt2+fp6RjjNcA71q9fr82/+uorbW7XxNdTQkJCtLldM8qVK1d6cDaeQxMxAAAAAAC8iAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABnJ2y0MfZNdl8pVXXrFkrnbbvnDhgjbXdYO06w4I3Ipu/Y4aNUp77NixY7N8vv/+97/aPCEhQZvbdYns0aOHJXO1C7ddh++c1HEbOce9996rzceMGaPNW7du7cHZWBUuXFib2939ITY2VpuPGzfOkh06dCjzE0OOpuvcO378eO2xdndcyK3KlSunzf/+979bMrvXlz/++MOtc4L35cmTR5tHRERo8zVr1nhyOsbsuuTHx8dr89zahdtT+AQaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwEKCUUh4bPCDAU0PnWn379tXmb775pvEYFy9e1OadO3fW5rpOxufOnTM+X27mweVtxNf2wPPPP2/JXn/9dbeM/fXXX1syuzV96tQpbR4eHm48dpUqVVyYnUjp0qW1+S+//OLSONnNm3vA19a/K3SdeUXsu9Onp6cbj52SkqLNP//8c22+Z88eS1ahQgXtsb1799bmQUFB2lw371atWmmP9Ub3WV4DMkfXbVtE33V98ODBnp5OliUlJVkyu+7JhQoVyvL5/v3vf2vzp556Spvb3UXFHXgN8KyCBQtq87Nnz2rz2rVra/O9e/e6aUZm1q5dq83ffvttbf7pp596cjoe46n1zyfQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAgbzenoCvKlWqlDafMmWK8Rh23bY7deqkzdetW2c8NpAZP/zwgyU7fPiw9li7Lq7vvvuucW7XbdvOo48+qs1d6bi9efNmbZ6cnOzSXOAfqlWrps379++vze26bes6hdqtuS5dumjzrVu3anN3sLuDhG7ec+fO1R7brl07bf79999nfmLIkjp16mjzUaNGaXO79x/Z7dKlS9p8wYIF2lx3pxO7uzyMGTMm0/NysLtOH330kTbv2LFjls8J76hVq5Y2t3sP/+eff3pwNnq61ym7166vvvrK09PxCXwCDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAM0EfOQJ554QptHRkZqc10jFruGY+5oFpY3r/5bnz9/fm1+7tw5bZ6WlpbluSD3WLt2rSW79957tccWKFBAmycmJmZ5Hq1atdLm06ZNMx7jjz/+0OYbN27U5nYNQeB7atasqc3vv/9+S/bKK69oj42IiHDpnLqGYXZN8TzZLGzRokXa3K6JmE6JEiW0eXx8vDYfNmyY8dhwrw8++ECbV65cOZtnojd79mxtPn36dG1+7Ngx47ELFiyozUePHq3Ng4ODjce2Exsbq80bN26szbds2ZLlc8KzmjRpos2XL1+uzV1Zo+7SqFEjS7Zq1SrtsbzXMcMn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cGdRyZIltfnw4cNdGmflypWWbNy4cS6Nceedd2rzv/3tb5asU6dO2mPr16+vzVevXq3NdV1Vz5w5YzND+KKUlBSXcju6DvVt2rTRHjtnzhxtXqhQIePz9ezZU5svW7bMeAz4ptdff12b23VbdYXdvujSpYsl82S3bTu1a9fO9nMCIiLz5s2zZJMnT9Ye6467Odh1uG7ZsqU2t3ttsLt7iU5YWJg2b9CggTanC3fOV6tWLW1+9OjRbJ6JvUceecSSzZgxwwsz8R18Ag0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABigC3cW6bpQi4gUKFBAm1+9elWbv/HGG8bnLFGihDZfuHChNm/cuLHx2HZatGihzYcOHWrJhg0bluXzwXdVrVpVm7/44ouWrGvXrm455z//+U9Ltm7d
OreMjZzPrvPt4cOHtXl0dLQ2V0pleS6ff/65NvdGx22dTZs2afOAgIAsj/3AAw9keQxkTrly5bS5Kx2kXTV16lRtPmvWLG1++vRpS5aWlubWOZmw2wPJycna3JPXEHBFTEyMNi9SpIgls7u7DszwCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCALtxZdN9997l0/Lx587T5ihUrjMeYNGmSNndHt21XuaMzK3K38PBwbd6hQwdtPnPmTG2u6xLpKl23bRGR/v37W7KUlJQsnw85T8WKFS3Z/Pnztcfecccd2tyu27Y7unCfOnUqy2N4kqsdyF25JrVq1crUnJB1PXv21OZ2XXtd8c0332jz119/XZufOXMmy+cEYOXKfra7KxDM8Ak0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADNBEzAXNmjWzZC1btnRpjJ07d2rzPHnyWLL33ntPe2xcXJxL5zx48KAlGz16tPbYDz74QJtHRERo89OnT7s0F/ie+++/X5vbNfRyxdmzZ7W5rimYiH0zPhqG+Z6wsDBtPnHiREtWv359t5zz+++/t2T9+vXTHtu7d29tfujQIbfMxVNeeuklb08BHvD00097bOy5c+dqc5qFWV2/fl2b//bbb9k8E+RmQUFB2nzs2LHZPBP/xSfQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAbpwu+CBBx6wZIGBrv0O4ueff9bmU6dOtWTdu3d3aexp06Zp81dffdWSNWjQQHusXbft33//XZsvWLDAcHbwBQ8++KAl++ijj9wytq7j9hNPPKE9dvXq1W45J3KvIkWKaPNOnTpleWy7ru3z5s2zZDt27NAea5fndKdOnfLY2Hv27PHY2PifyMhIS+bqexU7ui7S3OXA3B9//KHNeS/lewICAjw2dtGiRbV5mzZttHnPnj09Nhd/xSfQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAbpwa4SHh2vzxx9/3HiMDRs2aPPSpUtr8/79+xuPPWLECG3+2muvafOyZctasvfee0977MWLF7X5M888o81/++03bY7cIV++fNrcrgP8lClTLFnBggXdMpdDhw5ZMrptw84LL7ygzd3R+TQxMVGbz5o1K8tj5xTbtm3T5vXr19fm7riu48ePz/IYuD3d+4lChQq5ZexffvnFkv3zn/90y9jZLW9e/VvgCRMmaHO7zv+uSEtLy/IYyB0aNWqU5THCwsK0+ZIlS7T5uXPntDnvpdyPT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADdOHWGDZsmDYvX7688RibN2/W5r1799bmum7IX375pfbYqVOnanO7+c2dO9eSFS1aVHvsm2++qc0XL16szZE7BAUFafP3339fm7vScR7whujoaG2ulMry2K+88kqWx/AGu676bdq0sWR2rxeuXr8LFy5YMrsOsWvWrHFpbMBddB23V6xYoT02NjbWY/N48cUXPTY2cpYaNWpo83r16mnzr7/+2pLNnj1be+zQoUO1+YIFC7S53Z0lkHl8Ag0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADNBHTiIiIyPIYhw4d0uYvv/yy8RifffaZNm/atKk2nzdvnjYvW7asJVu0aJH22EGDBhnODjlVnjx5LNlHH32kPfbRRx/19HSM1a5d25JNmDBBe+ysWbO0+enTp906J3d75513tHmLFi20eenSpT04m9zNHc3C7Nxzzz3afMuWLZbsxIkTHpuHq+z2Rc+ePT12Tl3DME+eD96VP39+S3b33Xdrj92/f7/H5tGuXTttXrVqVW0+evRoS+aO93p2PvjgA23+z3/+02PnhHesXr1am3fs2FGbjx07Vptfu3bNks2cOVN7bIMGDbT5d999p83hfnwC
DQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGAhQHmxlGhAQ4KmhPeq1117T5oMHDzYe45tvvtHmtWrVMh7jp59+0ubly5c3HkNE5PDhw5asZcuW2mOPHTvm0tg5nSc79Zrwxh749NNPLdnDDz/ssfOtW7dOm5cqVUqbu7p+dc6ePavNdV0sRUQOHDhgyew6xHbp0iXzE7uNYsWKafN//OMf2nzIkCFZPqc394An179dh9PHHnvMkj3++OMujW037+TkZEuWmpqqPfbdd9/V5vfee682d+W1wU6JEiW0uStrYO7cudp8xYoV2nzNmjWWzO6aeIM/vgYMHz7ckk2aNMlj50tKStLmFy5c0Oa6jtgiItWrV7dk3bp10x57xx13aPOwsDBt7klff/21JWvWrJn22EuXLnl6Oha++hqQUxQsWFCb292NJyoqSpv36NHDki1YsEB77KZNm7T5xIkTtbndz29/4Kn1zyfQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAbpwa/Tv31+bz549O5tnomd3Xe26IXfv3t2SnTp1yq1zyqn8sQOr7mt29TpcvHhRm3fo0MGSbdu2TXvsM888o82nTZvm0lx8ya5du7R5bGysNk9JScnyOenAKlKxYkVtbvdz8MMPP9Tmus66rnb9tbsm7vg+uTL21q1btce2bdtWm9t1VM7p/PE1IDo62pJ98cUX2mPd0f3dX+i6bYuIJCQkWLLVq1d7ejrGeA3wjpEjR7p0/MyZMy2ZXdf2P/74Q5sXKlTIpXP6A7pwAwAAAADgRRTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEAXbo0CBQpo8y+//NKSNWrUyGPzuHr1qjafMmWKNh87dqw2T0tLc9uccht/7MDqShdu3ZoWEenWrZs2t+vOrRMYqP/9XHx8vDZ/8MEHLdnf/vY34/N5i647/3/+8x/tscuXL9fmdnvdHejA6j6dO3e2ZIMHD9YeGxMTo81Lly6tzd3xfdq5c6c21+3zd955R3tscnJylueRk/jja4BOyZIltfmWLVu0eYkSJTw5nRwhKSlJm/fr10+br1q1SpvbdUrOKXgN8D104TZHF24AAAAAALyIAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAE3EXFC5cmVL9vzzz2uPffTRR7X5N998o8137NhhyebMmaM99tSpU3ZTxE38sYHMuHHjLNmVK1e0x06aNEmbe6PxnK7pWJ48edwydqVKlSzZY489pj1237592nzZsmXa/Pr165bM2+vuRjSQ8Y7o6GhtXqpUKY+dU/c64u+8vRdz+h7QNcYTERk+fLg2v+eeeyxZvnz53DonE7qfuyIix44d0+a617q9e/dqj7XLcyteA3IvXXNVEZGJEydq8/vuu8+T08mVaCIGAAAAAIAXUUADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAF244dPowAp/RwdW+DNeA9yrY8eOlqx69eraY8eOHevS2BMmTLBk58+f1x7722+/afMFCxa4dE5/wGtA7tW7d29tfvHiRW3+r3/9y5PTyZXowg0AAAAAgBdRQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAAXbjh0+jACn9HB1b4M14D4O94Dci95s+fr83feustbb5z505PTidXogs3AAAAAABeRAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAA3Thhk+jAyv8HR1Y4c94DYC/4zUA/owu3AAAAAAAeBEFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGAhQSilv
TwIAAAAAgJyOT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABgwGMFdOnSpSUgICDDf/ny5ZO77rpLHn/8cdm8ebOnTu2yMWPGSEBAgIwZM8b4OdeuXZN169bJsGHDpG7dulKwYEEJCgqS4sWLS4cOHWT58uVun+dTTz0lAQEB8tRTT7l97Bs98MADEhAQIBs2bPDoeRwca+XYsWPGzzl37px8+umn0qtXL7n77rslLCxMQkJCpGzZstKzZ0/57rvvPDdhQ76+B0REDh48KLNmzZKnnnpKqlevLnnz5pWAgAAZP368R+bJHsho06ZNMnHiRHn44YczrLctW7Z4ZqIu8If1LyLyyy+/yIABA6RSpUoSGhoqISEhUqZMGYmPj5dvv/3WrfNk/Vtt27ZNHn/8cSlZsqQEBwdLeHi4VK9eXUaMGCG//fab+ydryF/Wv4jI1atXZebMmdK4cWMpXLiwhISESIkSJaR169ayaNEit82T9a/322+/yciRI6VatWoSEREh4eHhUq5cOenWrZvs3r3bvZN1gT/sAd4DuUdufA+U19MnaNSokZQvX15ERP7880/ZtWuXfPLJJ/Lpp5/Ka6+9JoMHD/b0FDxi48aN0rx5cxERKV68uDRu3FjCw8Nl//798sUXX8gXX3whffr0kbfeeksCAgK8PFvfM3XqVJkwYYKIiFSsWFFat24taWlpsnv3bpk/f7589NFHMnfuXImPj/fyTH13D4iIvPnmmzJjxgxvT8NvDRw40O1Fmrv58vrfuXOnNG/eXC5cuCB33nmntGjRQvLkySN79+6VBQsWyMKFC2XhwoXy6KOPenuqPmnOnDkyYMAAUUpJtWrV5L777pOUlBTZsWOHTJkyRd5//33ZuHGjVK5c2Wtz9OX1LyJy4sQJadmypezfv1+ioqKkUaNGEh4eLr/++qts2rRJwsPD5fHHH/f2NH3W+vXrpUuXLvLnn39K+fLlpVWrVpKeni7Hjh2TTz75RBo1aiT33nuvV+foy3uA90De5c33QB4voHv37p3hNyWXL1+Wvn37yoIFC2T48OHSrl07qVixoqen4XaBgYHy8MMPy/PPPy9NmjTJ8NiiRYskLi5O3nnnHWnUqJE8+eSTXpql7woPD5fBgwdLv379pEKFCs782rVrMmLECHn99delT58+GX5we4uv7gERkWrVqsnQoUOlVq1aUrt2bZk4caJ8+OGH3p6W32jevLl07txZateuLbVr15ZGjRrJ8ePHvT2tDHx5/ffp00cuXLggffr0kTfeeEOCgoJERCQ9PV0SEhJk/Pjx0qdPH2nfvr2EhIR4eba+5cyZMzJo0CBRSsn777+f4ZelFy5ckEceeURWr14tL7zwgqxcudJr8/Tl9Z+amirNmzeXAwcOyJgxY2T06NHOPSAicunSJTl06JAXZ+jb9u/fL23btpU8efLIkiVLpHPnzhkeP336tFy6dMlLs/sfX94DvAfyLm++B8r2fwMdEhIis2fPlvDwcElLS5MlS5Zk9xTcIjY2Vj777DNL8Swi8vjjjzt/WCxYsCCbZ+YfRo0aJdOmTctQPIuIBAUFyWuvvSYVK1aUq1evyscff+ylGdrzlT0g8tcL49SpU+WJJ56QypUrS2AgbRWy09SpUyUhIUHat28vd955p7enY8RX1n9ycrLs27dPRETGjx+foXAIDAyUMWPGSGhoqPz555/y448/emuaPmvLli1y
9epVufvuuy1/aRQZGSkJCQkiIrJ9+3ZvTM+Wr6x/EZFJkybJgQMHpE+fPpKQkJBhD4iIhIWFSc2aNb0zOT/Qr18/SU1NlXfeecdSPIv89deRZcuW9cLMbs2X9gDvgbzLm++BvPKdjoiIkEqVKomIZPh7d8ffrouIzJ8/X+677z4pUKCA5e/iT548KYMHD5YqVapIWFiYREZGSt26deWNN96Q69eva8+ZmpoqY8aMkQoVKki+fPkkOjpa4uPj5ZdffvHI11irVi0REfn11189Mr6Ja9euyUcffSRxcXFSuXJlyZ8/v4SGhkqlSpVk4MCBcvLkyduOsXHjRmnRooUULlxYwsLCpF69erf97dq6deukS5cuEh0dLcHBwVKsWDHp3Llztr2RCQwMlBo1aoiId6//rfjDHsgJ/HUP5HS+sP7z5ctnfGxUVFSmzpFVvrz+TT/R99a1vxVfWP/Xrl2TN998U0REhg0blqkxPM2X1//evXtl8+bNUrJkSenatavbxs0uvrAHcgNf3gPe5rVflZw/f15E9G9CnnvuOendu7fkzZtX2rZtK/Xr13duqE2bNkm1atXk9ddfl8uXL0vz5s2lUaNGcuTIEXnuueekbdu2cu3atQzjXbp0SWJjY2Xs2LFy6tQpadGihTRp0kRWrVoltWvXlqNHj9rO0/GP0t9//32Xvr7Dhw+LiEh0dLRLz3OnM2fOSPfu3WX58uVSqFAhadWqlcTGxkpKSorMmjVLatasKT/99JPt85cuXSqxsbGSmJgoLVu2lLp168ru3bvlySeflCFDhmifM3ToUHnooYdk2bJlctddd0mnTp2kbNmysmzZMmnSpInMnz/feP7Hjh1z/jB1tbFATrj+t+PreyAn8Oc9kNPl9vUfERHh/Aukl156KcM509PTZcyYMZKamiqtW7eWkiVLZuoaZZUvr3/HG+v9+/fLBx98kOGxlJQUGTt2rIiI9O3b1/h82Sm3r/89e/ZIUlKSxMTESPny5eW7776TsWPHSt++fWXkyJGyfPlySU9Pz+JVyhpfXv+rVq0SEZEmTZpIQECArFq1SoYPHy59+/aVcePGebV5mKncvgdyA1/eA16nPKRUqVJKRNT8+fMtj3377bcqMDBQiYh67733nLmIKBFR+fPnV9u3b7c879SpU6pIkSIqICBAzZkzR6WlpTkfS0pKUrGxsUpE1NixYzM8b+jQoUpEVOXKlVViYqIzv3jxourYsaPzvAkJCS59HXZOnTqlChQooEREzZw50/h5txMfH69ERMXHxxsdf/78ebVs2TJ15cqVDPnVq1fVqFGjlIioNm3aWJ7XtGlT5zWZOHFihsc2bNigQkNDlYiolStXZnjsnXfeUSKiypcvr7799tsMj23cuFFFRkaq4OBgdejQoQyPOa7x0aNHM+RHjx51zuPmx25lxYoVSkRUQECAZR7ZyR/3gGONjhs37rbHZgZ74NYc42zevPm2x3qaP6z/AwcOqLJlyyoRUXfeeafq2LGj6tKliypTpowKDg5W3bt3V+fOnTO4WmZY/xn9+9//VuHh4UpEVLVq1dSjjz6q2rRpowoVKqQKFSqkJkyYoNLT0293mTzC19e/43tdr149NWLECBUQEOAcx/FfrVq11PHjxw2v2O2x/v/niSeeUCKinnnmGfXQQw9Zrr2IqLi4OHX58mWTS+URvr4HdHgP5D/vgbK1gP7zzz/V8uXLVbly5ZSIqJiYGJWSkvK/yfz/RXrllVe0Y44YMUKJiBowYID28RMnTqigoCBVtGhR54vmpUuXVGRkpBIRtWLFCstzTp06pUJCQmw3TmxsrKpUqZJasmSJ0dd97do11axZMyUiqnr16pZFmxWubpzbiYmJUYGBger8+fMZcsfGqVWrlvZ5Q4YMUSKimjdv7szS0tJUTEyMEhG1a9cu7fOmTJmiREQNGTIkQ263cU6cOKEqVaqkKlWqpE6cOGH0NSUmJjrn0adPH6PneIo/7oGc9uJx
O762B3J6Ae2L6//MmTOqRYsWljevd999t5o3b57d5ckU1r/Vrl27nOvpxv9atGihNmzYcLtL4DG+vv4nTZqkREQFBQUpEVHPPvusOnjwoDp37pxas2aNqlixovMXG1evXr3d5TLC+v+fli1bOq9/WFiYmjVrlkpMTFS//fabeu+991T+/Pm9/j7I1/eADu+B/Oc9kMcLaLv/ypUrp/bu3ZtxMv//2P79+7VjVqtW7bYX5u6771Yiog4ePKiUUmrLli1KRFRUVJTtczp06GC7cVzVq1cvJSKqSJEizjm4S2Y3zt69e9W0adPUgAEDVI8ePVR8fLyKj49XxYoVUyKi9uzZk+F4x8Z5/fXXtePt27dPiYgKDQ1V169fV0r99SbG8X218/XXXysRUffdd1+G3G7juOrcuXOqVq1aSuSv34qnpqZmabys8sc9kFNfPPxlD+TEAtqX1/+WLVtUsWLFVExMjFq4cKE6ffq0Onv2rPriiy9UhQoVlIionj17ujTmrbD+M5ozZ44KCgpS9erVUxs2bFDnzp1Tv/76q5o9e7bKnz+/CgwMdOmvx9zJ19f/xIkTnfPt1q2b5fHjx487i5IFCxYYj3srrP//ufGXdm+//bbl8UWLFikRUYGBgVl+XcksX98DOrwH8p/3QNl6H2jHPyRv0KCBtGrVSvLm1Z++dOnS2vznn38WEdF2vr7Z77//LhUrVpQTJ07cckwRkTJlytx2PBPPP/+8zJs3TwoVKiRr1qzxelv+ixcvSvfu3WXp0qW3PM7x71BuZnddHHlqaqokJydLsWLFnN+bI0eO3Pa+17///vvtpu6ylJQUad26tXzzzTdSq1YtWblyZY65bYw/7YGcxp/2QE7lq+v/zz//lM6dO0tSUpJs375d6tev73ysXbt2cvfdd0v16tXlvffek7/97W/y4IMPunyOrPLl9b9161Z55plnJDo6WlavXi0FChQQEZH8+fPLM888I4ULF5Zu3brJCy+8IJ06dZKCBQtm+ZyZ4avrPzIy0vn/un9nftddd0nbtm1l8eLFsnbtWunevbvL58gqX17/jusfFBQkPXv2tDz+2GOPybPPPitJSUmyfv166dGjR5bPmVm+ugdyA1/eA96W7feBNhEaGqrNHQ0pHnnkEQkPD7/lGEWKFHHpnFk1ZMgQmTlzphQsWFBWr17t7MLtTaNGjZKlS5dK5cqV5dVXX5W6detKVFSUBAcHi4hIw4YNZfv27aKUyvQ5HM91fG+KFy8uLVu2vOVz3N0V9eLFi9K2bVvZtm2b1KhRQ9asWSOFChVy6zmywl/2QE7kL3sgJ/PV9b98+XL5/fffpVy5chmKZ4eyZctK/fr1Zf369bJ27VqvFNC+vP4dDX3atGnjLJ5v9Mgjj0h8fLycO3dO/vvf/0rz5s2zfM7M8NX1f+PtkexuleTIT5065dG52PHl9e+4tiVLlrQtQsuUKSNJSUleu/4OvroHcgNf3gPe5vEC2p1Kliwphw8flhEjRkidOnWMnuO4L9iturdltbPb8OHDZfr06VKgQAFZvXq18dw87ZNPPhERkUWLFjlv63QjR6dqO3ZdCR3XKyQkxPkDytFltkiRItnaqfDSpUvStm1b2bRpk9SoUUPWrVvn0z80c+oeyKn8YQ/4k5y0/h23PsmfP7/tMY7C7uzZsy6P7w6+vP5vd/3z5s0r4eHhcvXqVa9df3fLSeu/du3aEhAQIEopSUpK0naaT0pKEpG/OtZ7gy+v/3vvvVdE/rofvR1vX39PyEl7IDfw5T3gbbnqjt+tW7cWkf8tCBP33nuvRERESFJSkqxevdry+JkzZ7S5qZEjR8rUqVOlQIECsmbNGqlbt26mx3I3x5uGUqVKWR5btWqV84ernY8++kibL1iwQEREGjdu7PzNp+O3Wvv375cffvghK9M2lpqaKu3atZONGzc6i2df+K3WreTEPZCT+foe8Dc5af073pQdOHBAzp07Z3n82rVrsmfPHhHx
3p8H+vL6d1z/nTt3ah8/ePCg/PHHHyLiO3+emZPWf/HixaVx48YiIrJ27VrL49euXZONGzeKiEi9evVcHt8dfHn9t2nTRsLCwpx/YXGzQ4cOyfHjx0XEe9ffE3LSHsgNfHkPeFuuKqCHDRsmBQsWlOnTp8u0adPk6tWrlmOOHj2a4RseGhoqffr0ERGRQYMGZfhTltTUVOnfv7+kpqbanrNZs2ZSuXJl7b8feOmll2Ty5MlSsGBBl4rnDRs2ZLhZvKdUqVJFRERmzZqVIT948KD069fvts/fvXu3TJkyJUO2ZcsWmT17toj8dT0dgoKCJCEhQZRS0rlzZ9myZYtlvLS0NPnPf/4jO3bsMJp/YmKiVK5cWSpXriyJiYkZHrt8+bJ06NBB1q9f7zfFs0jO2wOZxR7I+h7wRzlp/bdu3VrCw8MlNTVVnn76aUlJSXE+dvXqVRk0aJD88ssvEhQUJI888kiG57L+s77+Hdd027ZtMnXq1Ax/gvjbb79Jr169RESkYsWKOeavwrIqJ61/EZGEhAQREZk0aVKG7+n169dlyJAh8vPPP0tkZKTl39+y/rO+/iMjI5334e3fv7+cPHnS+VhycrL07t1b0tPTpV69etKgQQOj8+UGOW0PZBZ7wAfeA3mqO1lm7p8s/99971Y2btyooqKilIioYsWKqdjYWBUXF6fatWvnbItfv379DM9JSUlR9erVUyKiIiIiVPv27dWjjz6qihcvrooUKaKefPJJ2+57dl/HsmXLnPOtU6eOs6Pdzf/d3KpdKaXWrVunRETlzZvX+Noo9b/ue1FRUap+/fq2/82dO1cppdTixYud92asXr266tq1q4qNjVVBQUEqNjZWNWzYUImIWr9+fYbzOLrvDRw4UAUGBqqqVauqbt26qaZNmzrv2/f8889r5zhs2DDndalatarq2LGj6tq1q3rggQdUwYIFlYioN998U3uNXbn/26BBg5yPtWvXzvb6O66FN/j6HlBKqd27d2dYe455lShRIkN+8uTJDM9jD2R9Dyil1Ny5czN83cHBwUrkr1soObJOnTq5dI3dxR/W/4cffqjy5s2rREQVLVpUtWnTRnXs2FHdeeedzg64N3+vlWL9u2v99+3b1/l4+fLlVZcuXVTz5s2dt/ApWLCg2rlzp0vX2F38Yf0rpdS4ceOca7lhw4aqS5cuqnTp0s4OvV9++aXlOax/96z/K1euqNatWyuRv+6b3KJFC9WmTRtVuHBhJSKqVKlS6ueff3bpGruTP+wB3gNZ+ct7oFxXQCv11303X375ZVW7dm3nTblLlCihGjZsqBISEtS+ffssz7l48aJ6+eWXVbly5VRwcLC64447VFxcnDp69KhKSEhweePMnz/fOd9b/VeqVCnLmI77oPXt29f00iil/rdxbvffjV/Hpk2bVLNmzVRUVJQKCwtT1apVUxMmTFBXrlxxbhC7jbN+/Xq1bt061axZM1WgQAEVGhqq6tSpo95///1bznPr1q0qLi5OlSpVSuXLl09FRkaqihUrqk6dOql3331XnT17NsPxmdk4ptfCXffKywxf3wNKKbV+/Xqj78PN3z/2QNb3gFLK+X1z9WdQdvCH9a/UX7cHeeqpp1TZsmVVvnz5VHBwsCpVqpSKi4uzLd5Y/+5Z/0op9e9//1u1b99eRUdHq6CgIBUaGqruvvtuNWjQIPXrr7+aXlq385f1r5RSq1atUq1bt1aFCxdWQUFBqmTJkuqpp55SP/74o/Z41r/71n9aWpqaM2eOqlevnoqIiFAhISGqSpUqavTo0So5Odn00nqEP+wB3gPp+cN7oAClstB6DZnSvHlz2bZtmxw5ckSKFy/u7ekA2Y49AH/G+oc/Y/3D37EHcr9c9W+gfUFqaqps2bJFBg0axKaBX2IPwJ+x/uHPWP/wd+wB38An0AAAAAAAGOATaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAA
AAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMBAXk8OHhAQ4MnhgdtSSnn1/OwBeJs39wDrH97GawD8Ha8B8GeeWv98Ag0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAICBvN6eAAAAAGAqICBAm99xxx3afODAgZasc+fO2mMrV67s0lxq1aplyfbu3evSGAByFz6BBgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADNCFGwAAADlORESENu/SpYs2f//997N8zrNnz2rzK1euaPNr165l+ZwAchc+gQYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAzQhTubRUVFafOnn37akrVv3157bIMGDbT5pk2btPnEiRMt2erVq+2mCORaH374oTYvUKCAJevQoYOnpwMAMFSwYEFLtmLFCu2xdu+Drl+/rs2PHj1qyZYuXao9dtasWdr8xIkT2hyA/+ETaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwABduLOZXWft8ePHG4+hlNLm999/v/HxW7du1R578eJF43kAOU3ZsmW1eY0aNSyZruOriMiff/7pxhkBZmJiYixZ/vz5tcfq7trgqsjISG3eq1evLI9t93o2efJkbX7p0qUsnxO5x4MPPqjNp0+fbsnuuece7bHp6enaXHfXERGRhIQEw9kB3qHrLL99+3btsXbr+ZVXXnHrnGCPT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggCZiWRQdHa3NP/74Y21ep04dT05HS9dcbNKkSdpjBw4c6OnpAFlWunRpbV61alVtHhYWZskCA/n9IbLObi0WL15cm3ft2lWb65p36datiH0jSVcEBAS4NPb333+vzXXNyF566SXtsXfddZc279GjhzZH7la7dm1tPmbMGG1u1zBMZ9y4cS6NDeRGdj+PGzdunM0zwc14BwkAAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABigC7cLYmJiLNmnn36qPbZBgwYujX358mVLtmzZMu2x58+f1+Z9+vQxPp+ucyqQW+g6FouwruEeJUuW1OYPPfSQJRs/frz22DvuuCPL8/j222+1+aFDh7T5nj17tLmuG7Jdh2+7TtlNmjTR5r1797Zkr732mvbY8uXLa3PkbkWLFtXmdu+PypQpYzx2ixYttPnatWuNxwB8TVBQkEv5tWvXPDkdv8Qn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cLsgLi7OkrnabfvYsWPavEuXLpbMrgNriRIltLkrXbiB3Kxu3bouHa/rXJ+Wluau6cDHJCcna/MlS5ZYsu+++0577I4dO1w659y5cy3Z8OHDtcdeuHDBpbF18ubVv/xXrVpVm0+ePFmbP/7445YsJSVFe+zIkSMNZ4ecStdx29Vu23bvg958801Ltn79evPJAX6iadOm2lx3xwURkZ07d3pyOn6JT6ABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADdOHW6NGjhzb/+9//bjzG2rVrtXlCQoI213XczpMnj/ZYu/m54sSJE1keA/C0xo0ba/MHHnjApXE2bdpkyc6dO5eZKcEPXLp0yfjYM2fOaPOAgACXxv7yyy8tmTu6bYuIREZGWrKOHTtqj33ppZe0eYUKFbS5ruP2woULtcdu3brVborIJXRd1++//36XxpgxY4ZLOeAPfv75Z0v2448/ao+tUqWKp6eD2+ATaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABigiZjG
K6+8os3DwsKMx3j11Ve1+Y4dO4zHKF26tDYfM2aM8RgiIn/88YclmzVrlktjAN7Qq1cvbR4UFOTSOMePH3fHdABjSiltnpycrM1deW2wU61aNW0+Z84cS9aoUSPtsXbzXrx4sTafMGGCJdM1xUTuYtes9LXXXjMe44svvtDms2fPztScAF/222+/WbKTJ09qj6WJmPfxCTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCALtwaCxYs0OZDhw61ZHnz6i9h0aJFtfkdd9yhzZOSkizZiy++aDdFlyxcuNCS6br9ATlNVFSUS8dfunRJmw8aNMgd0wEs7NbciRMntHmJEiW0+cSJEy3ZtGnTtMd2795dmz/33HPaPCIiwpLZvQZ07txZm//www/a/MKFC9ocuUPhwoW1+eDBg7V5cHCw8djjxo3T5tevXzceA4C5hx9+WJvv3Lkzm2fi+/gEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEAXbg277tfff/+9JatTp4722A8++ECb23U+XbdunSWLj4+3m6LW1atXtfnnn3/u0jiAN9SoUcOStW3bVnusUkqbf/fdd9o8LS0t8xMDbsHu5+7Zs2e1uV0X7l69elmyRx99VHtsUFCQNg8NDdXme/bssWRjx47VHrtjxw5tDt8UHR2tzatVq6bN09PTLdmIESO0x+7atSvzE/OiPHnyaPOwsDBtruvEz2sOvKFKlSrenoLf4BNoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAF24XfCvf/3LKBMR+eKLL7T56dOntfkPP/yQ+Yn9v+nTp2vztWvXZnlswNOGDRuW5TEWL17shpkA5i5cuKDNR44cqc1XrFhhPHbBggW1+bFjx7R5o0aNtPmpU6eMzwn/MmTIEG1ud6eDrVu3WrLXXnvNrXPKCrsO2j169LBkujs/iIjExMRo8y5dumjzMWPGWLIZM2Zojz137pw2B5C78Ak0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADNBEzEM2bNigzZcuXarNAwICsnzO0aNHZ3kMwFuio6Mtmd2+sGvGt2DBArfOCbiRrrnQk08+qT22f//+2tyuOZMrIiIitHlkZKQ2p4kY7BrSNW7c2KVxvvzySzfMxly5cuW0eWxsrDZv1qyZNn/sscfcNqeb6ZqIVa9eXXtst27dtPn169fdOSUAHsYn0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAG6cHvI559/rs1btmypzV3pzLpq1apMzQnICXSdjEVEGjZsaMns9sU//vEPbf77779nel7A7SxZssSS1a1bV3usq922Dx06ZMkCA/W/4y5fvrw2b9CggfHY8C+hoaHa3G4t2dm3b1+W51KkSBFt3qVLF0s2Z84c7bF58+rfvrqy73bu3KnNp02bps1r1aqlzUeNGmXJHn74Ye2xUVFR2tzuzhKAK44ePertKfgNPoEGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAM0IU7i55//nlt3rx5c21u1znSFXadvBMTE7V5enq6JZs8ebL22PPnz2vzr776SpsnJSVpc8DO7NmztXm+fPksWVpamvbYf/7zn26dE3Cjdu3aaXO7LryusOuIPXLkSONjv//+e21u93q0dOlSS3bhwgW7KQJuUbRoUW0+ePBgbT5ixAjjsQ8fPqzNBw4caDzG3r17tXlqaqo2j4uLMx77zJkz2vzq1avGYwCu4r1R9uETaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwABduF2g66w9ceJE7bHBwcGeno5FdHS0NldKWbIZM2a4NPbx48e1+eXLly3ZW2+9pT125syZLp0TvqlKlSrGx9p1lLTrOA+4
g133YN1dFAID9b+H1v1sFBH529/+ps337NljyUqWLKk9NiAgQJvXrFlTm3fo0MGS0a0VmdG9e3dLtnLlSu2x8+bN0+Z2Xe511qxZo8179+6tzX/99Vfjse32S0JCgjbv2LGj8dh2dy45e/as8RiAnZ07d2pzuzs0wP34BBoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGaCKm8fzzz2vzZ555xpKFhIR4ejo5QqlSpYyPnTRpkjaPi4vL8jzq16+f5TGQPV588UVtXrZsWeMxFi9e7K7pAMbatm2rzXUNGc+fP689tlevXtpc1yzMTnJysjZfsmSJNu/cubM2b9OmjSWjiZh/sWtqd+zYMW1eunRpbV6rVi1LNnLkSO2xzZo1M5qbw08//WTJ7Bp32X09oaGh2rx9+/aWbNCgQdpjXX2foWt0NnbsWJfGAO655x5L1qBBA+2x27dv1+YXL15065xgj0+gAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAA37RhTsmJkabz507V5vHxsZq8+Dg4CzPZdu2bdr873//e5bHDgzU/z4kPT3dko0aNUp7bGRkpDavWrWqNg8PD7dkdp3Ja9Sooc337t2rzVetWqXNkbPky5dPm48ePVqb58mTR5vv37/fkn3xxReZnxiQDb799lttvn79+iyPfenSJW2emJjo0ji61wD4lz/++EObd+3aVZvv2LFDm1euXNmSTZw4MfMTu8Hx48ctWbdu3bTHPvjgg9q8cePG2tyuq7jOmTNntPlXX32lzWfOnGnJfvnlF+PzASL6DvK699jIGfgEGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMOAXXbiHDh2qzVu1apXlse26pNp11f7444+1+alTp7I8F1e42iG2adOm2rxQoULGY6Smpmpzum3nHrqO24sWLdIea9eN3c6UKVMyNSfA3aKjo42PXbp0qTZPTk7O8jzs7opw//33uzTO6dOnszwX+KY9e/Zo82effVabDx8+3JKVKlXKLXNp1qyZUSYiEhAQoM2VUsbnW7FihTa3e0+i67YNuMvAgQO9PQW4gE+gAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAA37RhXvQoEHaPD093aVxLl++bMmee+457bHvv/++S2PndBs3bvT2FJADREREWLL27du7NMY333yjzRcuXJipOQHuZtfhV5fbdcp2lW6cuLg47bH33HOPS2Nv3rw5U3OC77t+/bo2f/PNN7X5mjVrLNn48eO1xz722GOZn9ht2L0ncaWD9pUrV7TH2l0TwB1q1KihzTt37mw8xq5du9w1HWQSn0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAQIBSSnlscJtGLNktNTVVmwcHB2vzlJQUbT5gwABL9uGHH2Z+YvA4Dy5vIzllD7hLt27dLNlHH33k0hj16tXT5rt3787UnHBr3twDuXX9jxo1SpuPGzfOkiUmJmqPLVWqlDavVq2aNp8zZ44la9SokfZYu+/pkiVLtHl8fLwls3td9DW8BsDf8RqQs9SuXVub6xqD2b2+PPDAA9r8yJEjmZ6Xr/LU+ucTaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwEBeb08gOzz44IPa3K6L3ezZs7X5hQsX3DUlIFf64osvLNmJEye0x54/f16b//jjj26dE+Bu77zzjjZv1aqVJbPrKr9161ZtXrVqVW0eERFhODv7btu9evXS5v7ScRsAfIldV226bXsfn0ADAAAAAGCAAhoAAAAAAAMU0AAAAAAAGKCABgAAAADAAAU0AAAAAAAGApRSymODBwR4amjAiAeXtxH2ALzNm3vA19Z/06ZNLdlrr72mPbZWrVoujb1nzx5L9tVXX2mPnTx5sjan27YVrwHwd7wG
5CwxMTHafMOGDZZs0aJF2mNffvlld07Jp3lq/fMJNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIAu3PBpdGCFv6MDK/wZrwHwd7wGwJ/RhRsAAAAAAC+igAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAAQpoAAAAAAAMUEADAAAAAGCAAhoAAAAAAAMBSinl7UkAAAAAAJDT8Qk0AAAAAAAGKKABAAAAADBAAQ0AAAAAgAEKaAAAAAAADFBAAwAAAABggAIaAAAAAAADFNAAAAAAABiggAYAAAAAwAAFNAAAAAAABiigAQAAAAAwQAENAAAAAIABCmgAAAAAAAxQQAMAAAAAYIACGgAAAAAAAxTQAAAAAAAYoIAGAAAAAMAABTQAAAAAAAYooAEAAAAAMEABDQAAAACAgf8Db0nGSrpTizAAAAAASUVORK5CYII=",
367 | "text/plain": [
368 | ""
369 | ]
370 | },
371 | "metadata": {
372 | "image/png": {
373 | "height": 558,
374 | "width": 488
375 | }
376 | },
377 | "output_type": "display_data"
378 | }
379 | ],
380 | "source": [
381 | "model_test(mlp,test_data,test_label,device)"
382 | ]
383 | },
384 | {
385 | "cell_type": "code",
386 | "execution_count": null,
387 | "id": "b557e4ed",
388 | "metadata": {},
389 | "outputs": [],
390 | "source": []
391 | }
392 | ],
393 | "metadata": {
394 | "kernelspec": {
395 | "display_name": "Python 3 (ipykernel)",
396 | "language": "python",
397 | "name": "python3"
398 | },
399 | "language_info": {
400 | "codemirror_mode": {
401 | "name": "ipython",
402 | "version": 3
403 | },
404 | "file_extension": ".py",
405 | "mimetype": "text/x-python",
406 | "name": "python",
407 | "nbconvert_exporter": "python",
408 | "pygments_lexer": "ipython3",
409 | "version": "3.9.16"
410 | }
411 | },
412 | "nbformat": 4,
413 | "nbformat_minor": 5
414 | }
415 |
--------------------------------------------------------------------------------
/code/module.py:
--------------------------------------------------------------------------------
1 | import math
2 | import torch as th
3 | import torch.nn as nn
4 | import torch.nn.functional as F
5 | from abc import abstractmethod
6 |
class TimestepBlock(nn.Module):
    """
    Any module where forward() takes timestep embeddings as a second argument.
    """

    # NOTE: nn.Module does not use ABCMeta, so @abstractmethod documents the
    # required interface but does not by itself prevent instantiation.
    @abstractmethod
    def forward(self, x, emb):
        """
        Apply the module to `x` given `emb` timestep embeddings.

        :param x: input tensor [B x C x ...]
        :param emb: timestep embedding tensor passed alongside `x`
        """
17 |
class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
    """
    A sequential container that forwards the timestep embedding `emb` to
    every child that accepts it (i.e. every `TimestepBlock`), and calls the
    remaining children with the activation alone.
    """

    def forward(self, x, emb):
        h = x
        for child in self:
            h = child(h, emb) if isinstance(child, TimestepBlock) else child(h)
        return h
31 |
def zero_module(module):
    """
    Set every parameter of `module` to zero in-place and return the module.
    """
    with th.no_grad():
        for param in module.parameters():
            param.zero_()
    return module
39 |
def conv_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D convolution module.
    """
    conv_classes = {1: nn.Conv1d, 2: nn.Conv2d, 3: nn.Conv3d}
    if dims not in conv_classes:
        raise ValueError(f"unsupported dimensions: {dims}")
    return conv_classes[dims](*args, **kwargs)
51 |
def avg_pool_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D average pooling module.
    """
    pool_classes = {1: nn.AvgPool1d, 2: nn.AvgPool2d, 3: nn.AvgPool3d}
    if dims not in pool_classes:
        raise ValueError(f"unsupported dimensions: {dims}")
    return pool_classes[dims](*args, **kwargs)
63 |
class GroupNorm32(nn.GroupNorm):
    """Group normalization computed in float32, cast back to the input dtype."""

    def forward(self, x):
        input_dtype = x.dtype
        normalized = super().forward(x.float())
        return normalized.type(input_dtype)
67 |
def normalization(n_channels, n_groups=1):
    """
    Make a standard normalization layer.

    :param n_channels: number of input channels.
    :param n_groups: number of groups. if this is 1, then it is identical to layernorm.
    :return: an nn.Module for normalization.
    """
    layer = GroupNorm32(num_channels=n_channels, num_groups=n_groups)
    return layer
77 |
class Upsample(nn.Module):
    """
    An upsampling layer with an optional convolution.

    :param n_channels: number of channels in the inputs and outputs.
    :param up_rate: integer upsample rate (default 2).
    :param up_mode: interpolation mode ('nearest' or 'bilinear').
    :param use_conv: if True, apply a 3x3 convolution after upsampling.
    :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
        upsampling occurs in the inner-two dimensions.
    :param n_out_channels: (optional) output channels of the convolution,
        in case they differ from the input channels.
    :param padding_mode: padding mode of the optional convolution.
    :param padding: padding of the optional convolution.
    """

    def __init__(
        self,
        n_channels,
        up_rate        = 2,         # upsample rate
        up_mode        = 'nearest', # upsample mode ('nearest' or 'bilinear')
        use_conv       = False,     # (optional) use output conv
        dims           = 2,         # (optional) spatial dimension
        n_out_channels = None,      # (optional) in case output channels differ
        padding_mode   = 'zeros',
        padding        = 1
    ):
        super().__init__()
        self.n_channels     = n_channels
        self.up_rate        = up_rate
        self.up_mode        = up_mode
        self.use_conv       = use_conv
        self.dims           = dims
        self.n_out_channels = n_out_channels or n_channels
        self.padding_mode   = padding_mode
        self.padding        = padding

        if use_conv:
            self.conv = conv_nd(
                dims         = dims,
                in_channels  = self.n_channels,
                out_channels = self.n_out_channels,
                kernel_size  = 3,
                padding      = padding,
                padding_mode = padding_mode)

    def forward(self, x):
        """
        :param x: [B x C x W x H]
        :return: [B x C x (up_rate*W) x (up_rate*H)]
        """
        assert x.shape[1] == self.n_channels
        if self.dims == 3:
            # 3D input: keep the depth dimension, upsample the inner-two dims.
            # BUGFIX: the factor was previously hard-coded to 2 here, silently
            # ignoring `up_rate`; now consistent with the 1D/2D branch.
            x = F.interpolate(
                input = x,
                size  = (x.shape[2],
                         x.shape[3] * self.up_rate,
                         x.shape[4] * self.up_rate),
                mode  = self.up_mode
            )
        else:
            x = F.interpolate(
                input        = x,
                scale_factor = self.up_rate,
                mode         = self.up_mode
            )  # 'nearest' or 'bilinear'

        # (optional) final convolution
        if self.use_conv:
            x = self.conv(x)
        return x
141 |
class Downsample(nn.Module):
    """
    A downsampling layer with an optional convolution.

    :param n_channels: channels in the inputs and outputs.
    :param use_conv: a bool determining if a convolution is applied.
    :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
        downsampling occurs in the inner-two dimensions.
    """

    def __init__(
        self,
        n_channels,
        down_rate      = 2,       # down rate
        use_conv       = False,   # (optional) use output conv
        dims           = 2,       # (optional) spatial dimension
        n_out_channels = None,    # (optional) in case output channels differ
        padding_mode   = 'zeros',
        padding        = 1
    ):
        super().__init__()
        self.n_channels     = n_channels
        self.down_rate      = down_rate
        self.n_out_channels = n_out_channels or n_channels
        self.use_conv       = use_conv
        self.dims           = dims
        # For 3D signals only the inner-two (spatial) dimensions are strided.
        if dims == 3:
            stride = (1, self.down_rate, self.down_rate)
        else:
            stride = self.down_rate
        if use_conv:
            # A strided 3x3 convolution performs the downsampling.
            self.op = conv_nd(
                dims         = dims,
                in_channels  = self.n_channels,
                out_channels = self.n_out_channels,
                kernel_size  = 3,
                stride       = stride,
                padding      = padding,
                padding_mode = padding_mode
            )
        else:
            # Average pooling cannot change the channel count.
            assert self.n_channels == self.n_out_channels
            self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)

    def forward(self, x):
        assert x.shape[1] == self.n_channels
        return self.op(x)
186 |
class QKVAttentionLegacy(nn.Module):
    """
    A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping
    """

    def __init__(self, n_heads):
        super().__init__()
        self.n_heads = n_heads

    def forward(self, qkv):
        """
        Apply QKV attention.
        (B:#batches, C:channel size, T:#tokens, H:#heads)

        :param qkv: an [B x (3*C) x T] tensor of Qs, Ks, and Vs.
        :return: an [B x C x T] tensor after attention.
        """
        B, width, T = qkv.shape
        assert width % (3 * self.n_heads) == 0
        head_dim = width // (3 * self.n_heads)
        # Fold the heads into the batch dimension, then split into Q, K, V.
        folded = qkv.reshape(B * self.n_heads, head_dim * 3, T)
        q, k, v = folded.split(head_dim, dim=1)
        # Scale q and k each by ch^(-1/4): more stable with f16 than
        # dividing the logits afterwards.
        scale = 1 / math.sqrt(math.sqrt(head_dim))
        logits = th.einsum("bct,bcs->bts", q * scale, k * scale)
        attn = th.softmax(logits.float(), dim=-1).type(logits.dtype)
        mixed = th.einsum("bts,bcs->bct", attn, v)  # [(H*B) x (C//H) x T]
        return mixed.reshape(B, -1, T)              # [B x C x T]
215 |
class AttentionBlock(nn.Module):
    """
    An attention block that allows spatial positions to attend to each other.
    Input: [B x C x W x H] tensor
    Output: [B x C x W x H] tensor
    """

    def __init__(
        self,
        name       = 'attentionblock',
        n_channels = 1,
        n_heads    = 1,
        n_groups   = 32,
    ):
        super().__init__()
        self.name       = name
        self.n_channels = n_channels
        self.n_heads    = n_heads
        assert (
            n_channels % n_heads == 0
        ), f"n_channels:[%d] should be divisible by n_heads:[%d]."%(n_channels,n_heads)

        # Pre-attention group normalization
        self.norm = normalization(n_channels=n_channels, n_groups=n_groups)

        # 1x1 conv producing the concatenated Q, K, V (triples the channels)
        self.qkv = nn.Conv1d(
            in_channels  = self.n_channels,
            out_channels = self.n_channels * 3,
            kernel_size  = 1
        )

        # Multi-head QKV attention
        self.attention = QKVAttentionLegacy(n_heads=self.n_heads)

        # Output projection; zero-initialized so the residual block starts
        # out as an identity mapping.
        self.proj_out = zero_module(
            nn.Conv1d(
                in_channels  = self.n_channels,
                out_channels = self.n_channels,
                kernel_size  = 1
            )
        )

    def forward(self, x):
        """
        :param x: [B x C x W x H] tensor
        :return out: [B x C x W x H] tensor
        """
        b, c, *spatial = x.shape
        flat      = x.reshape(b, c, -1)        # [B x C x WH]
        normed    = self.norm(flat)            # [B x C x WH]
        qkv       = self.qkv(normed)           # [B x 3C x WH]
        attended  = self.attention(qkv)        # [B x C x WH]
        projected = self.proj_out(attended)    # [B x C x WH]
        # Residual connection, restored to the original spatial shape
        out = (flat + projected).reshape(b, c, *spatial)
        intermediate_output_dict = {
            'x'      : x,
            'x_rsh'  : flat,
            'x_nzd'  : normed,
            'qkv'    : qkv,
            'h_att'  : attended,
            'h_proj' : projected,
            'out'    : out,
        }
        return out, intermediate_output_dict
286 |
class ResBlock(TimestepBlock):
    """
    A residual block that can optionally change the number of channels and resolution

    :param n_channels: the number of input channels
    :param n_emb_channels: the number of timestep embedding channels
    :param n_out_channels: (if specified) the number of output channels
    :param n_groups: the number of groups in group normalization layer
    :param dims: spatial dimension
    :param p_dropout: the rate of dropout
    :param actv: activation
    :param use_conv: if True, and n_out_channels is specified,
        use 3x3 conv instead of 1x1 conv
    :param use_scale_shift_norm: if True, use scale_shift_norm for handling emb
    :param upsample: if True, upsample
    :param downsample: if True, downsample
    :param sample_mode: upsample, downsample mode ('nearest' or 'bilinear')
    :param padding_mode: str
    :param padding: int
    """
    def __init__(
        self,
        name                 = 'resblock',
        n_channels           = 128,
        n_emb_channels       = 128,
        n_out_channels       = None,
        n_groups             = 16,
        dims                 = 2,
        p_dropout            = 0.5,
        kernel_size          = 3,
        # NOTE(review): nn.SiLU() as a default is evaluated once and shared
        # across all instances; harmless here since SiLU has no parameters.
        actv                 = nn.SiLU(),
        use_conv             = False,
        use_scale_shift_norm = True,
        upsample             = False,
        downsample           = False,
        up_rate              = 2,
        down_rate            = 2,
        sample_mode          = 'nearest',
        padding_mode         = 'zeros',
        padding              = 1,
    ):
        super().__init__()
        self.name                 = name
        self.n_channels           = n_channels
        self.n_emb_channels       = n_emb_channels
        self.n_groups             = n_groups
        self.dims                 = dims
        self.n_out_channels       = n_out_channels or self.n_channels
        self.kernel_size          = kernel_size
        self.p_dropout            = p_dropout
        self.actv                 = actv
        self.use_conv             = use_conv
        self.use_scale_shift_norm = use_scale_shift_norm
        self.upsample             = upsample
        self.downsample           = downsample
        self.up_rate              = up_rate
        self.down_rate            = down_rate
        self.sample_mode          = sample_mode
        self.padding_mode         = padding_mode
        self.padding              = padding

        # Input layers: groupnorm -> activation -> conv
        # (forward() relies on this exact ordering; see in_layers[:-1] there)
        self.in_layers = nn.Sequential(
            normalization(n_channels=self.n_channels,n_groups=self.n_groups),
            self.actv,
            conv_nd(
                dims         = self.dims,
                in_channels  = self.n_channels,
                out_channels = self.n_out_channels,
                kernel_size  = self.kernel_size,
                padding      = self.padding,
                padding_mode = self.padding_mode
            )
        )

        # Upsample or downsample
        # h_upd resamples the hidden activation, x_upd resamples the skip path,
        # so both sides of the residual sum end up at the same resolution.
        self.updown = self.upsample or self.downsample
        if self.upsample:
            self.h_upd = Upsample(
                n_channels = self.n_channels,
                up_rate    = self.up_rate,
                up_mode    = self.sample_mode,
                dims       = self.dims)
            self.x_upd = Upsample(
                n_channels = self.n_channels,
                up_rate    = self.up_rate,
                up_mode    = self.sample_mode,
                dims       = self.dims)
        elif self.downsample:
            self.h_upd = Downsample(
                n_channels = self.n_channels,
                down_rate  = self.down_rate,
                dims       = self.dims)
            self.x_upd = Downsample(
                n_channels = self.n_channels,
                down_rate  = self.down_rate,
                dims       = self.dims)
        else:
            self.h_upd = nn.Identity()
            self.x_upd = nn.Identity()

        # Embedding layers: activation -> linear
        # Produces 2*C features for scale-shift norm (split into scale and
        # shift in forward()), otherwise C features added directly to h.
        self.emb_layers = nn.Sequential(
            self.actv,
            nn.Linear(
                in_features  = self.n_emb_channels,
                out_features = 2*self.n_out_channels if self.use_scale_shift_norm
                               else self.n_out_channels,
            ),
        )

        # Output layers: groupnorm -> activation -> dropout -> conv
        # The final conv is zero-initialized, so at init the block reduces to
        # its skip connection. forward() relies on out_layers[0] being the norm.
        self.out_layers = nn.Sequential(
            normalization(n_channels=self.n_out_channels,n_groups=self.n_groups),
            self.actv,
            nn.Dropout(p=self.p_dropout),
            zero_module(
                conv_nd(
                    dims         = self.dims,
                    in_channels  = self.n_out_channels,
                    out_channels = self.n_out_channels,
                    kernel_size  = self.kernel_size,
                    padding      = self.padding,
                    padding_mode = self.padding_mode
                )
            ),
        )
        # Skip connection: identity when the channel count is unchanged,
        # otherwise a conv (3x3 if use_conv else 1x1) to match channels.
        if self.n_channels == self.n_out_channels:
            self.skip_connection = nn.Identity()
        elif use_conv:
            self.skip_connection = conv_nd(
                dims         = self.dims,
                in_channels  = self.n_channels,
                out_channels = self.n_out_channels,
                kernel_size  = self.kernel_size,
                padding      = self.padding,
                padding_mode = self.padding_mode
            )
        else:
            self.skip_connection = conv_nd(
                dims         = self.dims,
                in_channels  = self.n_channels,
                out_channels = self.n_out_channels,
                kernel_size  = 1
            )

    def forward(self,x,emb):
        """
        :param x: [B x C x ...]
        :param emb: [B x n_emb_channels]
        :return: [B x C x ...]
        """
        # Input layer (groupnorm -> actv -> conv)
        if self.updown: # upsample or downsample
            # Resample between the norm/activation and the conv so the conv
            # operates at the new resolution; resample the skip path likewise.
            in_norm_actv = self.in_layers[:-1]
            in_conv      = self.in_layers[-1]
            h = in_norm_actv(x)
            h = self.h_upd(h)
            h = in_conv(h)
            x = self.x_upd(x)
        else:
            h = self.in_layers(x) # [B x C x ...]

        # Embedding layer
        emb_out = self.emb_layers(emb).type(h.dtype)
        while len(emb_out.shape) < len(h.shape):
            emb_out = emb_out[..., None] # match 'emb_out' with 'h': [B x C x ...]

        # Combine input with embedding
        if self.use_scale_shift_norm:
            out_norm         = self.out_layers[0]  # layernorm
            out_actv_dr_conv = self.out_layers[1:] # activation -> dropout -> conv
            # emb_out: [B x 2C x ...]
            scale,shift = th.chunk(emb_out, 2, dim=1)  # [B x C x ...]
            h = out_norm(h) * (1.0 + scale) + shift    # [B x C x ...]
            h = out_actv_dr_conv(h)                    # [B x C x ...]
        else:
            # emb_out: [B x C x ...]
            h = h + emb_out
            h = self.out_layers(h) # layernorm -> activation -> dropout -> conv

        # Skip connection
        out = h + self.skip_connection(x) # [B x C x ...]
        return out # [B x C x ...]
471 |
472 |
--------------------------------------------------------------------------------
/code/util.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import torch as th
4 | from scipy.spatial import distance
5 |
def th2np(x):
    """Convert a torch tensor to a numpy array (detached, moved to CPU)."""
    detached = x.detach()
    return detached.cpu().numpy()
8 |
def get_torch_size_string(x):
    """Return the shape of `x` as an 'AxBxC'-style string."""
    return "x".join(str(dim) for dim in x.shape)
11 |
def plot_4x4_torch_tensor(
    x_torch,
    figsize  = (4,4),
    cmap     = 'gray',
    info_str = '',
    top      = 0.92,
    hspace   = 0.1
):
    """
    Show a batch of images on a 4x4 subplot grid.

    :param x_torch: [B x C x W x H]
    """
    n_imgs = x_torch.shape[0]
    # [B x C x W x H] -> [B x W x H x C] once, outside the loop
    imgs_np = x_torch.permute(0, 2, 3, 1).detach().numpy()
    plt.figure(figsize=figsize)
    for img_idx in range(n_imgs):
        plt.subplot(4, 4, img_idx + 1)
        plt.imshow(imgs_np[img_idx, :, :, :], cmap=cmap)
        plt.axis('off')
    plt.subplots_adjust(
        left=0.0, right=1.0, bottom=0.0, top=top, wspace=0.0, hspace=hspace)
    plt.suptitle('%s[%d] Images of [%dx%d] sizes' %
                 (info_str, n_imgs, x_torch.shape[2], x_torch.shape[3]), fontsize=10)
    plt.show()
34 |
def plot_1xN_torch_img_tensor(
    x_torch,
    title_str_list = None,
    title_fontsize = 20):
    """
    Show a batch of images in a single row of subplots.

    : param x_torch: [B x C x W x H]
    """
    imgs = x_torch.cpu().numpy() # [B x C x W x H]
    n_imgs = imgs.shape[0]
    plt.figure(figsize=(n_imgs * 2, 3))
    for idx in range(n_imgs):
        plt.subplot(1, n_imgs, idx + 1)
        img = imgs[idx]
        if img.shape[0] == 1:
            # Single channel: grayscale
            plt.imshow(img[0, :, :], cmap='gray')
        else:
            # CHW -> HWC for imshow
            plt.imshow(img.transpose(1, 2, 0))
        if title_str_list:
            plt.title(title_str_list[idx], fontsize=title_fontsize)
        plt.axis('off')
    plt.tight_layout()
    plt.show()
56 |
def plot_1xN_torch_traj_tensor(
    times,
    x_torch,
    title_str_list = None,
    title_fontsize = 20,
    ylim           = None,
    figsize        = None,
):
    """
    Plot 1-D trajectories in a single row of subplots.

    :param times: [L] time stamps for the x-axis.
    :param x_torch: [B x C x L] tensor; channel 0 of each batch item is plotted.
    :param title_str_list: optional list of per-subplot title strings.
    :param title_fontsize: title font size.
    :param ylim: optional (ymin, ymax) applied to every subplot.
    :param figsize: optional figure size; defaults to (B*2, 3).
    """
    xt_np = x_torch.cpu().numpy() # [B x C x L]
    n_trajs = xt_np.shape[0]
    if figsize is None: figsize = (n_trajs*2,3)
    plt.figure(figsize=figsize)
    for traj_idx in range(n_trajs):
        plt.subplot(1,n_trajs,traj_idx+1)
        # Reuse the numpy copy instead of transferring from device per subplot
        # (also removes the unused 'L = times.shape[0]' local).
        plt.plot(times,xt_np[traj_idx,0,:],'-',color='k')
        if title_str_list:
            plt.title(title_str_list[traj_idx],fontsize=title_fontsize)
        if ylim:
            plt.ylim(ylim)
    plt.tight_layout()
    plt.show()
82 |
def print_model_parameters(model):
    """
    Print the index, name, shape, and element count of each model parameter.
    """
    for p_idx, (param_name, param) in enumerate(model.named_parameters()):
        shape_str = "x".join(map(str, param.shape))
        print("[%2d] parameter:[%27s] shape:[%12s] numel:[%10d]" %
              (p_idx, param_name, shape_str, param.numel()))
95 |
def print_model_layers(model, x_torch):
    """
    Run `model` on `x_torch` and print the size of every intermediate output.
    The model is expected to return (output, intermediate_output_list) and to
    expose a `layer_names` attribute.
    """
    y_torch, intermediate_output_list = model(x_torch)
    batch_size = x_torch.shape[0]
    print("batch_size:[%d]" % (batch_size))
    print("[ ] layer:[%15s] size:[%14s]"
          % ('input', "x".join(map(str, x_torch.shape))))
    for idx, layer_name in enumerate(model.layer_names):
        out = intermediate_output_list[idx]
        print("[%2d] layer:[%15s] size:[%14s] numel:[%10d]" %
              (idx, layer_name, "x".join(map(str, out.shape)), out.numel()))
114 |
def model_train(model,optm,loss,train_iter,test_iter,n_epoch,print_every,device):
    """
    Train `model` with optimizer `optm` and loss function `loss`.

    :param train_iter, test_iter: iterables of (batch_in, batch_out) pairs.
    :param n_epoch: number of training epochs.
    :param print_every: report train/test accuracy every `print_every` epochs.
    :param device: device batches are moved to.
    """
    model.init_param(VERBOSE=False)
    model.train()
    for epoch in range(n_epoch):
        loss_val_sum = 0.0
        for batch_in,batch_out in train_iter:
            # Forward path (models here return (prediction, intermediates))
            if isinstance(model.x_dim,int):
                y_pred,_ = model(batch_in.view(-1,model.x_dim).to(device))
            else:
                y_pred,_ = model(batch_in.view((-1,)+model.x_dim).to(device))
            loss_out = loss(y_pred,batch_out.to(device))
            # Update
            optm.zero_grad()    # reset gradient
            loss_out.backward() # back-propagate loss
            optm.step()         # optimizer update
            # BUGFIX: accumulate a Python float (.item()) instead of the loss
            # tensor itself, so the running sum does not retain autograd
            # tensors (and device memory) across batches.
            loss_val_sum += loss_out.item()
        loss_val_avg = loss_val_sum/len(train_iter)
        # Print
        if ((epoch%print_every)==0) or (epoch==(n_epoch-1)):
            train_accr = model_eval(model,train_iter,device)
            test_accr = model_eval(model,test_iter,device)
            print ("epoch:[%2d/%d] loss:[%.3f] train_accr:[%.4f] test_accr:[%.4f]."%
                   (epoch,n_epoch,loss_val_avg,train_accr,test_accr))
142 |
def model_eval(model,data_iter,device):
    """
    Compute the classification accuracy of `model` over `data_iter`.
    :param model: module exposing `x_dim`; forward returns (logits, aux)
    :param data_iter: iterable of (batch_in,batch_out) pairs
    :param device: device on which to run the forward passes
    :return: accuracy as a float in [0,1]
    """
    n_total = 0
    n_correct = 0
    model.eval() # evaluate (affects DropOut and BN)
    with th.no_grad():
        for batch_in,batch_out in data_iter:
            y_trgt = batch_out.to(device)
            # Flatten or reshape input to the model's expected dimensionality
            if isinstance(model.x_dim,int):
                x_in = batch_in.view(-1,model.x_dim).to(device)
            else:
                x_in = batch_in.view((-1,)+model.x_dim).to(device)
            model_pred,_ = model(x_in)
            y_pred = model_pred.data.argmax(dim=1)
            n_correct += (y_pred==y_trgt).sum().item()
            n_total += batch_in.size(0)
    model.train() # back to train mode
    return n_correct/n_total
def model_test(model,test_data,test_label,device):
    """
    Predict on 25 randomly chosen test samples and show them in a 5x5 grid;
    the title of each image is black when the prediction is correct, red otherwise.
    :param test_data: image tensor [N x ...] with values in [0,255]
    :param test_label: integer label tensor [N]
    :param device: device on which to run the forward pass
    """
    n_sample = 25
    sample_indices = np.random.choice(len(test_data),n_sample,replace=False)
    test_data_samples = test_data[sample_indices]
    test_label_samples = test_label[sample_indices]
    model.eval()
    with th.no_grad():
        # Scale to [0,1] and reshape to the model's expected input spec
        if isinstance(model.x_dim,int):
            x_in = test_data_samples.view(-1,model.x_dim).type(th.float).to(device)/255.
        else:
            x_in = test_data_samples.view((-1,)+model.x_dim).type(th.float).to(device)/255.
        y_pred,_ = model(x_in)
    y_pred = y_pred.argmax(axis=1)
    # Plot
    plt.figure(figsize=(6,6))
    plt.subplots_adjust(top=1.0)
    for idx in range(n_sample):
        plt.subplot(5,5,idx+1)
        plt.imshow(test_data_samples[idx],cmap='gray')
        plt.axis('off')
        is_correct = (y_pred[idx] == test_label_samples[idx])
        fontcolor = 'k' if is_correct else 'r'
        plt.title("Pred:%d, Label:%d"%(y_pred[idx],test_label_samples[idx]),
                  fontsize=8,color=fontcolor)
    plt.show()
def kernel_se(x1,x2,hyp=None):
    """
    Squared-exponential kernel function.
    :param x1: [N x D] ndarray
    :param x2: [M x D] ndarray
    :param hyp: dict with 'gain' and 'len'; defaults to {'gain':1.0,'len':1.0}
    :return: [N x M] kernel matrix K[i,j] = gain*exp(-||x1[i]-x2[j]||^2/len^2)
    """
    # None sentinel instead of a mutable dict default argument
    if hyp is None: hyp = {'gain':1.0,'len':1.0}
    D = distance.cdist(x1/hyp['len'],x2/hyp['len'],'sqeuclidean')
    K = hyp['gain']*np.exp(-D)
    return K
def gp_sampler(
    times = None, # [L x 1]; defaults to 100 points on [0,1]
    hyp_gain = 1.0,
    hyp_len = 1.0,
    meas_std = 0e-8,
    n_traj = 1
):
    """
    Sample trajectories from a zero-mean Gaussian process with an SE kernel.
    :param times: [L] or [L x 1] ndarray of time stamps
    :param hyp_gain: kernel gain
    :param hyp_len: kernel length scale
    :param meas_std: iid measurement-noise std added to the samples
    :param n_traj: number of trajectories to draw
    :return: [n_traj x L] ndarray of sampled trajectories
    """
    # None sentinel instead of a shared ndarray default evaluated at def time
    if times is None:
        times = np.linspace(start=0.0,stop=1.0,num=100).reshape((-1,1)) # [L x 1]
    if len(times.shape) == 1: times = times.reshape((-1,1))
    L = times.shape[0]
    K = kernel_se(times,times,hyp={'gain':hyp_gain,'len':hyp_len}) # [L x L]
    # Jitter keeps the Cholesky factorization numerically stable
    K_chol = np.linalg.cholesky(K+1e-8*np.eye(L,L)) # [L x L]
    traj = K_chol @ np.random.randn(L,n_traj) # [L x n_traj]
    traj = traj + meas_std*np.random.randn(*traj.shape)
    return traj.T
def hbm_sampler(
    times = None, # [L x 1]; defaults to 100 points on [0,1]
    hyp_gain = 1.0,
    hyp_len = 1.0,
    meas_std = 0e-8,
    n_traj = 1
):
    """
    Sample Hilbert Brownian motion trajectories via the eigendecomposition
    K = V U V^T of the SE kernel (samples are V sqrt(U) z, z ~ N(0,I)).
    :param times: [L] or [L x 1] ndarray of time stamps
    :param hyp_gain: kernel gain
    :param hyp_len: kernel length scale
    :param meas_std: iid measurement-noise std added to the samples
    :param n_traj: number of trajectories to draw
    :return: [n_traj x L] ndarray of sampled trajectories
    """
    # None sentinel instead of a shared ndarray default evaluated at def time
    if times is None:
        times = np.linspace(start=0.0,stop=1.0,num=100).reshape((-1,1)) # [L x 1]
    if len(times.shape) == 1: times = times.reshape((-1,1))
    L = times.shape[0]
    K = kernel_se(times,times,hyp={'gain':hyp_gain,'len':hyp_len}) # [L x L]
    K = K + 1e-8*np.eye(L,L) # jitter for numerical stability
    U,V = np.linalg.eigh(K,UPLO='L')
    traj = V @ np.diag(np.sqrt(U)) @ np.random.randn(L,n_traj) # [L x n_traj]
    traj = traj + meas_std*np.random.randn(*traj.shape)
    return traj.T
def get_colors(n):
    """Return `n` colors evenly sampled from matplotlib's Set1 colormap."""
    positions = np.linspace(0,1,n)
    return [plt.cm.Set1(pos) for pos in positions]
def periodic_step(times,period,time_offset=0.0,y_min=0.0,y_max=1.0):
    """
    Periodic square wave: y_max on the first half of each period,
    y_min on the second half.
    :param times: ndarray of time stamps
    :param period: period of the wave
    :param time_offset: phase shift applied before taking the modulus
    :param y_min: output level on the second half-period
    :param y_max: output level on the first half-period
    :return: ndarray shaped like `times`
    """
    phase = np.mod(times+time_offset,period)
    y = np.zeros_like(times)
    first_half = phase < (period/2)
    y[first_half] = 1
    # Scale the 0/1 indicator into [y_min,y_max] in place
    y *= (y_max-y_min)
    y += y_min
    return y
def plot_ddpm_1d_result(
    times,x_data,step_list,x_t_list,
    plot_ancestral_sampling=True,
    plot_one_sample=False,
    lw_gt=1,lw_sample=1/2,
    ls_gt='-',ls_sample='-',
    lc_gt='b',lc_sample='k',
    ylim=(-4,+4),figsize=(6,3),title_str=None
):
    """
    Plot 1-D DDPM results: (optionally) the ancestral-sampling sequence at
    the steps in `step_list`, then the final generated trajectories against
    the ground-truth data, one figure per channel.
    :param times: [L x 1] ndarray
    :param x_data: [N x C x L] torch tensor, training data
    :param step_list: [M] ndarray, diffusion steps to plot from x_t_list
    :param x_t_list: list of [n_sample x C x L] torch tensors indexed by step
    :param plot_one_sample: plot one random sample instead of all samples
    """
    x_data_np = x_data.detach().cpu().numpy() # [n_data x C x L]
    n_data = x_data_np.shape[0] # number of GT trajectories
    C = x_data_np.shape[1] # number of channels

    # Plot a sequence of the ancestral sampling procedure
    if plot_ancestral_sampling:
        for c_idx in range(C):
            plt.figure(figsize=(15,2)); plt.rc('xtick',labelsize=6); plt.rc('ytick',labelsize=6)
            # Distinct names for the subplot index and the two inner loops
            # (the original reused `i_idx` for all three, shadowing the
            # enumerate index inside each subplot)
            for sub_idx,t in enumerate(step_list):
                plt.subplot(1,len(step_list),sub_idx+1)
                x_t = x_t_list[t] # [n_sample x C x L]
                x_t_np = x_t.detach().cpu().numpy() # [n_sample x C x L]
                n_sample = x_t_np.shape[0]
                for data_idx in range(n_data): # GT
                    plt.plot(times.flatten(),x_data_np[data_idx,c_idx,:],ls='-',color=lc_gt,lw=lw_gt)
                for sample_idx in range(n_sample): # sampled trajectories
                    plt.plot(times.flatten(),x_t_np[sample_idx,c_idx,:],ls='-',color=lc_sample,lw=lw_sample)
                plt.xlim([0.0,1.0]); plt.ylim(ylim)
                plt.xlabel('Time',fontsize=8); plt.title('Step:[%d]'%(t),fontsize=8)
            plt.tight_layout(); plt.show()

    # Plot generated data
    for c_idx in range(C):
        plt.figure(figsize=figsize)
        x_0_np = x_t_list[0].detach().cpu().numpy() # [n_sample x C x L]
        for data_idx in range(n_data): # GT
            plt.plot(times.flatten(),x_data_np[data_idx,c_idx,:],ls=ls_gt,color=lc_gt,lw=lw_gt)
        n_sample = x_0_np.shape[0]
        if plot_one_sample:
            sample_idx = np.random.randint(low=0,high=n_sample)
            plt.plot(times.flatten(),x_0_np[sample_idx,c_idx,:],ls=ls_sample,color=lc_sample,lw=lw_sample)
        else:
            for sample_idx in range(n_sample): # sampled trajectories
                plt.plot(times.flatten(),x_0_np[sample_idx,c_idx,:],ls=ls_sample,color=lc_sample,lw=lw_sample)
        plt.xlim([0.0,1.0]); plt.ylim(ylim)
        plt.xlabel('Time',fontsize=8)
        if title_str is None:
            plt.title('[%d] Groundtruth and Generated trajectories'%(c_idx),fontsize=10)
        else:
            plt.title('[%d] %s'%(c_idx,title_str),fontsize=10)
        plt.tight_layout(); plt.show()
302 |
303 |
def plot_ddpm_2d_result(
    x_data,step_list,x_t_list,n_plot=1,
    tfs=10
):
    """
    Show the 2-D ancestral-sampling sequence: one figure per sample,
    one column per diffusion step in `step_list`.
    :param x_data: [N x C x W x H] torch tensor, training data (used for channel count)
    :param step_list: [M] ndarray, diffusion steps to plot from x_t_list
    :param x_t_list: list of [n_sample x C x W x H] torch tensors indexed by step
    :param n_plot: number of samples to show
    :param tfs: title font size
    """
    is_gray = (x_data.shape[1]==1)
    n_col = len(step_list)
    for sample_idx in range(n_plot):
        plt.figure(figsize=(15,2))
        for col_idx,t in enumerate(step_list):
            img_np = x_t_list[t].detach().cpu().numpy() # [n_sample x C x W x H]
            plt.subplot(1,n_col,col_idx+1)
            if is_gray:
                plt.imshow(img_np[sample_idx,0,:,:], cmap='gray')
            else:
                # channel-first -> channel-last for imshow
                plt.imshow(img_np[sample_idx,:,:,:].transpose(1,2,0))
            plt.axis('off')
            plt.title('Step:[%d]'%(t),fontsize=tfs)
        plt.tight_layout()
        plt.show()
328 |
def get_hbm_M(times,hyp_gain=1.0,hyp_len=0.1,device='cpu'):
    """
    Get the matrix M for Hilbert Brownian motion, i.e. V sqrt(U) from the
    eigendecomposition K = V U V^T of the SE kernel over `times`.
    :param times: [L x 1] ndarray
    :param hyp_gain: kernel gain
    :param hyp_len: kernel length scale
    :param device: torch device of the returned tensor
    :return: [L x L] float32 torch tensor
    """
    L = times.shape[0]
    K = kernel_se(times,times,hyp={'gain':hyp_gain,'len':hyp_len}) # [L x L]
    K = K + 1e-8*np.eye(L,L) # jitter for numerical stability
    eigvals,eigvecs = np.linalg.eigh(K,UPLO='L')
    M_np = eigvecs @ np.diag(np.sqrt(eigvals)) # [L x L]
    return th.from_numpy(M_np).to(th.float32).to(device)
def get_resampling_steps(t_T, j, r,plot_steps=False,figsize=(15,4)):
    """
    Get the resampling schedule for RePaint-style diffusion inpainting.
    :param t_T: maximum time steps for inpainting
    :param j: jump length
    :param r: the number of resamplings per jump point
    :param plot_steps: if True, plot the schedule
    :param figsize: figure size of the optional plot
    :return: list of diffusion time steps, ending with 0
    """
    # Remaining extra resamplings per time step: r-1 at every j-th step
    jumps = np.zeros(t_T+1)
    for jump_t in range(1, t_T-j, j):
        jumps[jump_t] = r-1
    resampling_steps = []
    t = t_T+1
    while t > 1:
        t -= 1
        resampling_steps.append(t)
        if jumps[t] > 0:
            # Consume one resampling: jump j steps forward, then walk back down
            jumps[t] -= 1
            for _ in range(j):
                t += 1
                resampling_steps.append(t)
    resampling_steps.append(0)

    # (optional) plot the schedule
    if plot_steps:
        plt.figure(figsize=figsize)
        plt.plot(resampling_steps,'-',color='k',lw=1)
        plt.xlabel('Number of Transitions')
        plt.ylabel('Diffusion time step')
        plt.show()

    # Return
    return resampling_steps
--------------------------------------------------------------------------------
/img/unet.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sjchoi86/yet-another-pytorch-tutorial-v2/1b2fdabfc11586fcabc9d3aa1123c90a026f1c1f/img/unet.jpg
--------------------------------------------------------------------------------