├── utils
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-37.pyc
│   │   ├── loaders.cpython-37.pyc
│   │   └── maskedLayers.cpython-37.pyc
│   ├── loaders.py
│   ├── avg_speed_calc.py
│   ├── maskedLayers.py
│   └── plots.ipynb
├── .gitignore
├── knowledge_distillation
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-37.pyc
│   │   └── student.cpython-37.pyc
│   ├── student.py
│   ├── train_student.py
│   └── distillation.py
├── models
│   ├── exp1.pth
│   ├── exp2.pth
│   ├── exp3.pth
│   ├── best_acc.pth
│   ├── best_acc_v1.4.pth
│   ├── best_acc_student.pth
│   ├── best_acc_student_v1.4.pth
│   ├── best_acc_student_with_distillation.pth
│   └── best_acc_student_with_distillation_v1.4.pth
├── README.md
├── imgs
│   ├── pruningResults.png
│   └── Architecture-of-LeNet-5.png
├── metrics
│   ├── __pycache__
│   │   ├── flops.cpython-37.pyc
│   │   ├── memory.cpython-37.pyc
│   │   ├── size.cpython-37.pyc
│   │   ├── utils.cpython-37.pyc
│   │   ├── __init__.cpython-37.pyc
│   │   ├── accuracy.cpython-37.pyc
│   │   ├── modules.cpython-37.pyc
│   │   ├── maskedLayers.cpython-37.pyc
│   │   ├── abstract_flops.cpython-37.pyc
│   │   └── global_sparsity.cpython-37.pyc
│   ├── global_sparsity.py
│   ├── __init__.py
│   ├── size.py
│   ├── flops.py
│   ├── abstract_flops.py
│   └── utils.py
├── .vscode
│   └── settings.json
├── .pre-commit-config.yaml
├── .github
│   └── workflows
│       └── ci.yml
├── experiments.py
├── lenet_pytorch.py
├── pruning_loop.py
└── Results.md
/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | Articles/
2 | .vscode/
--------------------------------------------------------------------------------
/knowledge_distillation/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/models/exp1.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/exp1.pth
--------------------------------------------------------------------------------
/models/exp2.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/exp2.pth
--------------------------------------------------------------------------------
/models/exp3.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/exp3.pth
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pruningExperiments
2 | Repository to perform simple pruning experiments on neural networks
3 |
--------------------------------------------------------------------------------
/models/best_acc.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc.pth
--------------------------------------------------------------------------------
/imgs/pruningResults.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/imgs/pruningResults.png
--------------------------------------------------------------------------------
/models/best_acc_v1.4.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc_v1.4.pth
--------------------------------------------------------------------------------
/models/best_acc_student.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc_student.pth
--------------------------------------------------------------------------------
/imgs/Architecture-of-LeNet-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/imgs/Architecture-of-LeNet-5.png
--------------------------------------------------------------------------------
/models/best_acc_student_v1.4.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc_student_v1.4.pth
--------------------------------------------------------------------------------
/metrics/__pycache__/flops.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/flops.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/memory.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/memory.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/size.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/size.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/utils.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/utils/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/loaders.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/utils/__pycache__/loaders.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/accuracy.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/accuracy.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/modules.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/modules.cpython-37.pyc
--------------------------------------------------------------------------------
/models/best_acc_student_with_distillation.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc_student_with_distillation.pth
--------------------------------------------------------------------------------
/utils/__pycache__/maskedLayers.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/utils/__pycache__/maskedLayers.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/maskedLayers.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/maskedLayers.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/abstract_flops.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/abstract_flops.cpython-37.pyc
--------------------------------------------------------------------------------
/metrics/__pycache__/global_sparsity.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/metrics/__pycache__/global_sparsity.cpython-37.pyc
--------------------------------------------------------------------------------
/models/best_acc_student_with_distillation_v1.4.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/models/best_acc_student_with_distillation_v1.4.pth
--------------------------------------------------------------------------------
/knowledge_distillation/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/knowledge_distillation/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/knowledge_distillation/__pycache__/student.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olegpolivin/pruningExperiments/HEAD/knowledge_distillation/__pycache__/student.cpython-37.pyc
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.pythonPath": "/home/userlocal/miniconda3/envs/torch/bin/python",
3 | "python.linting.enabled": true,
4 | "python.linting.flake8Enabled": true,
5 | "python.linting.pycodestyleEnabled": false
6 | }
7 |
--------------------------------------------------------------------------------
/metrics/global_sparsity.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 |
4 | def calculate_global_sparsity(model):
5 | global_sparsity = (
6 | 100.0
7 | * float(
8 | torch.sum(model.conv1.weight == 0)
9 | + torch.sum(model.conv2.weight == 0)
10 | + torch.sum(model.fc1.weight == 0)
11 | + torch.sum(model.fc2.weight == 0)
12 | )
13 | / float(
14 | model.conv1.weight.nelement()
15 | + model.conv2.weight.nelement()
16 | + model.fc1.weight.nelement()
17 | + model.fc2.weight.nelement()
18 | )
19 | )
20 |
21 | global_compression = 100 / (100 - global_sparsity)
22 |
23 | return global_sparsity, global_compression
24 |
--------------------------------------------------------------------------------
/metrics/__init__.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 |
4 |
5 | def nonzero(tensor):
6 | """Returns absolute number of values different from 0
7 |
8 | Arguments:
9 | tensor {numpy.ndarray} -- Array to compute over
10 |
11 | Returns:
12 | int -- Number of nonzero elements
13 | """
14 | return np.sum(tensor != 0.0)
15 |
16 |
17 | # https://pytorch.org/docs/stable/tensor_attributes.html
18 | dtype2bits = {
19 | torch.float32: 32,
20 | torch.float: 32,
21 | torch.float64: 64,
22 | torch.double: 64,
23 | torch.float16: 16,
24 | torch.half: 16,
25 | torch.uint8: 8,
26 | torch.int8: 8,
27 | torch.int16: 16,
28 | torch.short: 16,
29 | torch.int32: 32,
30 | torch.int: 32,
31 | torch.int64: 64,
32 | torch.long: 64,
33 | torch.bool: 1,
34 | }
35 |
36 |
37 | from .flops import flops
38 | from .size import model_size
39 |
--------------------------------------------------------------------------------
/metrics/size.py:
--------------------------------------------------------------------------------
1 | """Model size metrics
2 | """
3 |
4 | import numpy as np
5 |
6 | from . import dtype2bits, nonzero
7 |
8 |
9 | def model_size(model, as_bits=False):
10 | """Returns absolute and nonzero model size
11 |
12 | Arguments:
13 | model {torch.nn.Module} -- Network to compute model size over
14 |
15 | Keyword Arguments:
16 | as_bits {bool} -- Whether to account for the size of dtype
17 |
18 | Returns:
19 | int -- Total number of weight & bias params
20 |         int -- Out of total_params, how many are nonzero
21 | """
22 |
23 | total_params = 0
24 | nonzero_params = 0
25 | for tensor in model.parameters():
26 | t = np.prod(tensor.shape)
27 | nz = nonzero(tensor.detach().cpu().numpy())
28 | if as_bits:
29 | bits = dtype2bits[tensor.dtype]
30 | t *= bits
31 | nz *= bits
32 | total_params += t
33 | nonzero_params += nz
34 | return int(total_params), int(nonzero_params)
35 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # I took this configuration from https://github.com/ternaus/iglovikov_helper_functions/blob/master/.pre-commit-config.yaml
2 |
3 | exclude: _pb2\.py$
4 | repos:
5 | - repo: https://github.com/pre-commit/mirrors-isort
6 | rev: f0001b2 # Use the revision sha / tag you want to point at
7 | hooks:
8 | - id: isort
9 | args: ["--profile", "black"]
10 | - repo: https://github.com/psf/black
11 | rev: 20.8b1
12 | hooks:
13 | - id: black
14 | - repo: https://github.com/asottile/yesqa
15 | rev: v1.1.0
16 | hooks:
17 | - id: yesqa
18 | additional_dependencies:
19 | - flake8-bugbear==20.1.4
20 | - flake8-builtins==1.5.2
21 | - flake8-comprehensions==3.2.2
22 | - flake8-tidy-imports==4.1.0
23 | - flake8==3.7.9
24 | - repo: https://github.com/pre-commit/pre-commit-hooks
25 | rev: v2.3.0
26 | hooks:
27 | - id: check-docstring-first
28 | - id: check-merge-conflict
29 | - id: check-yaml
30 | - id: debug-statements
31 | - id: end-of-file-fixer
32 | - id: trailing-whitespace
33 |
--------------------------------------------------------------------------------
/knowledge_distillation/student.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.nn.functional as F
3 |
4 |
5 | class LeNet(nn.Module):
6 | def __init__(self):
7 | super().__init__()
8 | self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
9 | self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
10 | self.conv2_drop = nn.Dropout2d()
11 | self.fc1 = nn.Linear(320, 50)
12 | self.fc2 = nn.Linear(50, 10)
13 |
14 | def forward(self, x):
15 | x = F.relu(F.max_pool2d(self.conv1(x), 2))
16 | x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
17 | x = x.view(-1, 320)
18 | x = F.relu(self.fc1(x))
19 | x = F.dropout(x, training=self.training)
20 | x = self.fc2(x)
21 | return x
22 |
23 |
24 | class LeNetStudent(nn.Module):
25 | def __init__(self):
26 | super().__init__()
27 | # kernel_size = 2 was too strong, so 4
28 | self.conv1 = nn.Conv2d(1, 3, kernel_size=4)
29 | # Model with batchnorm was too strong reaching 0.973 accuracy
30 | # so I switched it off
31 | # self.bn = nn.BatchNorm2d(6)
32 | self.flatten = nn.Flatten()
33 | self.fc2 = nn.Linear(1875, 10)
34 |
35 | def forward(self, x):
36 | x = self.conv1(x)
37 | # x = self.bn(x)
38 | # x = F.relu(x)
39 | # x = F.max_pool2d(x, 2)
40 | x = self.flatten(x)
41 | x = self.fc2(x)
42 | return x
43 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: CI
5 |
6 | on:
7 | push:
8 | branches: [ main ]
9 | pull_request:
10 | branches: [ main ]
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | strategy:
15 | matrix:
16 | python-version: [3.7]
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Set up Python
20 | uses: actions/setup-python@v2
21 | with:
22 | python-version: ${{ matrix.python-version }}
23 | - name: Cache pip
24 | uses: actions/cache@v1
25 | with:
26 | path: ~/.cache/pip # This path is specific to Ubuntu
27 | # Look to see if there is a cache hit for the corresponding requirements file
28 | key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
29 | restore-keys: |
30 | ${{ runner.os }}-pip-
31 | ${{ runner.os }}-
32 | # You can test your matrix by printing the current Python version
33 | - name: Display Python version
34 | run: python -c "import sys; print(sys.version)"
35 | - name: Install dependencies
36 | run: |
37 | python -m pip install --upgrade pip
38 | pip install black flake8 isort pylint
39 | - name: Run black
40 | run:
41 | black --check .
42 |
--------------------------------------------------------------------------------
/utils/loaders.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torchvision
3 |
4 |
5 | def get_loaders(batch_size_train, batch_size_test):
6 | """Function to return train and test datasets for MNIST
7 |
8 | :param batch_size_train: Batch size used for train
9 | :param batch_size_test: Batch size used for test
10 |
11 | :return: Data loaders for train and test data
12 | """
13 |
14 | train_loader = torch.utils.data.DataLoader(
15 | torchvision.datasets.MNIST(
16 | "~/.cache/database/",
17 | train=True,
18 | download=True,
19 | transform=torchvision.transforms.Compose(
20 | [
21 | torchvision.transforms.ToTensor(),
22 | torchvision.transforms.Normalize((0.1307,), (0.3081,)),
23 | ]
24 | ),
25 | ),
26 | batch_size=batch_size_train,
27 | shuffle=True,
28 | )
29 |
30 | test_loader = torch.utils.data.DataLoader(
31 | torchvision.datasets.MNIST(
32 | "~/.cache/database/",
33 | train=False,
34 | download=True,
35 | transform=torchvision.transforms.Compose(
36 | [
37 | torchvision.transforms.ToTensor(),
38 | torchvision.transforms.Normalize((0.1307,), (0.3081,)),
39 | ]
40 | ),
41 | ),
42 | batch_size=batch_size_test,
43 | shuffle=False,
44 | )
45 |
46 | return train_loader, test_loader
47 |
--------------------------------------------------------------------------------
/utils/avg_speed_calc.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | import torch
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 |
7 | from loaders import get_loaders
8 |
9 | batch_size_train = 2048
10 | batch_size_test = 2048
11 |
12 |
13 | class LeNet(nn.Module):
14 | def __init__(self):
15 | super().__init__()
16 | self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
17 | self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
18 | self.conv2_drop = nn.Dropout2d()
19 | self.fc1 = nn.Linear(320, 50)
20 | self.fc2 = nn.Linear(50, 10)
21 |
22 | def forward(self, x):
23 | x = F.relu(F.max_pool2d(self.conv1(x), 2))
24 | x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
25 | x = x.view(-1, 320)
26 | x = F.relu(self.fc1(x))
27 | x = F.dropout(x, training=self.training)
28 | x = self.fc2(x)
29 | return F.log_softmax(x, 1)
30 |
31 |
32 | def go_through_data(net, data_loader, device):
33 |
34 | net.eval()
35 | with torch.no_grad():
36 | for (idx, (x, t)) in enumerate(data_loader):
37 | x = net.forward(x.to(device))
38 | t = t.to(device)
39 | return 1
40 |
41 |
42 | device = "cuda"
43 | train_loader, _ = get_loaders(batch_size_train, batch_size_test)
44 | net = LeNet().to(device)
45 | net.load_state_dict(torch.load("models/exp1.pth"))
46 |
47 | t0 = time.time()
48 | for i in range(20):
49 | go_through_data(net, train_loader, device)
50 |
51 | total_time = time.time() - t0
52 | print(total_time, total_time / (i + 1))
53 | # 46.204389810562134 9.240877962112426
54 |
--------------------------------------------------------------------------------
/experiments.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import torch.nn.utils.prune as prune
3 |
4 | from pruning_loop import PruningExperiment
5 |
6 | experiment_number = 3
7 |
8 | # Experiment 1: Random weights pruning
9 | # Change amount = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]
10 | amount = 0.7
11 | kwargs = {}
12 | pruning_strategy_1 = [
13 | ("fc1", prune.random_unstructured, "weight", amount, kwargs),
14 | ("fc2", prune.random_unstructured, "weight", amount, kwargs),
15 | ("conv1", prune.random_unstructured, "weight", amount, kwargs),
16 | ("conv2", prune.random_unstructured, "weight", amount, kwargs),
17 | ]
18 |
19 | # Experiment 2: Pruning based on norm
20 | amount = 0.7
21 | kwargs = {}
22 | pruning_strategy_2 = [
23 | ("fc1", prune.l1_unstructured, "weight", amount, kwargs),
24 | ("fc2", prune.l1_unstructured, "weight", amount, kwargs),
25 | ("conv1", prune.l1_unstructured, "weight", amount, kwargs),
26 | ("conv2", prune.l1_unstructured, "weight", amount, kwargs),
27 | ]
28 |
29 | # Experiment 3: Structural pruning with L1 norm
30 | amount = 0.7
31 | kwargs = {"n": 1, "dim": 0}
32 |
33 | pruning_strategy_3 = [
34 | ("fc1", prune.ln_structured, "weight", amount, kwargs),
35 | ("conv1", prune.ln_structured, "weight", amount, kwargs),
36 | ("conv2", prune.ln_structured, "weight", amount, kwargs),
37 | ]
38 |
39 |
40 | if experiment_number == 1:
41 | pe = PruningExperiment(
42 | pruning_strategy=pruning_strategy_1,
43 | epochs_prune_finetune=3,
44 | epochs_finetune=4,
45 | save_model="exp1",
46 | )
47 |
48 | if experiment_number == 2:
49 | pe = PruningExperiment(
50 | pruning_strategy=pruning_strategy_2,
51 | epochs_prune_finetune=3,
52 | epochs_finetune=4,
53 | save_model="exp2",
54 | )
55 |
56 | if experiment_number == 3:
57 | pe = PruningExperiment(
58 | pruning_strategy=pruning_strategy_3,
59 | epochs_prune_finetune=3,
60 | epochs_finetune=4,
61 | save_model="exp3",
62 | )
63 |
64 |
65 | print(pe.run())
66 |
--------------------------------------------------------------------------------
/knowledge_distillation/train_student.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | import torch
5 | import torch.nn as nn
6 | import torch.optim as optim
7 | from student import LeNetStudent
8 |
9 | # To make relative imports work
10 | # See https://stackoverflow.com/questions/16981921/relative-imports-in-python-3
11 |
12 | PACKAGE_PARENT = ".."
13 | SCRIPT_DIR = os.path.dirname(
14 | os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__)))
15 | )
16 | sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
17 |
18 | from utils.loaders import get_loaders
19 |
20 |
21 | def train(net, loss_fn, optimizer, data_loader, device):
22 | net.train()
23 | for (idx, (x, t)) in enumerate(data_loader):
24 | optimizer.zero_grad()
25 | x = net.forward(x.to(device))
26 | t = t.to(device)
27 | loss = loss_fn(x, t)
28 | loss.backward()
29 | optimizer.step()
30 |
31 |
32 | def test(net, data_loader, device):
33 | top1 = 0
34 | correct_samples = 0
35 | total_samples = 0
36 | net.eval()
37 | with torch.no_grad():
38 | for (idx, (x, t)) in enumerate(data_loader):
39 | x = net.forward(x.to(device))
40 | t = t.to(device)
41 | _, indices = torch.max(x, 1)
42 | correct_samples += torch.sum(indices == t)
43 | total_samples += t.shape[0]
44 |
45 | top1 = float(correct_samples) / total_samples
46 | return top1
47 |
48 |
49 | if __name__ == "__main__":
50 |
51 | device = "cuda"
52 |
53 | net = LeNetStudent().to(device)
54 |
55 | batch_size_train = 512
56 | batch_size_test = 1024
57 | nb_epoch = 60
58 |
59 | train_loader, test_loader = get_loaders(batch_size_train, batch_size_test)
60 |
61 | loss_fn = nn.CrossEntropyLoss()
62 | optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.5)
63 |
64 | best_model = None
65 | best_acc = 0
66 |
67 | for epoch in range(nb_epoch):
68 | train(net, loss_fn, optimizer, train_loader, device)
69 | test_top1 = test(net, test_loader, device)
70 | print(f"Epoch {epoch}. Top1 {test_top1:.4f}")
71 | if test_top1 > best_acc:
72 | best_model = net
73 | best_acc = test_top1
74 |
75 | torch.save(best_model.state_dict(), "models/best_acc_student.pth")
76 |
--------------------------------------------------------------------------------
/metrics/flops.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from torch import nn
3 |
4 | from utils.maskedLayers import Conv2dMasked, LinearMasked
5 |
6 | from . import nonzero
7 | from .abstract_flops import conv2d_flops, dense_flops
8 | from .utils import get_activations
9 |
10 |
11 | def _conv2d_flops(module, activation):
12 | # Auxiliary func to use abstract flop computation
13 |
14 | # Drop batch & channels. Channels can be dropped since
15 | # unlike shape they have to match to in_channels
16 | input_shape = activation.shape[2:]
17 | # TODO Add support for dilation and padding size
18 | return conv2d_flops(
19 | in_channels=module.in_channels,
20 | out_channels=module.out_channels,
21 | input_shape=input_shape,
22 | kernel_shape=module.kernel_size,
23 | padding=module.padding_mode,
24 | strides=module.stride,
25 | dilation=module.dilation,
26 | )
27 |
28 |
29 | def _linear_flops(module, activation):
30 | # Auxiliary func to use abstract flop computation
31 | return dense_flops(module.in_features, module.out_features)
32 |
33 |
34 | def flops(model, input):
35 | """Compute Multiply-add FLOPs estimate from model
36 |
37 | Arguments:
38 | model {torch.nn.Module} -- Module to compute flops for
39 | input {torch.Tensor} -- Input tensor needed for activations
40 |
41 | Returns:
42 | tuple:
43 | - int - Number of total FLOPs
44 | - int - Number of FLOPs related to nonzero parameters
45 | """
46 | FLOP_fn = {
47 | nn.Conv2d: _conv2d_flops,
48 | nn.Linear: _linear_flops,
49 | Conv2dMasked: _conv2d_flops,
50 | LinearMasked: _linear_flops,
51 | }
52 |
53 | total_flops = nonzero_flops = 0
54 | activations = get_activations(model, input)
55 |
56 | # The ones we need for backprop
57 | for m, (act, _) in activations.items():
58 | if m.__class__ in FLOP_fn:
59 | w = m.weight.detach().cpu().numpy().copy()
60 | module_flops = FLOP_fn[m.__class__](m, act)
61 | total_flops += module_flops
62 | # For our operations, all weights are symmetric so we can just
63 | # do simple rule of three for the estimation
64 | nonzero_flops += module_flops * nonzero(w).sum() / np.prod(w.shape)
65 |
66 | return total_flops, nonzero_flops
67 |
--------------------------------------------------------------------------------
/utils/maskedLayers.py:
--------------------------------------------------------------------------------
1 | # Taken from here
2 | # https://github.com/wanglouis49/pytorch-weights_pruning/blob/master/pruning/layers.py
3 |
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 | from torch.autograd import Variable
7 |
8 |
9 | class LinearMasked(nn.Linear):
10 | def __init__(self, in_features, out_features, bias=True):
11 | super(LinearMasked, self).__init__(in_features, out_features, bias)
12 | self.mask_flag = False
13 |
14 | def set_mask(self, mask):
15 | self.mask = Variable(mask, requires_grad=False, volatile=False)
16 | self.weight.data = self.weight.data * self.mask.data
17 | self.mask_flag = True
18 |
19 | def get_mask(self):
20 | print(self.mask_flag)
21 | return self.mask
22 |
23 | def forward(self, x):
24 | if self.mask_flag:
25 | weight = self.weight * self.mask
26 | return F.linear(x, weight, self.bias)
27 | else:
28 | return F.linear(x, self.weight, self.bias)
29 |
30 |
31 | class Conv2dMasked(nn.Conv2d):
32 | def __init__(
33 | self,
34 | in_channels,
35 | out_channels,
36 | kernel_size,
37 | stride=1,
38 | padding=0,
39 | dilation=1,
40 | groups=1,
41 | bias=True,
42 | ):
43 | super(Conv2dMasked, self).__init__(
44 | in_channels,
45 | out_channels,
46 | kernel_size,
47 | stride,
48 | padding,
49 | dilation,
50 | groups,
51 | bias,
52 | )
53 | self.mask_flag = False
54 |
55 | def set_mask(self, mask):
56 | self.mask = Variable(mask, requires_grad=False, volatile=False)
57 | self.weight.data = self.weight.data * self.mask.data
58 | self.mask_flag = True
59 |
60 | def get_mask(self):
61 | print(self.mask_flag)
62 | return self.mask
63 |
64 | def forward(self, x):
65 | if self.mask_flag:
66 | weight = self.weight * self.mask
67 | return F.conv2d(
68 | x,
69 | weight,
70 | self.bias,
71 | self.stride,
72 | self.padding,
73 | self.dilation,
74 | self.groups,
75 | )
76 | else:
77 | return F.conv2d(
78 | x,
79 | self.weight,
80 | self.bias,
81 | self.stride,
82 | self.padding,
83 | self.dilation,
84 | self.groups,
85 | )
86 |
--------------------------------------------------------------------------------
/lenet_pytorch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import torch
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 | import torch.optim as optim
7 | import torchvision
8 |
9 |
10 | class LeNet(nn.Module):
11 | def __init__(self):
12 | super().__init__()
13 | self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
14 | self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
15 | self.conv2_drop = nn.Dropout2d()
16 | self.fc1 = nn.Linear(320, 50)
17 | self.fc2 = nn.Linear(50, 10)
18 |
19 | def forward(self, x):
20 | x = F.relu(F.max_pool2d(self.conv1(x), 2))
21 | x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
22 | x = x.view(-1, 320)
23 | x = F.relu(self.fc1(x))
24 | x = F.dropout(x, training=self.training)
25 | x = self.fc2(x)
26 | return F.log_softmax(x, 1)
27 |
28 |
29 | def train(net, optimizer, data_loader, device):
30 | net.train()
31 | for (idx, (x, t)) in enumerate(data_loader):
32 | optimizer.zero_grad()
33 | x = net.forward(x.to(device))
34 | t = t.to(device)
35 | loss = F.nll_loss(x, t)
36 | loss.backward()
37 | optimizer.step()
38 |
39 |
40 | def test(net, data_loader, device):
41 |     top1 = 0
42 | correct_samples = 0
43 | total_samples = 0
44 | net.eval()
45 | with torch.no_grad():
46 | for (idx, (x, t)) in enumerate(data_loader):
47 | x = net.forward(x.to(device))
48 | t = t.to(device)
49 | _, indices = torch.max(x, 1)
50 | correct_samples += torch.sum(indices == t)
51 | total_samples += t.shape[0]
52 |
53 | top1 = float(correct_samples) / total_samples
54 | return top1
55 |
56 |
57 | if __name__ == "__main__":
58 | nb_epoch = 80
59 | batch_size_train = 1024
60 | batch_size_test = 5120
61 | device = "cuda" # change to 'cpu' if needed
62 |
63 | best_model = None
64 | best_acc = 0
65 |
66 | train_loader = torch.utils.data.DataLoader(
67 | torchvision.datasets.MNIST(
68 | "~/.cache/database/",
69 | train=True,
70 | download=True,
71 | transform=torchvision.transforms.Compose(
72 | [
73 | torchvision.transforms.ToTensor(),
74 | torchvision.transforms.Normalize((0.1307,), (0.3081,)),
75 | ]
76 | ),
77 | ),
78 | batch_size=batch_size_train,
79 | shuffle=True,
80 | )
81 |
82 | test_loader = torch.utils.data.DataLoader(
83 | torchvision.datasets.MNIST(
84 | "~/.cache/database/",
85 | train=False,
86 | download=True,
87 | transform=torchvision.transforms.Compose(
88 | [
89 | torchvision.transforms.ToTensor(),
90 | torchvision.transforms.Normalize((0.1307,), (0.3081,)),
91 | ]
92 | ),
93 | ),
94 | batch_size=batch_size_test,
95 | shuffle=False,
96 | )
97 | net = LeNet().to(device)
98 | optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.5)
99 |
100 | for epoch in range(nb_epoch):
101 | train(net, optimizer, train_loader, device)
102 | test_top1 = test(net, test_loader, device)
103 | print(f"Epoch {epoch}. Top1 {test_top1:.4f}")
104 | if test_top1 > best_acc:
105 | best_model = net
106 | best_acc = test_top1
107 |
108 | torch.save(best_model.state_dict(), "models/best_acc.pth")
109 |
--------------------------------------------------------------------------------
/metrics/abstract_flops.py:
--------------------------------------------------------------------------------
1 | """Module for computing FLOPs from specification, not from Torch objects
2 | """
3 | import numpy as np
4 |
5 |
6 | def dense_flops(in_neurons, out_neurons):
7 | """Compute the number of multiply-adds used by a Dense (Linear) layer"""
8 | return in_neurons * out_neurons
9 |
10 |
11 | def conv2d_flops(
12 | in_channels,
13 | out_channels,
14 | input_shape,
15 | kernel_shape,
16 | padding="same",
17 | strides=1,
18 | dilation=1,
19 | ):
20 | """Compute the number of multiply-adds used by a Conv2D layer
21 | Args:
22 | in_channels (int): The number of channels in the layer's input
23 | out_channels (int): The number of channels in the layer's output
24 | input_shape (int, int): The spatial shape of the rank-3 input tensor
25 | kernel_shape (int, int): The spatial shape of the rank-4 kernel
26 | padding ({'same', 'valid'}): The padding used by the convolution
27 | strides (int) or (int, int): The spatial stride of the convolution;
28 | two numbers may be specified if it's different for the x and y axes
29 | dilation (int): Must be 1 for now.
30 | Returns:
31 | int: The number of multiply-adds a direct convolution would require
32 | (i.e., no FFT, no Winograd, etc)
33 | >>> c_in, c_out = 10, 10
34 | >>> in_shape = (4, 5)
35 | >>> filt_shape = (3, 2)
36 | >>> # valid padding
37 | >>> ret = conv2d_flops(c_in, c_out, in_shape, filt_shape, padding='valid')
38 | >>> ret == int(c_in * c_out * np.prod(filt_shape) * (2 * 4))
39 | True
40 | >>> # same padding, no stride
41 | >>> ret = conv2d_flops(c_in, c_out, in_shape, filt_shape, padding='same')
42 | >>> ret == int(c_in * c_out * np.prod(filt_shape) * np.prod(in_shape))
43 | True
44 | >>> # valid padding, stride > 1
45 | >>> ret = conv2d_flops(c_in, c_out, in_shape, filt_shape, \
46 | padding='valid', strides=(1, 2))
47 | >>> ret == int(c_in * c_out * np.prod(filt_shape) * (2 * 2))
48 | True
49 | >>> # same padding, stride > 1
50 | >>> ret = conv2d_flops(c_in, c_out, in_shape, filt_shape, \
51 | padding='same', strides=2)
52 | >>> ret == int(c_in * c_out * np.prod(filt_shape) * (2 * 3))
53 | True
54 | """
55 | # validate + sanitize input
56 | assert in_channels > 0
57 | assert out_channels > 0
58 | assert len(input_shape) == 2
59 | assert len(kernel_shape) == 2
60 | padding = padding.lower()
61 | assert padding in (
62 | "same",
63 | "valid",
64 | "zeros",
65 | ), "Padding must be one of same|valid|zeros"
66 | try:
67 | strides = tuple(strides)
68 | except TypeError:
69 | # if one number provided, make it a 2-tuple
70 | strides = (strides, strides)
71 | assert dilation == 1 or all(
72 | d == 1 for d in dilation
73 | ), "Dilation > 1 is not supported"
74 |
75 | # compute output spatial shape
76 | # based on TF computations https://stackoverflow.com/a/37674568
77 | if padding in ["same", "zeros"]:
78 | out_nrows = np.ceil(float(input_shape[0]) / strides[0])
79 | out_ncols = np.ceil(float(input_shape[1]) / strides[1])
80 | else: # padding == 'valid'
81 | out_nrows = np.ceil((input_shape[0] - kernel_shape[0] + 1) / strides[0]) # noqa
82 | out_ncols = np.ceil((input_shape[1] - kernel_shape[1] + 1) / strides[1]) # noqa
83 | output_shape = (int(out_nrows), int(out_ncols))
84 |
85 | # work to compute one output spatial position
86 | nflops = in_channels * out_channels * int(np.prod(kernel_shape))
87 |
88 | # total work = work per output position * number of output positions
89 | return nflops * int(np.prod(output_shape))
90 |
91 |
92 | if __name__ == "__main__":
93 | import doctest
94 |
95 | doctest.testmod()
96 |
--------------------------------------------------------------------------------
/knowledge_distillation/distillation.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | import torch
5 | import torch.nn as nn
6 | import torch.nn.functional as F
7 | from student import LeNet, LeNetStudent
8 |
9 | PACKAGE_PARENT = ".."
10 | SCRIPT_DIR = os.path.dirname(
11 | os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__)))
12 | )
13 | sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
14 |
15 | from metrics import flops, model_size
16 | from utils.loaders import get_loaders
17 |
18 |
19 | def train(teacher, student, loss_fn, optimizer, data_loader, device):
20 | teacher.train(False)
21 | student.train()
22 | for (idx, (x, t)) in enumerate(data_loader):
23 | optimizer.zero_grad()
24 | x = x.to(device)
25 | x_teacher = teacher.forward(x)
26 | x_student = student.forward(x)
27 | t = t.to(device)
28 | loss = loss_fn(x_student, x_teacher, t)
29 | loss.backward()
30 | optimizer.step()
31 |
32 |
33 | def test(net, data_loader, device):
34 | top1 = 0
35 | correct_samples = 0
36 | total_samples = 0
37 | net.train(False)
38 | net.eval()
39 | with torch.no_grad():
40 | for (idx, (x, t)) in enumerate(data_loader):
41 | x = net.forward(x.to(device))
42 | t = t.to(device)
43 | _, indices = torch.max(x, 1)
44 | correct_samples += torch.sum(indices == t)
45 | total_samples += t.shape[0]
46 |
47 | top1 = float(correct_samples) / total_samples
48 | return top1
49 |
50 |
51 | def calculate_prune_metrics(net, test_loader, device):
52 |
53 | x, _ = next(iter(test_loader))
54 | x = x.to(device)
55 |
56 | size, size_nz = model_size(net)
57 |
58 | FLOPS = flops(net, x)
59 | return FLOPS, size, size_nz
60 |
61 |
62 | def cross_entropy_with_soft_targets(pred, soft_targets):
63 | logsoftmax = nn.LogSoftmax(dim=1)
64 | return torch.mean(torch.sum(-soft_targets * logsoftmax(pred), 1))
65 |
66 |
67 | class CrossEntropyLossTemperature_withSoftTargets(torch.nn.Module):
68 | def __init__(self, temperature, reduction="mean"):
69 | super(CrossEntropyLossTemperature_withSoftTargets, self).__init__()
70 | self.T = temperature
71 | self.reduction = reduction
72 |
73 | def forward(self, input, soft_targets, hard_targets):
74 |         """
75 |         Combine a temperature-scaled cross-entropy against the teacher's soft
76 |         targets with a standard cross-entropy against the hard targets and
77 |         return their sum.
78 |         """
79 | z = input / self.T
80 | loss_1 = self.T ** 2 * cross_entropy_with_soft_targets(
81 | z, F.softmax(soft_targets, 1, _stacklevel=5)
82 | )
83 |
84 | loss_2 = F.cross_entropy(
85 | input,
86 | hard_targets,
87 | weight=None,
88 | ignore_index=-100,
89 | reduction=self.reduction,
90 | )
91 |
92 | return loss_1 + loss_2
93 |
94 |
95 | if __name__ == "__main__":
96 |
97 | device = "cuda"
98 | teacher = LeNet().to(device)
99 | student = LeNetStudent().to(device)
100 |
101 | teacher.load_state_dict(torch.load("models/best_acc.pth"))
102 | student.load_state_dict(torch.load("models/best_acc_student.pth"))
103 |
104 | batch_size_train = 1024
105 | batch_size_test = 1024
106 | nb_epoch = 40
107 |
108 | train_loader, test_loader = get_loaders(batch_size_train, batch_size_test)
109 |
110 | print(calculate_prune_metrics(teacher, test_loader, device))
111 | print(calculate_prune_metrics(student, test_loader, device))
112 |
113 | loss_fn = CrossEntropyLossTemperature_withSoftTargets(1)
114 | # optimizer = torch.optim.Adam(student.parameters(), lr=0.0001, weight_decay=0.00001)
115 | optimizer = torch.optim.SGD(student.parameters(), lr=0.01, momentum=0.2)
116 |
117 | best_model = None
118 | best_acc = 0
119 |
120 | teacher.train(False)
121 | for epoch in range(nb_epoch):
122 | train(teacher, student, loss_fn, optimizer, train_loader, device)
123 | test_top1 = test(student, test_loader, device)
124 | print(f"Epoch {epoch}. Top1 {test_top1:.4f}")
125 | if test_top1 > best_acc:
126 | best_model = student
127 | best_acc = test_top1
128 |
129 | torch.save(best_model.state_dict(), "models/best_acc_student_with_distillation.pth")
130 |
--------------------------------------------------------------------------------
/metrics/utils.py:
--------------------------------------------------------------------------------
1 | """Auxiliary utils for implementing pruning strategies
2 | """
3 |
4 | from collections import OrderedDict, defaultdict
5 |
6 | import torch
7 | from torch import nn
8 |
9 |
10 | def hook_applyfn(hook, model, forward=False, backward=False):
11 |     """Build a function that registers `hook` on the submodules of `model`
12 |
13 |     The returned registration function is meant to be passed to `model.apply`.
14 |
15 |     Arguments:
16 |         hook {callable} -- Hook to register
17 |         model {torch.nn.Module} -- Network whose submodules the hook is attached to
18 |
19 |     Keyword Arguments:
20 |         forward {bool} -- Register `hook` as a forward hook (default: {False})
21 |         backward {bool} -- Register `hook` as a backward hook (default: {False})
22 |
23 |     Returns:
24 |         (callable, list) -- Registration function and the list collecting hook handles
25 |     """
26 | assert forward ^ backward, "Either forward or backward must be True"
27 | hooks = []
28 |
29 | def register_hook(module):
30 | if (
31 | not isinstance(module, nn.Sequential)
32 | and not isinstance(module, nn.ModuleList)
33 | and not isinstance(module, nn.ModuleDict)
34 | and not (module == model)
35 | ):
36 | if forward:
37 | hooks.append(module.register_forward_hook(hook))
38 | if backward:
39 | hooks.append(module.register_backward_hook(hook))
40 |
41 | return register_hook, hooks
42 |
43 |
44 | def get_params(model, recurse=False):
45 |     """Returns dictionary of parameters
46 |
47 | Arguments:
48 | model {torch.nn.Module} -- Network to extract the parameters from
49 |
50 | Keyword Arguments:
51 | recurse {bool} -- Whether to recurse through children modules
52 |
53 | Returns:
54 | Dict(str:numpy.ndarray) -- Dictionary of named parameters their
55 | associated parameter arrays
56 | """
57 | params = {
58 | k: v.detach().cpu().numpy().copy()
59 | for k, v in model.named_parameters(recurse=recurse)
60 | }
61 | return params
62 |
63 |
64 | def get_activations(model, input):
65 |
66 | activations = OrderedDict()
67 |
68 | def store_activations(module, input, output):
69 | if isinstance(module, nn.ReLU):
70 | # TODO ResNet18 implementation reuses a
71 | # single ReLU layer?
72 | return
73 | assert module not in activations, f"{module} already in activations"
74 | # TODO [0] means first input, not all models have a single input
75 | activations[module] = (
76 | input[0].detach().cpu().numpy().copy(),
77 | output.detach().cpu().numpy().copy(),
78 | )
79 |
80 | fn, hooks = hook_applyfn(store_activations, model, forward=True)
81 | model.apply(fn)
82 | with torch.no_grad():
83 | model(input)
84 |
85 | for h in hooks:
86 | h.remove()
87 |
88 | return activations
89 |
90 |
91 | def get_gradients(model, inputs, outputs):
92 | # TODO implement using model.register_backward_hook()
93 | # So it is harder than it seems, the grad_input contains also the gradients
94 | # with respect to the weights and so far order seems to be
95 | # (bias, input, weight) which is confusing.
96 | # Moreover, a lot of the time the output activation we are
97 | # looking for is the one after the ReLU and F.ReLU (or any functional call)
98 | # will not be called by the forward or backward hook
99 | # Discussion here
100 | # https://discuss.pytorch.org/t/how-to-register-hook-function-for-functional-form/25775
101 | # Best way seems to be monkey patching F.ReLU & other functional ops
102 | # That'll also help figuring out how to compute a module graph
103 | pass
104 |
105 |
106 | def get_param_gradients(model, inputs, outputs, loss_func=None, by_module=True):
107 |
108 | gradients = OrderedDict()
109 |
110 | if loss_func is None:
111 | loss_func = nn.CrossEntropyLoss()
112 |
113 | training = model.training
114 | model.train()
115 | pred = model(inputs)
116 | loss = loss_func(pred, outputs)
117 | loss.backward()
118 |
119 | if by_module:
120 | gradients = defaultdict(OrderedDict)
121 | for module in model.modules():
122 | assert module not in gradients
123 | for name, param in module.named_parameters(recurse=False):
124 | if param.requires_grad and param.grad is not None:
125 | gradients[module][name] = param.grad.detach().cpu().numpy().copy()
126 |
127 | else:
128 | gradients = OrderedDict()
129 | for name, param in model.named_parameters():
130 | assert name not in gradients
131 | if param.requires_grad and param.grad is not None:
132 | gradients[name] = param.grad.detach().cpu().numpy().copy()
133 |
134 | model.zero_grad()
135 | model.train(training)
136 |
137 | return gradients
138 |
139 |
140 | def fraction_to_keep(compression, model, prunable_modules):
141 | """Return fraction of params to keep to achieve desired compression ratio
142 |
143 | Compression = total / ( fraction * prunable + (total-prunable))
144 |     Using algebra, the fraction is equal to
145 | fraction = total/prunable * (1/compression - 1) + 1
146 |
147 | Arguments:
148 | compression {float} -- Desired overall compression
149 | model {torch.nn.Module} -- Full model for which to compute the fraction
150 | prunable_modules {List(torch.nn.Module)} --
151 | Modules that can be pruned in the model.
152 |
153 | Returns:
154 | {float} -- Fraction of prunable parameters to keep
155 | to achieve desired compression
156 | """
157 | from ..metrics import model_size
158 |
159 | total_size, _ = model_size(model)
160 | prunable_size = sum([model_size(m)[0] for m in prunable_modules])
161 | nonprunable_size = total_size - prunable_size
162 | fraction = 1 / prunable_size * (total_size / compression - nonprunable_size)
163 | assert 0 < fraction <= 1, (
164 | f"Cannot compress to {1/compression} model\
165 | with {nonprunable_size/total_size}"
166 | + "fraction of unprunable parameters"
167 | )
168 | return fraction
169 |
--------------------------------------------------------------------------------
/pruning_loop.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 | import torch.nn.utils.prune as prune
5 | import torch.optim as optim
6 |
7 | from utils.loaders import get_loaders
8 | from utils.maskedLayers import Conv2dMasked, LinearMasked
9 | from metrics import flops, model_size
10 |
11 |
12 | class LeNet(nn.Module):
13 | def __init__(self):
14 | super().__init__()
15 | self.conv1 = Conv2dMasked(1, 10, kernel_size=5)
16 | self.conv2 = Conv2dMasked(10, 20, kernel_size=5)
17 | self.conv2_drop = nn.Dropout2d()
18 | self.fc1 = LinearMasked(320, 50)
19 | self.fc2 = LinearMasked(50, 10)
20 |
21 | def forward(self, x):
22 | x = F.relu(F.max_pool2d(self.conv1(x), 2))
23 | x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
24 | x = x.view(-1, 320)
25 | x = F.relu(self.fc1(x))
26 | x = F.dropout(x, training=self.training)
27 | x = self.fc2(x)
28 | return F.log_softmax(x, 1)
29 |
30 |
31 | class PruningExperiment:
32 | def __init__(
33 | self,
34 | pruning_strategy=None,
35 | batch_size_train=512,
36 | batch_size_test=1024,
37 | epochs_prune_finetune=3,
38 | epochs_finetune=5,
39 | device="cuda",
40 | save_model=None,
41 | ):
42 |
43 | """Initialize experiment
44 | :param pruning_strategy: Pruning strategy
45 | :param batch_size_train: Batch size for train
46 | :param batch_size_test: Batch size for test
47 | :param epochs_finetune: Number of epochs to finetune pruned model
48 | :param optimizer: Optimizer to perform gradient descent
49 | :param device: Device 'cpu' or 'cuda' where calculations are performed
50 |
51 | :return: Outcome of pruning strategy: Accuracy and pruning metrics
52 | """
53 | self.pruning_strategy = pruning_strategy
54 | self.device = device
55 | self.batch_size_train = batch_size_train
56 | self.batch_size_test = batch_size_test
57 | self.epochs_prune_finetune = epochs_prune_finetune
58 | self.epochs_finetune = epochs_finetune
59 | self.save_model = save_model
60 |
61 | def load_model(self):
62 |
63 | """Load LeNet model.
64 | All experiments will be performed on a trained model
65 | from the original script.
66 | """
67 | net = LeNet().to(self.device)
68 | net.load_state_dict(torch.load("models/best_acc.pth"))
69 | return net
70 |
71 | def prune_model(self, net, pruning_strategy):
72 |
73 | for modulename, strategy, name, amount, kwargs in pruning_strategy:
74 |
75 | module = getattr(net, modulename)
76 | if kwargs:
77 | n = kwargs["n"]
78 | dim = kwargs["dim"]
79 | mask = strategy(module, name=name, amount=amount, n=n, dim=dim)
80 | else:
81 |                 mask = strategy(module, name=name, amount=amount)
82 | # print(modulename, mask.weight_mask.shape)
83 | module.set_mask(mask.weight_mask)
84 |
85 | return net
86 |
87 | def train(self, net, optimizer, data_loader, device):
88 |
89 | net.train()
90 | for (idx, (x, t)) in enumerate(data_loader):
91 | optimizer.zero_grad()
92 | x = net.forward(x.to(device))
93 | t = t.to(device)
94 | loss = F.nll_loss(x, t)
95 | loss.backward()
96 | optimizer.step()
97 |
98 | def test(self, net, data_loader, device):
99 |
100 | correct_samples = 0
101 | total_samples = 0
102 | net.eval()
103 | with torch.no_grad():
104 | for (idx, (x, t)) in enumerate(data_loader):
105 | x = net.forward(x.to(device))
106 | t = t.to(device)
107 | _, indices = torch.max(x, 1)
108 | correct_samples += torch.sum(indices == t)
109 | total_samples += t.shape[0]
110 | top1 = float(correct_samples) / total_samples
111 | return top1
112 |
113 | def calculate_prune_metrics(self, net, test_loader, device):
114 |
115 | x, _ = next(iter(test_loader))
116 | x = x.to(device)
117 |
118 | size, size_nz = model_size(net)
119 |
120 | FLOPS = flops(net, x)
121 | compression_ratio = size / size_nz
122 |
123 | return FLOPS, compression_ratio
124 |
125 | def run(self):
126 | """
127 | Main function to run pruning -> finetuning -> evaluation
128 | """
129 | pruning_strategy = self.pruning_strategy
130 | batch_size_train = self.batch_size_train
131 | batch_size_test = self.batch_size_test
132 | epochs_prune_finetune = self.epochs_prune_finetune
133 | epochs_finetune = self.epochs_finetune
134 | device = self.device
135 |
136 | net = self.load_model()
137 |
138 | optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.5)
139 |
140 | train_loader, test_loader = get_loaders(batch_size_train, batch_size_test)
141 |
142 | print("Pruning cycle")
143 | print("=======================")
144 | for epoch in range(epochs_prune_finetune):
145 | if pruning_strategy is not None:
146 | net = self.prune_model(net, pruning_strategy)
147 | test_top1 = self.test(net, test_loader, device)
148 | print(f"After pruning: Epoch {epoch}. Top1 {test_top1}.")
149 | for finetune_epoch in range(epochs_finetune):
150 | self.train(net, optimizer, train_loader, device)
151 | test_top1 = self.test(net, test_loader, device)
152 | print(
153 | f"\tAfter finetuning: Epoch {finetune_epoch}.\
154 | Top1 {test_top1:.4f}"
155 | )
156 |
157 | for modulename, strategy, name, amount, kwargs in pruning_strategy:
158 | module = getattr(net, modulename)
159 | prune.remove(module, name)
160 | test_top1 = self.test(net, test_loader, device)
161 |
162 | FLOPS, compression_ratio = self.calculate_prune_metrics(
163 | net, test_loader, device
164 | )
165 |
166 | if self.save_model is not None:
167 | torch.save(net.state_dict(), f"models/{self.save_model}.pth")
168 |
169 | return test_top1, FLOPS[0] / FLOPS[1], compression_ratio
170 |
--------------------------------------------------------------------------------
/Results.md:
--------------------------------------------------------------------------------
1 | # Experiments in Neural Network pruning
2 |
3 | ### Prepared by Oleg Polivin, 26 November 2020
4 | ---
5 |
6 | Let's define the metrics we will use to evaluate the effectiveness of pruning. We will look at categorical accuracy to estimate the quality of a neural network.[1](#myfootnote1) Accuracy in the experiments is reported on the test set, not on the data used to train the network.
7 |
8 |
9 | Much of this work is based on the paper [What is the State of Neural Network Pruning?](https://arxiv.org/abs/2003.03033)
10 |
11 | In order to estimate the effectiveness at pruning we will take into account:
12 |
13 | 1. Acceleration of inference on the test set.
14 | - Compare the number of multiply-adds operations (FLOPs) to perform inference.
15 | - Additionally, I compute average time of running the original/pruned model on data.
16 |
17 | 2. Model size reduction/ weights compression.
18 | - Here we will compare total number of non-zero parameters.
19 |
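For reference, here is a minimal sketch (not part of the original scripts) of how the FLOPs and nonzero-parameter counts can be obtained with the helpers in ``metrics/``; ``net`` is assumed to be a trained LeNet and ``x`` one batch of MNIST images, which is only needed so that ``flops`` can record activation shapes:

```python
from metrics import flops, model_size

# net: trained LeNet, x: a batch of inputs (both assumed to exist for this sketch)
total_flops, nonzero_flops = flops(net, x)        # multiply-adds: all weights vs. nonzero weights
total_params, nonzero_params = model_size(net)    # parameter counts: all vs. nonzero
print(total_flops / nonzero_flops)                # theoretical speed-up from pruning
print(total_params / nonzero_params)              # compression ratio
```
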
20 | ## Experiment setting
21 |
22 | Given the code for the LeNet model in PyTorch, let's calculate the metrics defined above. The canonical LeNet-5 architecture is shown below:
23 |
24 | 
25 |
26 | The architecture in the original paper [Gradient-Based Learning Applied to Document Recognition](http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf) differs a bit from the given code (for example, there was no ``Dropout``, the ``hyperbolic tangent`` was used as the activation, the number of filters is different, etc.), but the idea is the same. I will organize the experiments as follows:
27 |
28 | 1. Train the model using the original script (``lenet_pytorch.py``, although I made some modifications there).
29 | 2. Perform evaluation of the model using the metrics defined above.
30 | 3. Save the trained model.
31 |
32 | I will perform experiments on pruning using the saved model.
33 |
34 | 4. In order to perform pruning experiments I added:
35 | - ``metrics/``
36 | - ``experiments.py`` (this is the main script that produces results).
37 | - ``pruning_loop.py`` implements the experiment.
38 | - ``utils``
39 |
40 |     - ``avg_speed_calc.py`` to calculate average inference time on train data
41 | - ``loaders.py`` to create train/test loaders
42 | - ``maskedLayers.py`` wrappers for Linear and Conv2d PyTorch modules.
43 |     - ``plots.ipynb`` Jupyter notebook to produce the plots below.
44 |
45 | ## Pruning setup
46 |
47 | As suggested in the [What is the State of Neural Network Pruning?](https://arxiv.org/abs/2003.03033) paper many pruning methods are described by the following algorithm:
48 |
49 | 1. A neural network (NN) is trained until convergence (20 epochs now).
50 | 2. ```
51 |    for i in 1 to K do
52 |        prune NN
53 |        finetune NN
54 |    end for
55 |    ```
56 | This means that the neural network is pruned several times. In my version, a weight that is set to zero stays zero: pruned weights are not retrained. Note also that each finetuning step consists of several epochs of training.
57 |
58 | To fix the pruning setup, in all experiments the number of prune-finetune cycles is equal to 3 (this is ``K`` above) and the number of finetuning epochs is equal to 4. Categorical accuracy, speed-up and compression are reported after the prune-finetune loop has finished.
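
As a concrete illustration, here is a minimal sketch of one such prune-finetune loop for a single layer using PyTorch's built-in pruning utilities; ``train_one_epoch`` is a hypothetical helper, and the actual experiment in ``pruning_loop.py`` handles all pruned layers:

```python
import torch.nn.utils.prune as prune

for _ in range(3):                                  # K prune-finetune cycles
    # each call zeroes 70% of the *remaining* fc1 weights; already-pruned weights stay zero
    prune.l1_unstructured(net.fc1, name="weight", amount=0.7)
    for _ in range(4):                              # finetuning epochs
        train_one_epoch(net)                        # hypothetical training helper
prune.remove(net.fc1, "weight")                     # bake the mask into the weight tensor
```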
59 |
60 | ## Results
61 |
62 | ### Baseline
63 |
64 | The LeNet model as defined in the code was trained for ``80`` epochs, and the best model by categorical accuracy was saved. The highest categorical accuracy, ``0.9809``, was reached at epoch ``78``. The objective is to stop only once the model has converged, so that we are sure to prune a converged model. There are ``932500`` multiply-add operations (FLOPs), and over 20 runs through the train data (``60000`` samples) the average time per run is ``9.1961866`` seconds.
65 |
66 | ### Experiments
67 |
68 | #### Experiment 1: Unstructured pruning of random weights
69 |
70 | **Setting:** Prune the fully-connected layers (``fc1``, ``fc2``) and both convolutional layers (``conv1``, ``conv2``). Increase pruning from 10% to 70% (step = 10%). The pruning percentage applies to each layer. Roughly, this corresponds to compressing the model by up to 36 times.
71 |
72 | #### Experiment 2: Unstructured pruning of the smallest weights (based on the L1 norm)
73 |
74 | **Setting:** Same as in Experiment 1, except that pruning is no longer random: here the smallest weights are set to zero.
75 |
76 | #### Experiment 3: Structured pruning (based on the L1 norm)
77 |
78 | **Setting:** Here I use structured pruning. In PyTorch one can use ``prune.ln_structured`` for that. It accepts a dimension (``dim``) that specifies which channels should be dropped. For fully-connected layers such as ``fc1`` or ``fc2``, ``dim=0`` corresponds to "switching off" output neurons (``50`` for ``fc1`` and ``10`` for ``fc2``). Therefore, it does not really make sense to switch off neurons in the classification layer ``fc2``. For convolutional layers such as ``conv1`` or ``conv2``, ``dim=0`` corresponds to removing output channels (``10`` for ``conv1`` and ``20`` for ``conv2``). That is why I only prune the ``fc1``, ``conv1`` and ``conv2`` layers, again going from pruning 10% up to 70% of each layer's channels. For the fully-connected layer ``fc1`` this means zeroing 5 to 35 neurons out of 50; for the ``conv1`` layer it means zeroing out all connections corresponding to 1 to 7 of its channels.
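
For example, a minimal sketch of such a call on ``conv1`` with the kwargs used in ``experiments.py`` (``net`` is assumed to be the trained LeNet):

```python
import torch.nn.utils.prune as prune

# conv1.weight has shape [10, 1, 5, 5]; dim=0 indexes its 10 output channels,
# so amount=0.7 zeroes the 7 channels with the smallest L1 norm (n=1)
prune.ln_structured(net.conv1, name="weight", amount=0.7, n=1, dim=0)
```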
79 |
80 | Below I present results of my pruning experiments:
81 |
82 | 
83 |
84 | Measuring the average inference time confirms that there is no real difference between the pruned and the non-pruned models.
85 |
86 | ## Conclusions and caveats
87 |
88 | Here are my thoughts on the results above and some caveats.
89 |
90 | If we take the results at face value, we conclude that the best results are obtained with unstructured pruning of the smallest weights based on the L1 norm. In reality, however (more on that below), unstructured pruning only makes the weight tensors sparse; since sparse operations are not yet fully supported in PyTorch, it brings no real gains in model size or inference speed. Still, we can read such results as evidence that a smaller architecture with fewer weights might be beneficial.
91 |
92 | Below are further caveats:
93 |
94 | ### Unstructured pruning
95 | 1. We use FLOPs to estimate the speed-up of a pruned neural network and the number of non-zero parameters to estimate compression. This gives the impression that pruning yields a significant speed-up and significant memory savings.
96 |
97 | 2. However, people report that in terms of the actual time it takes to make a prediction there is no speed-up. I verified this with the model before and after pruning (random weights): the average inference time does not change, and the saved PyTorch models (``.pth``) have the same size.
98 |
99 | 3. Additionally, there is no saving in memory, because all those zero elements still have to be stored.
100 |
101 | 4. To my understanding, one needs to rebuild the network architecture around the zeroed weights in order to obtain real gains in speed and memory.
102 |
103 | 5. An alternative is to use sparse matrices and sparse operations in PyTorch, but this functionality is still in beta. See the discussion in [How to improve inference time of pruned model using torch.nn.utils.prune](https://discuss.pytorch.org/t/how-to-improve-inference-time-of-pruned-model-using-torch-nn-utils-prune/78633/4).
104 |
105 | 6. So, if we do unstructured pruning and want to benefit from sparse operations, we have to write inference code that works on sparse matrices (a small sketch follows this list). An example is [How Can We Be So Dense? The Benefits of Using Highly Sparse Representations](https://arxiv.org/abs/1903.11257), where the authors obtained large speed-ups, but only after implementing sparse-matrix operations on an FPGA.
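
To make point 6 concrete, here is a hedged sketch (not taken from this repository) that converts a pruned weight matrix to a sparse tensor and compares a dense and a sparse matrix product. The shapes are purely illustrative.

```
import time
import torch

weight = torch.randn(50, 320)
weight[torch.rand_like(weight) < 0.7] = 0.0   # emulate 70% unstructured pruning
x = torch.randn(320, 256)

sparse_weight = weight.to_sparse()            # COO representation of the same matrix

t0 = time.perf_counter()
for _ in range(1000):
    _ = weight @ x
dense_time = time.perf_counter() - t0

t0 = time.perf_counter()
for _ in range(1000):
    _ = torch.sparse.mm(sparse_weight, x)
sparse_time = time.perf_counter() - t0

print(f"dense: {dense_time:.3f}s, sparse: {sparse_time:.3f}s")
```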
106 |
107 | What's said above is more relevant to unstructured pruning of weights.
108 |
109 | ### Structured pruning
110 |
111 | One can get real speed-ups with structured pruning, that is, by dropping entire channels. The price is a drop in accuracy, but this genuinely improves model size and inference time.
112 |
113 | ## Additional chapter: Knowledge distillation
114 |
115 | Knowledge distillation is an idea proposed by Geoffrey Hinton, Oriol Vinyals and Jeff Dean to transfer knowledge from a large trained model to a simple, lightweight one. Strictly speaking it is not pruning, but it has the same objective: simplify the original neural network without sacrificing much quality.
116 |
117 | It works the following way:
118 |
119 | - Train a large network that reaches good accuracy [Teacher Network].
120 | - Train a small network until convergence [Student Network]. There is a trade-off between the accuracy you can reach with a simpler model and the level of compression.
121 | - Distill the knowledge from the Teacher Network by training the Student Network on the outputs of the Teacher Network (a sketch of the loss follows this list).
122 | - Observe that the accuracy of the trained, already-converged student network increases!
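
The distillation objective is a weighted sum of a "soft" term, which matches the teacher's temperature-scaled outputs, and the usual "hard" cross-entropy with the true labels. Below is a minimal sketch of the classic loss from Hinton et al.; the temperature ``T`` and weight ``alpha`` are illustrative hyperparameters, and ``knowledge_distillation/distillation.py`` may implement this differently.

```
import torch.nn.functional as F

def distillation_loss(student_logits, teacher_logits, labels, T=4.0, alpha=0.9):
    # Soft targets: KL divergence between softened student and teacher outputs.
    soft = F.kl_div(
        F.log_softmax(student_logits / T, dim=1),
        F.softmax(teacher_logits / T, dim=1),
        reduction="batchmean",
    ) * (T * T)  # rescale so gradients stay comparable across temperatures
    # Hard targets: ordinary cross-entropy with the true labels.
    hard = F.cross_entropy(student_logits, labels)
    return alpha * soft + (1.0 - alpha) * hard
```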
123 |
124 | I provide the code to do it in the ``knowledge_distillation`` folder. Run
125 |
126 | ```
127 | python knowledge_distillation/train_student.py
128 | ```
129 | to train the student network. It has a simplified architecture relative to the original ``LeNet`` network. For example, the saved student network takes about 1.16 times less space on disk (77 kB instead of 90 kB, and roughly half the size if saved with ``PyTorch v1.4``). I ran training for 60 epochs; the best accuracy, ``0.9260``, was reached at epoch 47, so we can say that the model has converged.
130 |
131 | Run
132 |
133 | ```
134 | python knowledge_distillation/distillation.py
135 | ```
136 | to further train the converged student network by distilling the teacher network into it.
137 |
138 | Here are the results:
139 | - ``FLOPS`` compression coefficient is 42 (the student model is 42 times smaller in terms of FLOPS, down to 21840 multiply-add operations from 932500).
140 | - ``Model size`` compression coefficient is 3 (the student model is 3 times smaller in terms of size)
141 | - ``Accuracy`` of the retrained student model is ``0.9276``, which is a tiny bit better than the original student network.
142 |
143 | I would say that knowledge distillation is definitely worth a try as a method to perform model compression.
144 |
145 | ## Bibliography with comments
146 |
147 | 1. The code to calculate FLOPs is taken from the [ShrinkBench repo](https://github.com/JJGO/shrinkbench) written by the authors of the [What is the State of Neural Network Pruning?](https://arxiv.org/abs/2003.03033) paper: Davis Blalock, Jose Javier Gonzalez Ortiz, Jonathan Frankle and John Guttag. They created it so that researchers can compare pruning algorithms: compression rates, speed-ups and model quality after pruning, among other things. I copy their way of measuring ``FLOPs`` and ``model size``, which lives in the ``metrics`` folder. I made some minor modifications, so any errors are mine and should not be attributed to the authors' code. I also take the logic of evaluating pruned models from this paper. All in all, it is the main source of inspiration for this work.
148 |
149 | 2. The next important source is the [Neural Network Pruning PyTorch Implementation](https://github.com/wanglouis49/pytorch-weights_pruning) by Luyu Wang and Gavin Ding. I copy their high-level approach to pruning:
150 | - Write wrappers around the PyTorch Linear and Conv2d layers.
151 | - A binary mask is multiplied element-wise with the actual layer weights.
152 | - "Multiplying the mask is a differentiable operation and the backward pass is handed by automatic differentiation"
153 |
154 | 3. Next, I make use of the [PyTorch Pruning Tutorial](https://pytorch.org/tutorials/intermediate/pruning_tutorial.html). It differs from the implementations above; my implementation mixes the code of the two sources above with the native PyTorch approach.
155 |
156 | Sources on knowledge distillation:
157 |
158 | 4. [Dark knowledge](https://www.ttic.edu/dl/dark14.pdf)
159 |
160 | 5. [Distilling the Knowledge in a Neural Network](https://arxiv.org/abs/1503.02531)
161 |
162 | 6. Open Data Science community (``ods.ai``) is my source of inspiration with brilliant people sharing their ideas on many aspects of Data Science.
163 |
164 | ## Footnotes
165 | 1: Indeed, at the extreme we can just predict a constant. Accuracy will be low, but pruning will be very effective: there will be no parameters left in the neural network at all.
166 |
--------------------------------------------------------------------------------
/utils/plots.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 2,
4 | "metadata": {
5 | "language_info": {
6 | "name": "python",
7 | "codemirror_mode": {
8 | "name": "ipython",
9 | "version": 3
10 | },
11 | "version": "3.7.9-final"
12 | },
13 | "orig_nbformat": 2,
14 | "file_extension": ".py",
15 | "mimetype": "text/x-python",
16 | "name": "python",
17 | "npconvert_exporter": "python",
18 | "pygments_lexer": "ipython3",
19 | "version": 3,
20 | "kernelspec": {
21 | "name": "python37964bitpytorch17conda9210703cbb104b038c93c3861334f328",
22 | "display_name": "Python 3.7.9 64-bit ('pytorch17': conda)"
23 | }
24 | },
25 | "cells": [
26 | {
27 | "cell_type": "code",
28 | "execution_count": 1,
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "from matplotlib import pyplot as plt\n",
33 | "%matplotlib inline"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 2,
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "experiments_results = {\n",
43 | " \"UnstructRandomPrun\":\n",
44 | " {\n",
45 | " \"accuracy\": [0.9809, 0.9744, 0.9665, 0.9524, 0.9194, 0.8529, 0.4458, 0.1365],\n",
46 | " \"compression_flops\": [1, 1.3706, 1.9531, 2.9137, 4.6296, 8.0134, 15.625, 36.75],\n",
47 | " \"compression_size\": [1, 1.3695, 1.9455, 2.8927, 4.5614, 7.7750, 14.7368, 32.2124]\n",
48 | " },\n",
49 | " \"UnstructPrunL1Norm\":\n",
50 | " {\n",
51 | " \"accuracy\": [0.9809, 0.9824, 0.9818, 0.9781, 0.9698, 0.9543, 0.8555, 0.7565],\n",
52 | " \"compression_flops\": [1, 1.3706, 1.9531, 2.9137, 4.6296, 8.0134, 15.625, 36.75],\n",
53 | " \"compression_size\": [1, 1.3695, 1.9455, 2.8927, 4.5614, 7.7750, 14.7368, 32.2124]\n",
54 | " },\n",
55 | " \"StructuredPrunL1Norm\":\n",
56 | " {\n",
57 | " \"accuracy\": [0.9809, 0.9781, 0.9667, 0.9477, 0.9228, 0.753, 0.5095, 0.1135],\n",
58 | " \"compression_flops\": [1, 1.4268, 1.9976, 2.94, 4.98, 7.15, 16.34, 25.33],\n",
59 | " \"compression_size\": [1, 1.3561, 1.8934, 2.78, 4.23, 6.06, 11.97, 18.83]\n",
60 | " }\n",
61 | "}"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 3,
67 | "metadata": {},
68 | "outputs": [
69 | {
70 | "data": {
71 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3gAAAFNCAYAAABSRs15AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOydd3gVxfrHP3PSGwkkEEqAEAgJpAIxdAxVkCKg0hXkx1VEBRuIVwXseMEroCBevcIVpEgVQURa6IKhdxIwELokAZJQAsn+/phNclLOSSHthPk8zz5nz87szDvbvvvOThGapqFQKBQKhUKhUCgUCsvHUNYGKBQKhUKhUCgUCoWieFAOnkKhUCgUCoVCoVBUEJSDp1AoFAqFQqFQKBQVBOXgKRQKhUKhUCgUCkUFQTl4CoVCoVAoFAqFQlFBUA6eQqFQKBQKhUKhUFQQlIOnUCiKBSGEtRBCE0J4l7UteSGE+E4I8c8HTKOTECK2mEwqdsr7OVAoFArFgyOE+EgIMbeAcbcLIYaVrEXlGyFEhBDiaFnbUZooB+8BEULECiE6FXG/q0IIJ6NtI4QQkQXcf64Q4qN84gwTQqQJIZKNlojC2lpY9Hw1IcQXObY/oW+fq//31v//miPefCHEJH09Qghx3igsQAjxuxAiQQhxXQixVwjxuBBisFEZbwsh0o3LXQjbDUKIs0KIQw9yDB4GyrNo5HUvaZo2QtO0T8rIJIXioURppMl8lUYqFKWEpmmRmqYFlLUdpYly8MoWK2BMCeexS9M0Z6MlsoTzy+A00E8IYW20bShwKo+4zYUQrQqY7i/AeqA6UA0YDdzUNO3HjDIC3YCLxuUuhN3tgSqAvxCiSSH2e2ByHCuFCfQXDPXsUigqPkojJUojURr5sKDOc/GgXpJKECFEDyHEAb0WbacQIjhHlCnAm0IINxP7+wsh1us1cSeFEP307c8Dg4Fxeu3bL8Vkby8hxFHd3kghRCOjsFghxJtCiENCiBtCiMVCCHszyV0GDgOP6ftXAVoBq/KI+y/g4wLY5wHUA77VNC1VX3Zomra9EMXMj6HAcuA3fd04f3e9VviSECJRCLHMKKyvfq5vCiFihBBd9O3njWuEhVGzCiFEA7129jkhxDngd915WSqEuGziPDgKIb4QQpzTz8NWIYSdEGKdEOLFHPYeE0L0zFlA/ZoamWPbEf38G4QQM/Sa8xv6+W6cRxqfAS2B2fo1OM0o+DH9GCQKIWbk2G+EEOKEHrZWCFHbKKyNECJKz3ePEKK5Udh2IcSHQohdQApQRwjhJoSYo5+P80KID3T7g4CvgLa6bdf0NDJrvfM5ZyOEEMeFEElCiNNCiBE5y58X5o6dnvdMIcRGPd3NOcreWAixQb/XTwghnjQKsxdC/FsIESeEuCKEmGV87wkhxuvXywVyX7PZvrIKoy8gIqs55ytCiL+EENeEEJOFcp4VpYRQGqk0soJqpL7Pdl2X/hBCpAghVurHaKF+HHYLIeoYxTengT5CiG26fqwD3HPk1VrP57p+nNvlZVMeNloLId4TUutu6vnXLIA9BS5bflojpC5t1Y9rAvCu0fZc7wvmzoGQz5QM/T4vhHhN356te4WQX7q36MfrsBCiu1HYfD39tXo6u4QQ9QpyPMsVmqap5QEWIBbolMf2JsBVoDmyFnKoHtfOeD/kg/IjfdsIIFJfdwLigOcAaz29a0BjPXxuxn5mbBuGfBm+hqwVfA+wNhG3oR63M2ADjANiAFsje/cANZG1d8eBkWby3Q4MAhbr20YB3wAfAXP1bd6ABrgAFzKOIzAfmKSvRwDn9XUBRAOrgd6Ap4n8M/fJsf0bYIaZ4+UMJANdgP76+bM2Cl8HLAAq68eonb69FXAd6IisNKkN+Olh54EIozSMy99AL/8cwBFw0Pcfph8Te6SjEpWjDBuBGsjrqo1uyyBgh1G8ZjntNwobDmwx+h8CxAO2QHf9PLvqtjQGqps4XtuBYUb/rfXy/Kzv7w0kGJ3XJ4GTgJ8edxKwTQ/zAG4AA/WwZ3SbKhvlFQs00strjaypnqUfO09gL/B/Oe8lI/uMrytz56wn4IO83joAt4FgPawTEGvieJg8dnreN4DWgB0wk6x73Rl5/T+rl6uZXvYMe74EViCvu0rAr8CHelgP4JKelxPwk34OvE2cI+NnTMb52qCnXRd5zw/Lq3xqUUtRFpRGmspXaaT2UGjkSaSeVAZO6P/bI6/ZBUhnHPLXwD+RFR52+vlLNjpOtfW4j+k2dUVe0+5GdgwzYePbwEHAV983FHn9FkSTC1o2s1qDvK/vAy/q58wB8+8L5rT2b6CVvl4FaKqvZ2q3fh7/Qt7DNnpYMtDA6P66BoTp4YuB+WX9LC3sUuYGWPqCafH6Gv0lzGjbSeBR4/2AQP0mqkp28eqfcTEb7f8NMFFfn0v+4uWDrM0zAEHAMeBtE3HfA34y+m9ACkqEkb1DjML/Bcw2kdYw/eZ3AK7oN+EfyJfbvMTLGiluf+jb8xQv/b8X8oF+GkgHtgK+OfLPtk8hzuUwZK1qxgMmCeiph9VGPoBc89jvv8AUE2kWRLzqmLHJQ4/jpNt1FwjII54DUkDr6f+nYUKo9fNxC/DS/38G/Edf74J8UDcHDPkcL1MOXgujbcuBN/X19cDQHPHvArWQL2k7c6T/Z8Y1p+c1wSisFtLxsjPa9gywXl/Pz8Ezec7yKOdq4CV93ZyDZ/LY6XnPN/rvql+/NZBfGjbncU29g7wP7wB1jcLaAtH6+g8YPQeQQldYB6+TUfhoYF1h7x21qMXUgtLIvNIahtLIjLCKrpFvGf2fDvxi9L8PunOKGQ3Ur9NUwNEo7Cej4/QOMCfHvhuBwUZ2DDNh42mgex7bC6LJBS2bWa1B3tdncuRl7n3BnNZe1NNzybHd2MFrj7x3hVH4EuBdo/trtlFYL+BIYe+Xsl5UU5ySoy7whv7597oQ4jryAVjTOJKmaUeQL5Dj89i/eY79ByPb1edCZO9AvVZP+4ymaX9pmpauadph4APgKRP21gTOGtmVjqwdrWUU57LR+i1kbZ5JNE27DaxBfm531zRth5no3wGeeTWXyJHmeU3TXtY0rT7yGKUgX3KLg6HI2tQ03fYVZDVBqQ1c0zTtRh771UY+JItKXMaKEMJKCPEvIcQZIcRNZC0XSBHzRNY85cpLt3cpMEQIYQUMAObllZleht+A/kIIocf9UQ/7HZiNfPm6IoSYLYRwKWR5TF0ndYGZRtfzNeQLiBc5rj+ds2S//uKM1usiazKvGKU3E3mMCoLJc6Y38dgt9EEKkGLikV+CBTh2cUZxbyBfWmvqZWmd417vj3T+quvlPGgUthrZtwZ9f+PjkvMYFoSc+9c0FVGhKEaURiqNLCiWrJFXjNZv5/E/4xoxp4E1gXhN027lCMugLjAwx73QgoI9y02dm4JockHLloE5rYnLEdfk+0I+56AP0iE7J2Tz3ebkpiZwTtO9NxNlK9S9XB5RDl7JEQd8rGmam9HiqGna
wjziTgT+Qe6X2S059nfWNC2jDbnxhYlm1IFa07RuJmzSkE048uIi8oYCQH+o1UbWcjwIPwBvIGtETKJpWirwPvChGRtz7hOHfKkPfEAbEULUBR4FhgnZtv8ysolLTyFEZeT58BBCVMpj9zigvomkU5BNSzLI9fKR4yHzLPA4smmgK7IGE+QxuYKsxTOV1/+QLzhdgERN0/40EQ9gIbLpRRvkc2CrkT3TNE1rijyujYHXTaShmdhuijhkE0rja9pB07Td5Lj+dOqQ/fozzi8O+dCtYpRWJU3TgvOIa8qWXMdRCOGAfAn4FNm8yQ34nYJfk+aOnXGfO1fk+b2o27Ixj3v9ZbLOuZ9RmKumaa56UpeM00UeM2Pyvf7y2P9iQcqqUDwgSiMlSiMrrkYWBnMaeAlw1/XJOCyDOOQXPON7wUnTtCkFyNfUuSmIJhcWc1qTU7PNvS+YPAeapu3WNK0XshJ0NbAoDzsuArX1e7i4ylbuUA5e8WAj5EAIGYs18C0wUgjRXEichBDd86rp0TQtBtnGd7TR5tVAQyHEM0IIG315RGR1Jr6C/GxvEiFENyGEp77uj2xi8rOJ6D8B3YUQHYUQNkjBuQvsLPBRyJstyD4LXxYg7jxkm/queQUKISoLId4XsuO1QcgO5cORTVselGeRzXP8kG3QQ/X1y8AAXSg3IGuU3PTzkdGJ+b/ACCFEe90uLyGEnx52ABggZCfjcKBvPna4II97PFL0MjvWa5qWhmx2NE0IUV2vyWytny+QTSZskc1J8qyZNOIXZJv7CcCiDAEVQoTrizVSeFORtWZ5ke81mIPZwDsZ17B+HDNqy1cDAUKI/vqxGoQU7jV5JaSfjy3AVCFEJf24NzA6J1cAL6NjkxNT58wOeQz/BtKEED2Q/UbypQDHrqcQoqUQwg7ZDGmbpmmXkIMqBAghBhnd6+FCCD/9nH+HPOdV9WeJl9AHKEDet8OFHGzCCfkibMwB4EkhhIMQoiHyfsnJOP1c1EE+gxYXpLwKRSFQGmkapZEVVyMLg0kN1DTtNHAImCSEsNWPa3ejfecBfYQQnfUy2+vHuiBf8L4DPhJC1Nfvw1AhB/wplCYXkMJojcn3BVPnQNe5QUKISpqm3UM2Ic7r3OxENid+Q79OOyArDSqU9ikHr3j4Ffk5OmOZpGlaFLLG8SsgEb1DqZk0PkC2IQdA07QkZC3TAGRtw2XkQ8lOj/JfoLGQn69XmkizI3BICJGi27gcyHMeME3TTiLben+J/BTeE9m2PtVcwfNDk2zUNC2hAHHTkA/TKiaipCL7JGwAbgJHkA/6YQWxRciJrr8yEfwsMFPTtMtGyyVkn46MJihD9N9TyJeHV3S7dyLP9Qxks7vNZNVUvQP4I9v+v4fseGyOOcjzfRE4Su6Xh9eQnff3Igcw+QS9NlcXoB+QNVo/mstE07Q7wEpku3Rjm9yQ19Z1ZJ+SS8C/TSQzjaxmIabiGOe5RE9riZBNaw6hjyCnadrfyGYVbyGF+zWgh6ZpiWaSHIK8Z44h77ElZNX+rkcONnBFyJrmnLbkec40Tbuu570CeXyfQgpdQcjv2M1HOnbXgGDkNZfRHOgxvTyXkPf6p2Td628gm4/s0W39HfnigaZpvyBr6Lcgr8v1OWyaiqwZvQp8T95fCX5BvmTt18s9t4DlVSgKitJIEyiNrNAaWWAKoIEDkH00E5DHbJ7RvrHIponvISsnzyF1oyDv+FOQ5dyIvGb+A9gXUZPzo8BaY+59AfPnYChwVt/n/8i6Jo3Tvou8f59A3sszgEGapkU/QNnKHSL7l2+FQmHJCCGGA89qmhZR1rYoshBCzAdiNE2bVNa2ZKDXft5DDjoQW8bmKBQKRYmjNLL0UVpTNqgveApFBUFvojcKWQOnUCgUCoVCR2mk4mFCOXgKRQVAyEk6ryKbZlSoduQKhUKhUDwISiMVDxuqiaZCoVAoFAqFQqFQVBDUFzyFQqFQKBQKhUKhqCAoB0+hUCgUCoVCoVAoKgjWZW1AYfHw8NC8vb1NhqekpODk5GQyvLyj7C97LL0Mlm4/WH4ZlP3Fx969e69pmla1rO2wFExpZHk6p8VNRS4bVOzyqbJZLhW5fJZSNnP6aHEOnre3N1FRUSbDIyMjiYiIKD2Dihllf9lj6WWwdPvB8sug7C8+hBBny9oGS8KURpanc1rcVOSyQcUunyqb5VKRy2cpZTOnj6qJpkKhUCgUCoVCoVBUEJSDp1AoFAqFQqFQKBQVhBJz8IQQ3wshrgohjpgIF0KIGUKIGCHEISFE05KyRaFQKBSK8oTSSIVCoVCUFCXZB28u8BXwg4nwboCvvjQHvtZ/FQpFOebevXucP3+eO3fulFgerq6uHD9+vMTSL2mU/YXH3t4eLy8vbGxsSjXfMmQuSiMVigqFsT5aug7kR0UuX3krW1H0scQcPE3TtgohvM1EeQL4QZMzrf8hhHATQtTQNO1SSdmkUCgenPPnz+Pi4oK3tzdCiBLJIykpCRcXlxJJuzRQ9hcOTdOIj4/n/Pnz1KtXr9TyLUuURioUFQ9jfUxOTrZoHcgPS9c5c5SnshVVH8tyFM1aQJzR//P6tlziJYR4HngewNPTk8jISJOJJicnmw0v7yj7yx5LL0NJ2+/q6oq7uzvJyckllkdaWhpJSUklln5Jo+wvPLa2tly/ft2i771iplg10tKfa+aoyGWDil2+ilY2Y320dB3Ij4pcvvJWtqLoo0VMk6Bp2n+A/wCEhYVp5oYutZShTU2h7C97LL0MJW3/8ePHqVSpUomlD+Wr9qwoKPuLhr29PU2aNCn1fC2dgmikpT/XzFGRywYVu3wVrWzG+mjpOpAfFbl85bFshdXHshxF8wJQ2+i/l76tRIlJjKH3z72J+XdDmOSae5niW9ImKBSKByA2NpbAwMBs2yZNmsTUqVMLlc7169eZNWtWsdq1YMGCzP+RkZG4uroSGhqKv78/b775ZrHlNXfuXF5++eViScvb25ugoCCCg4Pp0qULly9fLpZ0FQ+M0kiFQlFolEYqjYSydfBWAc/qI4W1AG6UaN+CKb7cet+NUct6ciYxhpcqWXFLCGJsbOhdqzoxGR0XU64qUVMoHgLMidf9+/cLnV5O8QJo27YtBw4cYP/+/axevZodO3YUydaSZvPmzRw6dIiwsDA++eSTXOFpaWllYNVDj9JIhUJRZiiNzMISNbIkp0lYCOwC/IQQ54UQ/yeEGCmEGKlH+RU4A8QA3wKjSsoWAFKuMsGjCglWBjQhiLcy8I5HFUZ5VuWMjQ0veVbllrkBI1KuZq1P8VUCp1AUkJX7L9B68ibqjV9D68mbWLm/5D5CRERE8NZbbxEeHk7Dhg3Ztm0bAEePHiU8PJzQ0FCCg4OJjo5m/PjxnD59mtDQUMaOHUtkZCRt27alV69eNG7cOFct6NSpU5k0aRIAMTExdOrUiZCQEJo2bcrp06cZP34827ZtIzQ0lK+++iqbXQ4ODoSGhnLhgiz7nj17aNmyJU2aNKFVq1acPHkSkLWOffv2pWvXrvj6+jJu3LjMNObMmUP
Dhg0JDw/PJoKxsbF06NCB4OBgOnbsyLlz5wAYNmwYL774Ii1atMDHx4fIyEiGDx9Oo0aNGDZsWJ7Hr127dsTExADg7OzMG2+8QUhICLt27cLb25tr164BEBUVldmsatKkSQwfPpyIiAh8fHyYMWNGUU7dQ0eF1sgMlFYqFGYpTX2E4tXIs2fPFlkjv/jii2x2lTeNHDlyJHlhSRpZkqNoDswnXANeKqn8c7LC2Ymtjg7cNUif9q7BwAYnRwyQKWYTPaow5e9404ksHgKVauUtZGB6u0LxkLJy/wXeXn6Y2/dk7daF67d5e/lhAHo3qVUied6/f589e/bw66+/8v7777NhwwZmz57NmDFjGDx4MKmpqaSlpTF58mSOHDnCgQMHANlcZN++fRw5coR69eoRGxtrMo/Bgwczfvx4+vTpw507d0hPT2fy5MlMnTqV1atXk5SUxN69ezPjJyYmEh0dTbt27QDw9/dn27ZtWFtbs2HDBv75z3+ybNkygMzaTDs7O/z8/HjllVewtrZm4sSJ7N27F1dXV9q3b5/ZFv+VV15h6NChDB06lO+//57Ro0ezcuXKzHx37drFqlWr6NWrFzt27OC7777jkUce4cCBA4SGhmYr1+rVqwkKCgIgJSWF5s2b8/nnn+d7zE+cOMHmzZtJSkrCz8+PF1988WGa7qBIWIJGbiysRs5qBS7VwaUGuHgqrVQozFAW+gjFp5FHjuQ5hSeQv0ZmpJdBedPIZs2aWbxGWsQgK8XBtCpu3Dbk+GApBOn66l2Dgc2ODqxwdqJPcgoxNja8Wc2dqVfjaXDvnoz09yk4vdl8RhsmQZX6UMVHLi7VIWet5xTfvAXOqRo88m1RiqdQlAnv/3KUYxdvmgzff+46qWnp2bbdvpfGuKWHWLjnXJ77NK5Zidcj6phM09TUDBnb+/btC0CzZs0ynbSWLVvy8ccfc/78efr27Yuvb95fEMLDw/MdhjgpKYkLFy7Qp08fQHZ8NsW2bdsICQkhOjqaV199lerVqwNw48YNhg4dSnR0NEII7mU8Y4COHTvi6uoKkFlLeu3aNSIiIqhatSoA/fv359SpUwDs2rWL5cuXA/DMM89kq9Hs2bMnQgiCgoLw9PTMFKaAgABiY2Mzxat9+/ZYWVkRHBzMRx99BICVlRVPPvmk2WORQffu3bGzs8POzo5q1apx5coVvLy8CrSvonyQl0ZqQpDR8OiuwUBkfhpZuS4kXYarxyH5ivkMf3lVxnfTl8p1wdE9t16Cec0cG134wioUpcBnv58m+tptk+FF1ceJPQPM5qs08sE10t/f3+I18qFx8F5NuM6n7pVzO3lG3DUY+MS9Mm1v3WaUZ1UuW1vxkmdVVly4hKOmwct7QNPgfTfTGe38EtKN2ibbOGY5e1V8wL2+qtVUPDTkFK/8thcEd3d3EhMTs21LSEjIFB07OztAPnwz+gkMGjSI5s2bs2bNGh5//HG++eYbfHx8cqXt5OSUuW5tbU16epadRZnYvW3btqxevZq//vqLFi1a0K9fP0JDQ3nvvfdo3749K1asIDY2Ntsochn25yxDUchIy2AwZEvXYDBkS3fz5s14eHhk/k9KSsLe3h4rK6vMbcbHI+exKE6bFWVDQTTyjsHAv6q40SXlVt4aOXBhVuT0NPigiukMj/0MtxOyb7NxArc6WY5f5bryv9JMRQWkJPQRlEYWhqJqJFDuNfKhcfD6JKeww8GeSL0JilV6OghBmlFNh5WmccdgoGvtmqQh8m6Wkt/Ezu9cgRtxkHAm+3L1OJxcC+n3zO5eJX4fxNcG19pgbfugxVYoSpT8ahJbT97Eheu5azBruTmw+IWWJvczN/+Ms7MzNWrUYNOmTXTo0IGEhAR+++03xowZw5w5c/Lc58yZM/j4+DB69GjOnTvHoUOHCAkJMZuPp6cnV69eJT4+HmdnZ1avXk3Xrl1xcXHBy8uLlStX0rt3b+7evUtaWhouLi4m06tXrx7jx4/ns88+Y+HChdy4cYNatWQTnLlz55q0IYPmzZszZswY4uPjqVSpEkuWLCEkJASAVq1asWjRIp555hl+/PFH2rZtm296RcHb25u9e/fSrVu3zKYyiopDQTRSaBrJVlZ086pJssGERmZgsMIsb/0Fd27C9XP6chYSz8rf6+cgdgekFmAeqhNrcEq+CrdDwcFM5atCUcq81aW+2aH2i6qP+VFaGlmtWjWlkUaUN40sy1E0SxenanxwLYEqaekITcMzLZ32t25hm64BoKVbc+dmCIMT75AqBPcNUtQymqXMc65asHysrKFKPWjQEcL/AV0/hUGL4ZUoePcKjDlodvfgw+/Dl03hY0/4Igjm9oCfX4Ztn8ORZXBhH9xKMJuGQlFeGPuYHw422V/0HGysGPuY3wOl+8MPP/Dhhx8SGhpKhw4dmDhxIvXr1zcZ/6effiIwMJDQ0FCOHDnCs88+i7u7O61btyYwMJCxY8fm2sfGxoYJEyYQHh5O586d8ff3zwybN28eM2bMIDg4mFatWnH58mWCg4OxsrIiJCQk1yArACNHjmTr1q3ExsYybtw43n77bZo0aVKgmrwaNWowadIkWrZsSevWrWnUqFFm2JdffsmcOXMIDg5m3rx5TJ8+Pd/0isLEiRMZM2YMYWFh2WotFRWEAmhk6s1gOt1MJdHKwD2jvnqF0khj7CtB9UDwfxxavAjdJsuvgC/ugLfjYNxf8Hyk+TQWDeKRqFfhs7rwaR34ug0sHAhr34JdM+HYKrh4QOqmphXeRoWihCgpfYTyr5E5B1kBpZHFjdAs7IEXFhamRUVFmQzPb9LMmMQY3tz6Jt2qjWXa2ngMtacgbK6j3XMjPW4shrqfYLBOybVf+n0njgzfJdswP2h/gEmuJoP2h35KE+/KkPgXJMZCgv6bMz97V6jsDZXryd8q9bL+V6olHU1zlFCfhoowaamll6E0Jjo3fnjmx8r9F5iy7iQXr9+mppsDYx/zy7cDeXmcZLQwKPuLRl7XlhBir6ZpYaVujIViSiML+lwoukY68lHTZXQNqIGDrf5yU1w6Y0Yz+cdmju5YS0AtF9l6JvOL4DlITc4e19ZZNvl0qyNbyWSsu9WWTUJN9QHMSSn3CbR0TTJHRSub8TOsIM/RouhjecHSdc4c5bFshdXHh6aJZgYNKjdg5RNy9BxP+wtM3jiSmy5zqJT0HOP7NOPjrb24XWkpwpDVlFLTAJFK268/4cVmg+g75gTrjl5m8sbIrH07RhTLTXnDrTGERuQOuJssm65kOHwZDuDlw3BiTfamnwZrKV7GTp+xE2jnovo0KEqN3k1qWYxgKRQPO0XVSGFI5a31M5jwczt6BNelX5gXoW+e4j9/7GTW0UkknxtIdUfv4n+BrdWUv6vdhNYR2bdrGtxOlI5epuNn5ACe2wV3bmTfx8bRtPPnVgecqkoHUOmnophQ+qgoKR46B88YeWMNBgYbbR3KOzuOgtMxhOE+Wro12m0fPFwMJFgt4uP92/hoQ2/u3aqOnfdsDDY3SDLM5u0VLplp5otTNdO1f6awcwbPALnkJD
0Nbl7I/sUvwwG8sA/uXM8e39EjdxoKhUKhUBhRcI2sj391V04Z1mLPHlbGdGLhnhCqVTKQUm0ywvoG9rXncOHM60UbBr4omikEOFaRS83QvOPcvq47f3HZ+wLeiIMLUdJBNMbaXjp/CoVCUc55qB28vOjdpBap6R/w0YHnSBeJGNJdeLf5v3i6qQ+//vUrk3dP5brDTAyplRHWyQihgXUymsdipqxzLphomWvCYTQvSIExWGXVNtZrlzv8dqLu9MVmOYH7/mc6PU0rWDMVhUKhUDxU5K2Rn9GvWX32XNrD1KipHGcxDepEcTFBw2CVpZP2NZZy++Igpqw7WTgHr6SmQnBwk0v1oLzD7yZld/5u6L/xamoGhUJRvlEOXh70a1afpj7f8+bWN5nabioNKsuOqd19utO+dnuaffMMVs6nMn0gYbiPtctxrqZsBTqUneGmcKgsl5pNsraZc/BmtYQmQwIRxpwAACAASURBVCC4PxSl47xCoVAoKiymNDK8RjiLeixi7V9r+XT3p1g5ZTWBzNBJa9c/uXj9kbIyvXDYuYBnY7kYY65P4NdtIKQ/BD0t58FVKBSKMuDhGUWzkGT0Q2hQuUG27Y42jtg4Xsz1gUsY7mHnuZoFe06Rnm5ZA9fkwtYJfn8H/u0PiwbL6R3S1LxWCoVCoZCY0kiDMNDdpzuGPObTE4Z72FX7DQ0Yt/QgcQm3SsnaUsTaFn5/F/7dCOb1hUM/QWruQWkUCoWiJFEOXhF4ou4/0NKzz1GnaSCs7vLJ0QG0+n4430at4V4+c96VKab6LjhVg39shFG75bDVcbth4QD4ojH8/h78fbJ07VQoFAqFxfFa09ewMdjn2i5Ix69hFCsPnab91EjeXn44z7nAyjVm9XMTvBwFbV6Ha9Gw/B8wtSGsGAmnN8s+8wqFQlHCKAevCHzYcThBlVtAut7CNd2aILd2/O+xHwiv2oUUq2PMODqe8HmP8vaWDzj09yHK3XQUY6Nh0o3cS0Zfh2r+0OUjeP04DFgAtcLknEIzw+G7zrB3rpykVqEoZWJjYwkMDMy2bdKkSUydOrVQ6Vy/fp1Zs2YVq10LFizI/B8ZGYmrqyuhoaE0atSI999/v9jymjt3Li+//HKu7e+88w61a9fG2dk52/ZJkybh6OjI1atZA1XkjKNQFCd9fPvQoc6jWAtZGaqlW2N11wcf14ZctFqKa8PJBARtZNmhA0RM2cx7K49w6YaFOHr56aeHL3R8T857O+xXCOwrR7ue1xu+CIT1E+DKsbItg6LCojRSaSQoB6/I/Lf7FGq4VEUgqOlSjf92n0LT6k34vsdkdgzYQhf3t0lN8uGXMysY/Otgui1/nFkHZnH25lmTacYkxvDJxU+ISYwpxZLkg5UN+HeHgQuks9f5Qzm09C9jsmol/9oG6ellbalCUSjMiVdBJlbNSU7xAmjbti0HDhwgKiqK+fPns2/fvgfOxxw9e/Zkz549eYZ5eHjw+eefFyldTdNIV/e4opB80OoDqjq6IxDUcqnGzucWsuqp+SzpuYQu3p2Ju78Z+3pTqNt4MT8d2cyjUzYzadVRrt68k5lGTGIMvX/uXb50saAYDODdGnp9CW+egqfmQI1gWVn6dUuY3VauJ10pa0sVilwojSw45VEjlYNXRBxtHJnVcRY+bj7M7DgTRxvHzLBKDvZ83mMQm579jo5Os7hz6SkuXnPg64Oz6bGiB4PXDGbB8QUk3EnI3OfWvVuM2jiKy/cu89LGl7h1rxz2TXDxhNaj4aXdMGKj7Eh+Yg38rwd82QS2TMHuzt9lbaWiPDHFVw5IkHOZ4lsi2UVERPDWW28RHh5Ow4YN2bZtGwBHjx4lPDyc0NBQgoODiY6OZvz48Zw+fZrQ0FDGjh1LZGQkbdu2pVevXjRu3DhXLejUqVOZNGkSADExMXTq1ImQkBCaNm3K6dOnGT9+PNu2bSM0NJSvvvoqm11OTk40a9aMmJgY5s6dS69evejQoQMdO3YkMjKSHj16ZMZ9+eWXmTt3LgDe3t5MnDiRpk2bEhQUxIkTJ8yWv0WLFtSoUSPPsOHDh7N48WISEhJyhf373/8mMDCQwMBApk2bBkgx9vPz49lnnyUwMJC4uDicnZ0ZO3YsAQEBdOrUiT179hAREYGPjw+rVq0yf3IUDx2mdNK/ij8ft/mY35/6nRdCXuCedSy2tf9DVb9ZLDi2nLZT1vPh6mOcS7zOqI2jOHP9TPnVxYJi4yC/5A1aDG+chG7/kiNgr/un7K83/0k4vBRSLbiMisJRyvoIxauRZ8+eLbJGfvHFF9nsUhpZ/CgH7wEw1ck8A89K9kzv15IVz7yKP2NJjh6PS0pv4m+l8OmeT3l0UXsaffU04dOm8Nzq10m4k4CGRvzteCbunFjKpSkEQoBXGPScLoWqz3/k3ECbP6LFH/+AeX3gyDK4dyf/tBQVmzKYEPj+/fvs2bOHadOmZTb5mD17NmPGjMmsKfTy8mLy5MnUr1+fAwcOMGXKFAD27dvH9OnTOXXqlNk8Bg8ezEsvvcTBgwfZuXMnNWrUYPLkyZm1kTmbhsTHx/PHH38QEBCQmc/SpUvZsmVLvuXx8PBg3759vPjii4VuYmOMs7Mzw4cPZ/r06dm27927lzlz5rB7927++OMPvv32Ww4ePAhAdHQ0o0aN4ujRo9StW5eUlBQ6dOjA0aNHcXFx4d1332X9+vWsWLGCCRMmFNk2RcXFnE56OHjwUuhL/P7U77zf6n2qVbLBrsZPODf4jPknvqXbghe4mnINDY2rt67R+r8jqTd+Da0nb2Ll/gtlUJpiwskDmr8Az0fCS39Cm1dl//Zl/ydbxqwcBWe2qJYxFZ0y0EcoHxr52muvZYtvaRq5f/9+oHxrpJomoRQIrOXK4udbsO5oPT5dW4MT51pgZXcZq0oHsHHdz22bHzh6I2vqubvpd4mMi2RF9Ar6+PYpW+Pzw9ZRfskL6Q+JsZxd+Qne13bC0uFg7yaHim4yBGqEqLn1KiJrx8Plw0Xbd073vLdXD4I275jcTZi4jjK29+3bF4BmzZoRGxsLQMuWLfn44485f/48ffv2xdc37xrS8PBw6tWrZ9bspKQkLly4QJ8+8t60t889kEQG27Zto0mTJhgMBsaPH09AQAB//vknnTt3pkqVKmbzycC4PMuXLy/QPqYYPXo0oaGhvPnmm5nbtm/fTp8+fXBycsrMb+fOnfTr14+6devSokWLzLi2trZ07doVgKCgIOzs7LCxsSEoKCjzWCsUhcXOyo6+vn3p06APuy7tYv6x+WxjAwAZQ5Lc11LR7I5g5erDheuPFG3C9PJI1YbQcQK0fxfO7oBDi+Doz3DgR6jkBcFP43jXp6ytVBQBu80TIb6IA9OZ08duk83uqjSy6GRo5AsvvJC5LS+N3LZtG7169SrXGqm+4JUSQgi6Blbn99faUcnemrS71Un9uyspMW+hpdnn8n3upN1h2r5pZWNsUansTWy9QTDmEDyzEhp0gn0/wH8elX0N/vgaUuLL2kqFhePu7k5iYmK2bQkJCXh4eABgZ2cHg
JWVVWb7/UGDBrFq1SocHBx4/PHH2bRpU55pZzzAAaytrbO1qb9zp/BfpNu2bcv+/fvZu3cvI0eOLFI+eZWnqLi5uTFo0CBmzpxZoPjGdgLY2NhkviQYDIZM2wwGQ7H3lVA8fAghaFWzFbM6zcLVNvdccxnTLADcvpfGlHUVaFRngwHqtYUnZsrBWp76Xs6/t2MG4X++At+0g12zILlkv+4oLB+lkUUnQyO//fbbAsUvzxqpvuCVMnbWViTdMT7JBu5c6Y599VUIQ9a0CvZW9rzW7LXcCVgCBgPUby+X24myueb++fDbeDnVgv/jEDoE6ncAK3UJWjT51CSanRD4uTWmw5KSTAY5OztTo0YNNm3aRIcOHUhISOC3335jzJgxzJkzJ899zpw5g4+PD6NHj+bcuXMcOnSIkJAQkszk4+npydWrV4mPj8fZ2ZnVq1fTtWtXXFxc8PLyYuXKlfTu3Zu7d++SlpaGi4uL2fRMUbduXY4dO8bdu3e5ffs2GzdupE2bNoVOp6C8/vrrPPLII5li07ZtW4YNG8b48ePRNI0VK1Ywe/bsEstfoSgIb4S9wad7PuX2/ayRNbV0G+5e7Zb5/6KlTa9QUGwcIPBJuSRfJXrlZ/imRMG6t+Ucew06QnB/OQCajUNZW6swwd3272Pr4mI6QlH1MR9KSyOrVatWYTUyLCzMrEbOmzevxPIvLtQXvDKgplv2B/L9G49wP6kRmj7tgkEYiKgdQe8GvcvCvOLFoTI8MkL2NXhxJ4Q/D7HbYcHTMC0QNrwP8afL2kqFhfHDDz/w4YcfEhoaSocOHZg4cSL169c3Gf+nn34iMDCQ0NBQjhw5wrPPPou7uzutW7cmMDCQsWPH5trHxsaGCRMmEB4eTufOnfH3988MmzdvHjNmzCA4OJhWrVpx+fJlgoODsbKyIiQkJNcgK+aoXbs2/fr1IzAwkH79+tGkSZMC7Td37ly8vLwyl/PnzzNu3Di8vLy4desWXl5emR3ejfHw8KBPnz7cvXsXgKZNmzJs2DDCw8Np3rw5I0aMICQkpMD2KxQlQR/fPrTzaoeNwQYALd3A/aRG3L8RlhlHCFi05xxp6eVsGqLixLkaF7x6wgtb5Py0rUfLKRaW/Z8cjOPnl9RI1opclHeNzDnIijnKQiN79OhhViMLakNZIsrd/Gz5EBYWpkVFRZkMj4yMJCIiovQMKgIr91/g7eWHuX0va8JTW+v72NX7HM0qEQF8HbGYNt6Ny87IIlKg438/FU79JvsYRP8OWjrUaSn76jXuDXZlO/eIJVxD5ihp+48fP06jRo0KFnmKb94dxp2qZc0ZlQdJSUm4mKv5LOco+4tGXteWEGKvpmlhJnZR5MCURlric+3WvVv0/rk3l1IuoWkGkk9OAE3257G1NlCjkh1nE25T3UkwoXcTugVWN9n/yJLJde7S0+Hsdji4GI6thNRkOdBZ0NMQMgCq+pWZrYXFEq9Lcxg/w/J9jhZRH8sLlq5z5iiPZSusPqr2cWVARqfwKetOcvH6bWq6OTD2MT8a1v6W51aPJIVr/GP5Nwzxe4FXOvpSyd6mjC0uZqxtoXEvuSRdhoOLZBPOn1+CX8dBQB/p7NVpoQZmsXQsQKQUCkX5JGOahZEbRnLl1hU8q13m6hXvTM18IrQm649dYdKKfYz6cR/BXq6Me8yfNr4eZW16yWIwQL12cnl8Cpz8Verojmmw/d9QswkED5BNPJ2rlrW1ClMofVSUIMrBKyN6N6mV5+hfn3lPZGHqUv4Ue/huR3uW77vA2Mf8eDqsNlaGCujsuFSXQ0S3HgNxe+DAfDiyXP5WqQ9NBkPIQKhUs6wtVSgUCkUp06ByA9b2XUvnpZ0JCD7GzI4vZQvvElAd66sOJFTy5Yv1pxjy3920buDOuMf8CantVkZWlyK2jhD0lFySrsCRpdLZ++0tOceeb2fZX8+vm+qvp1A8RKg+eOWQEcFDuUcyr/VOxtvDifHLD9Prq+3s+Sv35IsVBiGgTnPo9SW8eQp6fw0uNWDjB/BFAMx/Co6uhPt3y9pShUKhUJQiNlY2PNXwKbad30ZcUlyucIMQPNXMi01vPsqEHo05fimJJ2bu4MX5e4m5mlwGFpcRLp7Q8iUYuQ1G/QGtXoFLh2Dpc3J+vZ9fhtgdqr+eQvEQoBy8ckiYZxgNKzck8vJylrzQghkDm5CQkkq/b3bx8oJ9XKioI4dlYOsEoYPkKFKv7IM2r8PVY7BkKHzu/2BzrykUCoXC4ni64dMYhIGfTv5kMo6dtRXD29Rj67j2vNrJl62n/qbLF1t4a+mhijvipimqNYLO78NrR+DZn8G/BxxdAXMfh+khsPFDuKaaCCoUFRXl4JVDhBAMaTSE6MRo/rzyJ71CarLpjQjGdPRl/bErdJgayRfrT3E7NS3/xCwd9/rQ8T149TAMWQY+j0LUf2F2Gzkv0J5v4VYF/rKpUCgUCjydPOlQpwPLo5dnmzohL5ztrHm1U0O2jmvPsFb1WLH/AhFTI/l4zTESU1JLyeJygsEKfCKgz9eydUzfb8HDV/bV+yoM/tMedv9HzVGrUFQwlINXTnnc53Eq21Vm/vH5ADjYWvFa54ZsejOCzo09mb4xmg6fR7Lq4EUsbSTUImGwkhOnPz0X3jgJ3aaApsGvb8LnfrDkOYjZCOkPgdOrUCgUDyED/QdyM/Umv/31W4HiuzvbMaFnYza9+ShPhNTkv9v/ot2/NvPlxmhS7pbupMPlAlsnCO4HzyyH149Dl48h/R6sHQufN4QFA+RXvnuFn7BaoVCUL5SDV06xs7Ljab+n2RK3hbibWX0Oark58NWgpvz0QkuqONkyeuF+np69i8Pnb5ShtaWMYxVo/rzsZ/DCNggbDmc2w/y+MC0INn0ECWfK2kpFCfLxxx8TEBBAcHAwoaGh7N69m2nTpnHr1q1iy+OTTz4ptrTywtvbm2vXrgFgZWVFaGgogYGBPP3008VWjtjYWAIDA3NtX7JkCQEBARgMBoyH1I+MjEQIwdq1azO39ejRg8jIyGKxR6F4EMI8w2jg1oCFJxYWqmLTq7IjU54OYd2r7WjVwJ3P15/i0Smb+d/OWFLvP6T90VyqQ6uXYeR2OUdti1Fw6QAsGSb7660aDWd3qv56FkppaOTUqVOLLa28KEuNXLFihVmN/OWXXzK3lVeNVA5eOaa/X3+shBULTizIFRZerwqrXm7D5L5BxMan0GvmdsYtPcjVpIes5q1GMHT7TH7Ve/p/UK0xbPscZjSBOd3hwEJITSlrKx96YhJj6P1zb2ISYx44rV27drF69Wr27dvHoUOH2LBhA7Vr1zYrXmlphf+yWxQH7/79on0VcHBw4MCBAxw5cgRbW1tmz56dLVzTNNKL8UUrMDCQ5cuX065du1xhXl5eTJkypchpF+VYKxQFQQjBAL8BHE84zsG/DxZ6f19PF755Jozlo1rRoJozE1cdpcPnkSzfd75iT5aeH54B0OVDeO0oPLNSjrh5eCnM6QYzQmSl6bUHf3Yr8qY49RFKTyM///zzQu9jKRrZuHFjsxr58ccf
Fznt0tJI5eCVY6o5VqOLdxdWxKwgOTX3SGBWBsGA8DpsejOCf7T1YcX+C3SYuoVvtpzm7v2H7CXL2g4CesOQpVKkOk6ApIuwciRM9YNVr8hpGB6G5qzljFv3bjFq4yjOXD/DSxtf4ta9B6t5u3TpEh4eHtjZ2QHg4eHB0qVLuXjxIu3bt6d9+/YAODs788YbbxASEsKuXbuy1QZGRUVlTq6bnJzMc889R1BQEMHBwSxbtozx48dz+/ZtQkNDGTx4cK5avqlTpzJp0iQAIiIiePXVVwkLC2P69On8/fffDBkyhEceeYRHHnmEHTt2ABAfH0+XLl0ICAhgxIgRJr9AtG3blpiYGGJjY/Hz8+PZZ58lMDCQuLg4nJ2dM+MtXbqUYcOGATBs2DBGjx5Nq1at8PHxYenSpWaPYaNGjfDzy3sy5JCQECpVqsT69etzhW3cuJEmTZoQFBTE8OHDuXtXjmrr7e3NW2+9RdOmTVmyZAkRERG89tprhIWF0ahRI/7880/69u2Lr68v7777rlnbFApz9KzfE2cbZxadXFTkNJrWqczCf7Tgh+HhuDrY8PpPB3l8+jY2HLvycHR5MIXBCuq3h77fyDna+vwH3BvIStOvmsG3HVW/92KmuPURLEMjn3zyyXKtkX5+fmY10tXVtdxrpHLwyjlDGg0h5V4KP5/+2WScSvY2/PPxRqx7tR3N61Xh07UneOyLrQ+vWFWqCW3fkCNwPrcWGj8Bh5fBfzvDzHDYPk1OsK4oFSbsnEDCnQQ0NOJvxzNx58QHSq9Lly7ExcXRsGFDRo0axZYtWxg9ejQ1a9Zk8+bNbN68GYCUlBSaN2/OwYMHadOmjcn0PvzwQ1xdXTl8+DCHDh2iQ4cOTJ48ObPG8Mcff8zXptTUVKKionjjjTcYM2YML730En/++SfLli1jxIgRALz//vu0adOGo0eP0qdPH86dO5crnfv377N27VqCgoIAiI6OZtSoURw9epS6deuateHSpUts376d1atXM378+HxtNsfYsWP56KOPsm27c+cOw4YNY/HixRw+fJj79+/z9ddfZ4a7u7uzb98+BgwYAICtrS1RUVGMHDmSJ554gpkzZ3LkyBHmzp1LfLwa0EFRNBxtHOlVvxfrYtdx7fa1IqcjhKBdw6r88nIbvhrUhNS0dEb8EMVTs3dV7CmJCoqtE4T0h2dWwGvHoPOHcP+O7Pc+tSEsHATHflZTFz0gxa2PYBka+dprr1m0Rr7zzjvlXiPVROflnKCqQYRUDeHH4z8y0H8gBmHaJ/ep6sx/hz1C5MmrfLj6GCN+iKKtrwcTejTG19OlFK0uJwgBdVvJpdtkOY/egR9hw0Q5v55vFzmRuu9jYG1b1tZaJJ/t+YwTCSdMhv9962/ikuNI12TTibvpd/k99neOxx+nqmPVPPfxr+LPqEajTKbp7OzM3r172bZtG5s3b6Z///5Mnjw5VzwrKyuefPLJfMuwYcMGFi3K+hpQuXLlfPfJSf/+/bOld+TIEQwGea/evHmT5ORktm7dyvLlywHo3r17tnwyakJB1k7+3//9HxcvXqRu3bq0aNGiQDb07t0bg8FA48aNuXLlSqHLYEzr1q0B2L59e+a2kydPUq9ePRo2bAjA0KFDmTlzJq+++iqQ/RgA9OrVC4CgoCACAgKoUaMGAD4+PsTFxeHu7v5ANioeXvr792fBiQUsj17O88HPP1BaBoOgR3BNHguozpKo80zfeIp+3+yivV9Vxj7mT+OalYrJagumUg1oPVoulw/LidQPL4WTa8DeFQL6QsgAqN1c6q4CgGkHp3Em2fR4AEXVx7fC3zKbryVo5LFjxzL/W6JGZjTdLM8aqRw8C2BIoyGM3TqWree3ElE7It/4EX7VaN3Ag/l/nOWL9afoOn0bz7Soy6udfHFzfEgdGTsXaPqMXK7FwIH5sn/eqbXg6CHFKXQweDYua0srFBeSL2SKVwbppHMh+YJJASsIVlZWREREEBERQVBQEP/73/9yxbG3t8fKyirzv7W1dWYb/Tt3CtdX1XjfvPZ3cnLKXE9PT2fjxo1UrVrw8mXUhObEOF2QXx1M2ZDRHAcoli/3GTWU1tYFk4mctmbYYzAYstlmMBiK3A9DoQDwcfWhZY2W/HTyJ4YHDi+WNG2sDAxqXoc+TWrxv12xzNocw+MztvFEaE1e79yQuu5O+abxUFA9SC6dP4AzkXBosVz2zoHK3hDcXy7u9cva0nJPSekjlH+N/OOPP7C3ty9w+kojC49y8CyAjnU74unoyfzj8wvk4IEUq+da1+OJ0Fr8e/1JftgVy8oDF3ijc0MGhtfB2uohbp3r0QA6TYL278LpTbB/Huz+BnZ9BTWbUtMpHG6HgoMbTPGFlKu503CqJvsoPOTkV5O4InoFn+75NNu8VfZW9rzT4h16N+htcr+kpCSTYSdPnsRgMODr6wvAgQMHqFu3LrGxsSQlJeHh4ZHnft7e3uzdu5du3bqxbNmyzO2dO3dm5syZTJs2DYDExEQqV66MjY0N9+7dw8bGBk9PT65evUp8fDzOzs6sXr2arl275plPly5d+OabbzLb0R84cIDQ0FDatWvHggULePfdd1m7di2JiYkmy2gKT09Pjh8/jp+fHytWrMDFpeS+zHfp0oX33nuPS5cuAbJPQmxsLDExMTRo0IB58+bx6KOPllj+CoU5BvgPYMzmMUTGRWJdjK8yDrZWjHy0PgMfqcM3W0/z/Y6/WHPoEgPD6/BKhwZUq1Twl9IKjcEKGnSUy91kOP4LHFoEW/4FWz4Dr3DZxDOgrxz52oSWtrJxg4izZVCAkufVkFfNPqOLqo/5YQka+eWXXzJ27NhM+5RGFj8P8Vu+5WBjsGGA/wB2X9pNdGLhnIoqTrZ81DuINaPb0qh6Jd77+SjdZ2xnZ8w1Vu6/QOvJm6g3fg2tJ29i5f4LJVSCcoqVNTTsAv3nyVE4u06GtFQaRs+Wc+stG5G3cwemtyuy0ce3D+282mFnJWun7Ax2RNSOeCDxSk5OZujQoTRu3Jjg4GCOHTvGpEmTeP755+natWtmB/KcTJw4kTFjxhAWFpat1vLdd98lMTGRwMBAQkJCMvsnPP/88wQHBzN48GBsbGyYMGEC4eHhdO7cGX9/f5P2zZgxg/379xMcHEzjxo0zR/uaOHEiW7duJSAggOXLl1OnTp1Cl33y5Mn06NGDVq1aZTbnyI+TJ0/i5eWVuSxZsoQVK1bg5eXFrl276N69O4899lie+77zzjvExclpWuzt7ZkzZw5PP/00QUFBGAwGRo4cWegyKBTFwaNej1LDqQYLTywskfRdHW0Y19WfrWPbMzC8Dgv3nOPRKZFMWXeCG7fvKf00xs4ZQgfCsz/LQc46vQ+pybDmDdlfb9Fgk5ppe+96KRtbfigJfYTS08hhw4YVWSOjoqLKtUb+8ssvFq+RwtIG4QgLC9OM56TISWRkZObIP5aIKfuv37lO56Wd6e7TnUmtJhUpbU3TWHf0Mh+tOc75xNsYBBiPDO1gY8WnfYPo3aRW0YzH8o8/mkbU6u8JMxyHwz/
BHTPzC04qn3MPlvQ5OH78OI0aNSpw/Fv3btH7595cTrlMDacarHhiBY42jmb3SUpKKtGat5JG2V808rq2hBB7NU0LK3VjLBRTGmnxz+Y8+O7wd0zfN51/1vgnA7sMLNG8zsan8O/1p/j5wEUcbAzcS9O4bySgxaGfprDIc6dpsr/eocVweAkkm+nzVE61tCgYP8MK8hwtij6WFyxd58xRHstWWH1UX/AsBDd7N7r7dGf1mdUk3in8Z2uQbZO7BtZgw+uPUsnempzT/ty+l8aUdSeLwVoLRgiSXepD96nwxinzcRcNht/fhajv4fRmSIyFNNW3KCeONo7M6jgLHzcfZnacaTHipVAoyjd9fftia7BlW9K2Es+rrrsT0wc0Yc3oNqRrZHPuQOlnLoSQ89Q+9rEchdMciwbD7+/B3rkQux1uXnpopjRS+qgoKVQfPAtiSKMhLItexrLoZYwIGlHkdOxtrEi6k7cjcvH67Ty3P5TY5NPX4lo0RK+HNKNhog024FYHqtSDyvWgik/WemXv/NOsoDSo3ICVT6wsazMUCkUFoop9FbrW68q6M+tITk3G2dY5/50ekICarqTez3tCZaWfJrDK51XzWjRE/w5pqVnbbJykfrr7yLn4qtSXA7dUqQ9OHhVqtE6lj4qSQDl4FkSDyg1oUaMFC08sZGjAUGwMNkVOq6abAxfyEKOabg4PYuLDxct7ID1dTqie8Bck/iV/E87I9bg/4W6OpieVS5Lg4wAAIABJREFUaumOn3duB9DBrUyKoVAoFJbKQP+BrDq9ilWnVzGo0aBSyVPpZzHz8h5IT4Mb5yHhNMTrS8JpuHwETqyBdKNKaTtX6fgZO33uDeQ2h8IP4a9QVESUg2dhDGk0hJc3vcyGsxvoVq9bkdMZ+5gfby8/zO17aZnbrAyCsY/5FYeZFQenaqZH0QQwGMDVSy712maPo2lwKyHL8UvUnb+Ev+SXv5x9EhyqGH35052/jHVnz3JVY6lpWrbhiBWKB8XS+oMrygeBHoHUsa3DopOLGOg/sFSeS3npp42V0k+zmNDSVBs3bEGOylm5rlzqd8geKe0eXD+X5fRl/J7fA0eWAcaDCVTJ7fRlOIJ2pdOnSumjorgpij4qB8/CaOvVljoudZh/fP4DOXgZHcGnrDvJxeu3cbSzIuVuGrWrqBrIbDzIVAhCgJO7XLzy6AN7N1n228v55e/8n3B0ORjPj2PjmOXsVfbO7gC61s6/CUwxYm9vT3x8PO7u7krEFMWCpmnEx8cXal4khSKDdi7tmB8/n92Xd9OiRsEmPX4QcuqnrbWBtPR0QmqrVhgmMaGlOyMjichvXysb6aDlNbfe/btSRzOdvxi5HrtNTttgjLOn7uwZOX3uDaSO2hZP3zdjfVQoioOi6qNy8CwMgzAwqNEgJu+ZzKG/DxFcNbjIafVuUitTqFLu3qfLF1sZv+wwq0e3wc7aKp+9FQ+MnTNUD5RLTu6nwo243E0/42MgZgPcN5rA02AtnTy9uadXQjqcSNEdQG+wKV6n3cvLi/Pnz/P3338Xa7rG3Llzx6Jf9pX9hcfe3h4vL69SzVNRMWjq1JQ1yWtYdGJRqTh4kF0/L9+4Q+cvtjBu6UEWP98Sg0FVfJUa1nZQ1U8uOUm9pbeaOZ3969+p33N/TaxUS+/zVz97n7/K3jKPAmKsj5auA/lRkctX3spWFH1UDp4F0rtBb77a/xXzj8/nX1X/VSxpOtlZ81HvQJ6b+yezI88wppNvsaSrKCLWtqZrLNPTIflyVnNP46afF6JocOcGnP4+K75LDaPmnt7Z+/4Vob+CjY0N9erVK3rZCkBkZCRNmjQp0TxKEmW/QlF62Agb+vr2Zc7ROVxKvkQN54LNf1VcVHe1Z2LPAN5ccpC5O2MZ3qZkn4+KAmLraLoS9c7N7M5fhgN4bBXcTsiKJwyyAjWz2afRr1vdXK1njPWxoj9HK3L5KkLZlINngTjZONHHtw8Ljy/kSrMreDp5Fku67f2r0TOkJjM3x9A9uDoNqpWvOUAUOgYDVKopF+82uYK3r/+FNo1q5u77F7NBOobG2Lvl7u+X4QC6VC9X/f4UCoXCFP38+jHn6ByWnFrC6KajSz3/J5vW4tfDl/jXuhO0969GPQ+nUrdBUQjsK0HNULnk5FaC3mLG6KtffAycj4K7N7PiGaylk2fs9GWsu6rWCIqyRTl4FspA/4HMPzafxScXF6uYTejRmK2n/uafy4+w6PkWqqmJBXLfxgW8msklJ6kpsr9CzqafF/bC0ZWgZQ0agLVDjv5+3lkOoFsd2S9CoVAoygE1nWvyqNejLItexsiQkdha2ZZq/kIIPukTpJpqVgQcq8glZ995TYOUa9mdvoTTEH9Gzt9371ZWXCs7HrGrBpeCjfr8NZAOoEsNVXmqKHGUg2eh1HapTUTtCJacWsLzwc9jb108bYWrutjxTvdGjFt6iEV/xjGoeZ1iSVdRTrB1As8AueQk7Z7e7y+j6WdslgN4ejPcNxoWXFiBW+3cUz1kDAJjq2qvFQpF6TLAfwCb4zazLnYdPev3LPX8VVPNCo4Q4FxVLnVy9PXUNEi6bOT0nebWqd04JZyWrWeM58u1cdR10yfL6cv4AuhUVTl/imJBOXgWzJBGQ9gct5lf//qVvr7/z959R0dVbQEc/p3JpJIOSSgJBEgIJdTQa+gIKEpHEWwgAoogXXpvCqKCUhREH0UQpIq00EF6Cy1A6L0HUkhy3x8JkIQWkplMZrK/te4ymblz7r4v67ndd/Y5p6nBxm0R5M2SfRcZveoodYp44umceSaaCiOysn6adFJ6nLySzvd7/PPhRRB1J/n5jjmfv9n743l/ksCEEAZWMVdFfJ19mXdsnkkKPEjeqlmrsCe+0qqZNSgFzrkSjsQtk47YhBAcHJywx9+9iylaPk/BtVA4vjLFHn/OTxd7SbrSp3uBhG8VhUglKfDMWLmc5SjkVojfj/7OO37vGGzJ+setJvUnbWLw0iNMbfucVj+RtSRNXvkqP/t+5O3kWz3cCk/45+kNcOB/yc+1c3l2vt/jn51yJcwxFEKI16RTOloXbs2Y/8Zw5MYRiuV4TqeCkSVv1TwoUx1Ewh5/rnkTjoI1k78XFwt3zqaY85c43+/I4uTbJdm7PVv0PS4E7Zwz9p5EpmfUAk8p1QD4DrACZmiaNibF+3mB2YBr4jl9NU1bacyYLIlSirZF2jJo2yD+u/IfFXJVMNjYvjmy8UVtf8avPs6/R65Qr1hOg40tLJC9G+Rxgzxlnn0v5uHTBJZ07t+l/XB0WfKnl3o7cPMlMN4JolYnX/xF5v0JCyL50TjeKvgW3+39jrnH5jKi6giTxJDTxY5BjYvSa+FBadUUL2elf7o4i3/d5O/FRsPts8n397t1CsK3wsH5yc/N5pnkW78kc/7cCxhsjz9hXoxW4CmlrIAfgbrABWCXUmqppmmhSU4bACzQNG2qUqoosBLwNVZMlqhhgYZM3DOR34/+btACD6Bj9QIsO3CJQX8foVLB7DjZyX9cizSwcQDPIglHSnGxCfP+kr
V+hmN3/hDsmZV80vrj5aqTtnsmLQBl3p8wE5IfjcfJxok3C7zJkrAl9CzbE1c702w+3jzIm1WHr0irpkg7vS14FEo4Uop5mJA3U7Z9hq2B/VeTn+uUO7H4SzHnzz3/a+3xJ8yLMb/BKw+EaZp2GkApNQ9oAiRNYBrw+HtlF+CSEeOxSLZWtrQIaMH0g9M5f+88Ps4+Bhvb2krHmGYleGfKVsb9c5zhbz9nLxkh0sNKn1io5YeCtZ68vDskhOAaNSDi6rN7/d0+A6FLEtpCk8rm+ex8v8cFoIO7zPsTmYnkRyNqXbg1C04s4K+wv/go8COTxCCtmsKobBxevGBa9P2nLZ9JC8Bjy+HhzSQnqoTF0lLu75fdTzpmLIAxC7w8wPkkv18AUn7FNAT4Vyn1OZANqGPEeCxWq4BW/HLoF/537H/0Kd/HoGOX8nHlg8q+zNoWztulcxOUTyb5igyiVMJefE45IV+lZ9+PvJN8q4fHc//ObIIDc5Ofa+ucYqXPJHP/nHLLvD+R0SQ/GpG/mz9lvcqy4PgC2hdtj5XOyiRxJG3VnL09nA+rSKumyAC2TpCrZMKRUuTthG0dbp1KPufv4J8QfffpecoK3PI9f86fi0/CvEKRqSlN04wzsFLNgQaapn2S+Pv7QAVN07omOadHYgzfKKUqATOBQE1LOqsUlFIdgY4AXl5eQfPmzXvhdSMiInB0dDT4/WSUtMY/+/psDkceZpj3MOx19gaNKSpW4+stkdjpYWhle/QveQpp7v/7g/nfg7nHD+m/B11cNHZRV7GPvIJ95GXsI69gF3Ul8Z9X0SXZ7y9eWRNp70WkfS6i7HISaf/4yEWUnSea7vWfYpr73yAzxV+zZs09mqaVffWZ5sOQ+THx3FfmyMz0NzW0593bvgf7+OXGL3T06Ehxh+Imigw0TWPi3miO3YxjeBV7vLK9/sOkrPa3sxRmdW+ahvWje9hHXsI+8jIODy8m5s5LODy8jFV81JNT45WeSPuc3Lfx5JFjXh465CLSPjeR9rmJtnVPmE5h5szlb/ey/GjMb/AuAkn7Bb0TX0vqY6ABgKZp25VSdkAO4FrSkzRNmwZMAyhbtqwWHBz8wouGhCQuS2um0hp/9uvZeXflu9zKdYv3irxn8Lhsva/x4axdhGrefBHs/8LzzP1/fzD/ezD3+MHI9xAXC/cuPGn31N06TbZbZ8h2OxyurYdHD56eq3Tg7A3uvsnn+z3+2fb5CcDc/wbmHr8ZMFh+THz/lTnSkv+mz7u3KvFVWL5wOUesj/B58OemCSxRkTJR1J24kUXn7dPUqpnV/naWwmLu7fE2SYnf+ulunSLbzVNw7iDZLh9Jvsef3j7xm77E+X5J2z8dPc1mqoQl/O2MWeDtAvyVUvlJSFytgXdTnHMOqA3MUkoVAeyA60aMyWIV9yhOSY+S/HH0D9oUboPOwE9Qahb2pHGJXPywPoyGxXPh55n5n2wI8VxW+oTN2N18gRRLVmsaRFx7TuvnmYQVP5PNXyBhU9rn7PVnHXM3YSwzSWYiw0l+NDJrnTUtAlrw4/4fCb8bjq+Lr8likVZNYdaSbpPkW/XJy7tCQgiuXj1hj78nK30mtn9eOwbH/4H4R0/HsXFKssLn4zl/iYu+yB5/Bme0Ak/TtFilVFdgNQlLPP+iadoRpdQwYLemaUuBr4DpSqnuJEwo/0AzVs9oFtC2SFt6berFpgubCPYJNvj4g98sxuaTN+j/1yGZMC4sk1Lg5JVw5K347PtRd5Nv9XDrNNwOh/AtictWJ/zrqwrAbqeEb/6eKQALgHNumcOQhUl+zBjNCzXn54M/M//4fIPPT3/tWIK8WXnoMmP/OUbNAFlVU1gInS5hoRZXHygQnPy9uFi4e+7ZOX+X9iYslJa029zO9dmi7/GcPzuXjLwji2HUffAS9+xZmeK1QUl+DiXxv4VE+tXOVxtPB09+P/q7UQo8Dydbvm5YhN6LDjJv13nerZDX4NcQIlOzc4HcpRKOlB5FJe73d4aTu9bg765LKAKvhcLxVcmfZFrZgGu+Z7d6cMufMLFdlq62eJIfjS+HfQ7q5qvL32F/83npz3GwNt1+YEopRjctIatqiqzDSp/4cLMAz6wRFRuTkC9vJn7z97gAPLcdDv3J44elQEKnzJNv/QokX/RFtkd6IaMWeCJjWeusaVO4Dd/t/Y6Tt0/i7/biuXJp1aKsN4v3XWT0qqPUKeKJp7Odwa8hhFmytgOPAPAI4OJlO/yT9u/Hx8HdC0+/+Xuy7UM4nN0GMRFJBlLg4p3QQvq8AtDOGSFE6rQp3IZVZ1ax4swKWhRqYdJYpFVTiER6G8jhn3Ck9CgysUMmxTYPYesg4o/k5zrlerq5e9I5f275E3JyFiYFnoVp7t+cnw/8zB9H/2BI5SEGH18pxaimxak/aRNDlh1hyntBBr+GEBZHl7jktFu+Z9tYNA0e3Eg+3+/xz8dWwsMbyc93yPHizd6zeci8PyGSKOVRisLuhZl7bC7N/ZujTPz/D2nVFOIVrO3Bq2jCkVJ0ROLD0RRz/p7JlSphO4fnzflzy5cl9viTAs/CuNq50qhAI5afXk63Mt1ws3Mz+DXy58hGt9r+jF99nDWhV6lb1Mvg1xAiy1AKHD0Sjrwpt0IDou6l+OYvsQB8XiuLjWNisef77Nw/F2+Z9yeyHKUUrQNaM2T7EPZe20uQl2kfSiZr1Vx0kHkdpFVTiFSzdYRcJRKOlCLvJBZ+Seb83QyDwwsT5s8/pqwSNnJPNucvsRB0zWsxeVIKPAvUtkhbFp1cxKKTi/ik+CdGuUbH6gVYduASA5ccpmIBd5zsLP9piBAmYef84k1rY6Ph9tlnWz+vHYMTqyEu5um5OuvEbxGfs9m7a74s384iLFfDAg35Zs83zD021+QFHiS0ag5sXJTe0qophOHYu0KeoIQjKU2Dh7eSfOuXpO3z3I7kUyR01uCen0DNBaLXJJ/z55Q7YVEZMyEFngXyc/OjYq6KzD02l/bF2mOdho2aX8XaSsfopsVpOnUb41cfZ1iTQINfQwjxCnpb8CiUcKQUHwf3LiVv/XxcAJ7bATH3k5ysElb2dC8Abr7kvQMcvvW0AJRVzIQZs9fb847fO/zv6P+49vAang6epg6JFkHerJJWTSGMTynIlj3h8Cmf/D1Ng4iryYu+W6ewO3cQds2A2KcbvKO3e7pozJOFXhLbPx29Mt30CCnwLFTbIm3pur4rv4f+zt+n/mZC9Qn4ufkZ9Bql87rRvpIvs7eH06RUHoLyGb4dVAiRRjqrp8tXUyP5e5qWsKdfyr3+bp2GE/9Q4MF1ODPn6fn27s/O93v8c9LNa8f7w4Nn9uGGbJ7Q66TRblWIV2kV0Io5oXOYdnAau6/uNkpOfB3SqilEJqAUOOVMOHyfLlq8+/Eef/cvJVnp83TCzzdOJHTIJNvjz/Fp4fe46HtcADq4myRHSoFnoap5V8Pb0Zvv931PbHwsXdZ1YXGTxQZfJrpn/QD+PXKFfn8dZPnn1Qw6thDCSJSCbDkSDp9yz7y9ee1KqgX6JE5mT
zL379xOOLwo+f5F1tmervj5vMQFL35diAyS1zkvFXNV5M/jf6KhGS0nvo6krZq/bQ/nA2nVFCLz0OkS5q67eEOBFA9J42Lh7vln5/xd2g+hS0GLe3quncvToi8Dc6QUeBZKp3Q4WjtyIf4CADcjbzJ422DG1xhv0Os42uoZ8U4gH83azc8bT1HcMuamCpGlxekdIGfxhCOl2Bi4c+7Z1s8bJzI+UCFeQ3RcNPEkPJwwVk58XS2erKp5nJqFPcmXXVo1hcj0rPSJnSz5IWUjwJMcmWLO37mdGRqiFHgWavHJxYTfC3/ye3R8NCHnQ1h8cjHv+L9j0GvVKuxF4xK5+H59GEMryQbNQlg0vQ3k8Es4Uhoic/VE5rT45GKO3jr65Hdj5sTXkdCqWZx6EzfRa6G0agph9pLlyPrJ38vAHGk+y8GI1zJp7ySi4qKSvRYVF8WkvZOMcr3BbxbDzlrHrCPRxMdrr/6AEEIIkUEm7Z1EZGxksteMmRNfRy4XewY2Lsp/Z27x2/ZwU4cjhLAAUuBZqC/LfIm93v6Z14tmL0psfKzBr+fhZMvXjYpw/HY883efN/j4QgghRFo9LycqFJ1LdjZRRMm1CPImOMCDsf8c5+zNB6YORwhh5qTAs1Dv+L9Dde/q2FoltEza6mzxcfRhy8UtfLrmU25E3jD4NVuW9aGwu45RK49y7V7Uqz8ghLAs2V6w/PyLXhcig6TMiXqlR0Njzdk1RMdFmzi6p62aeitFr4UHpRNGCEuUgTlSCjwLNqzyMNzt3FEosttnZ+FbCxlRZQQHrh+g1bJW7L+236DXU0rxQTFbomPjGbos1KBjCyHMQK+TMOTus4dskSAygaQ50dPBkyGVhrDzyk56hvTkUdIlz01EWjWFsHAZmCOlwLNgDtYOTKk9hQKuBfix9o84WDvQxK8Jvzf8HRsrGz5c/SH/O/o/NM1wTwpzZtPRrbY/Kw5dZm3oVYONK4QQQqRHypzYrFAzvq7wNSEXQvh689fExce9ehAjk1ZNIYQhSIFn4fzc/FjSZEmyDV0LuxdmXuN5VMldhdH/jabfln48fPTQYNfsWL0AhXM6MfDvw9yPMv1TUSGEEAKezYmtC7eme1B3VoWvYviO4QZ94JkW0qophDAEKfCyKBdbFybXmkzXUl1ZeXol7618j7P3zhpkbGsrHaObFufKvSgmrD5ukDGFEEIIY/go8CM6FO/AopOLGL97vMmLPGnVFEKklxR4WZhO6fi05KdMrTOV65HXab28NevPrTfI2KXzutG+ki+/7TjLnrO3DTKmEEIIYQyfl/6cdwu/y5zQOfx04CdTh5OsVfPaw3hThyOEMDNS4Amq5KnCgsYLyOecj24bujFpzySDbKXQs34AuZzt6PfXQWJiJUEJIYTInJRS9CnfhyYFmzDlwBRmH5lt8nhGNy2OXqeYeUj2lxVCvB4p8AQAuR1zM/uN2TQv1JyZh2fSaU0nbkbeTNeYjrZ6hr8dyImrEUzbdMpAkQohhBCGp1M6hlQeQt18dZmwewILTyw0aTyPWzWP345nzg7DTKEQQmQNUuCJJ2ytbBlcaTDDKg9j37V9tFreioPXD6ZrzNpFvGhUIheT14dx+nqEgSIVQgghDE+v0zO22liq5KnCsO3DWHVmlUnjaVHWmxI5rBiz6pisqimESDUp8MQz3vF/hzkN56DX6Wn/T3vmH5ufrknng98sip1eR7+/DkmbiRBCiEzN2sqaicETKeNVhv6b+xNyPsRksSil+CDQBr1OVtUUQqSeFHjiuYpmL8r8xvOplKsSI3aO4OstXxMZG5mmsTyd7Pi6URF2nrnFgt3nDRypEEIIYVj2ent+qPUDhd0L81XIV+y4vMNksbjb6Z6sqimtmkKI1JACT7yQi60LP9T+gc6lOrP89HLarmzLuXvn0jRWy7I+VMjvzqiVR7l2P8rAkQohhBCG5WjjyNQ6U8nrnJcv1n/B/mv7TRZLi7Le1CjkIa2aQohUkQJPvJRO6fis5GdMqTOFKw+u0Hp56zS1qzxeESwqNp6hS0MNH6gQQghhYK52rkyrOw0Pew86r+vMsVvHTBKHUooxzRJW1ewtrZpCiFeQAk+kStU8VVnw5gK8nbz5fP3nTN47mbj4uNcao4CHI1/U8mPFocusDb1qpEiFEEIIw/Fw8GB6vek46B34dM2nnLl7xiRxPF5Vc6e0agohXkEKPJFqeRzzMKfhHJr6N2X6oel8tvYzbke93ibmHasXJMDLiYF/HyYiOv177QkhhBDGltsxNzPqzQCgw78duBhx0SRxJG3VPHfzoUliEEJkflLgiddia2XL0MpDGVJpCHuu7qHl8pYcun4o1Z+30esY06w4V+5FMWH1cSNGKoQQQhiOr4sv0+pO42HsQzr824HrD69neAxJWzV7LTwgrZpCiOeSAk+kSbNCzfit4W/o0NH+n/YsOL4g1VsplM7rRvtKvszeHs7ec6/3DaAQQghhKgHuAUytM5UbkTfouKYjd6LuZHgM0qophHgVKfBEmhXLXoz5jedTPmd5hu8YzoCtA4iJj0nVZ3vWDyCnsx39Fh0iJjbeyJEKIYQQhlHSoyTf1/qec/fO0WltJyJiIjI8BmnVFEK8jBR4Il1c7Vz5sfaPdCrZiaWnljLxykTO33/1XneOtnqGNwnk+NX7TNt0KgMiFUIIIQyjQq4KfBv8LcdvHafr+q5p3ic2rR6vTC2tmkKI55ECT6Sblc6KLqW68GPtH7kZd5NWy1ux6cKmV36uTlEvGhXPxeT1YZy+nvFPQIUQQoi0quFTg9HVRrP36l66h3QnJi51HSyGktvVngGNi7DzzC1+3ymtmkKIp6TAEwZT3bs6vXP2Jo9jHrqs68IP+3545VYKg98qip1eR7+/DqV6Dp8QQgiRGTTI34AhlYew9eJW+m7uS2x8xq4O3bKsDzUKeTB6pbRqCiGekgJPGFQO6xzMeWMOTQo24eeDP9N5XeeXTkL3dLKjf8OEJ5ALdr+6tVMIIYTITJr6N6V3ud6sObuGwdsGE69l3LxyadUUQjyPFHjC4Oz0dgyvMpxBlQax68ouWi1vxZEbR154fqtyPlTI787IFUe5dj8qAyMVQggh0u/9ou/TuVRnlp5aypj/xmRoR4q0agohUpICTxiFUooWhVrw2xu/oaHx/qr3WXRi0QvPHdW0OFGx8QxdFprBkQohhBDp16lEJ9oXbc/cY3P5ft/3GXrtlmV9qC6tmkKIRFLgCaMKzBHI/MbzKetVliHbhzBo6yCiYp/9lq6ghyOf1/RjxcHLrDt61QSRCiGEEGmnlOKrsl/RzL8Z0w9NZ8ahGRl67TGJrZq9F0mrphBZnRR4wujc7NyYWmcqHUt0ZHHYYtqtaseF+xeeOe/TGgUJ8HJiwJLDRERn7ER1IYQQIr2UUgysOJCG+Rvy3d7vmHdsXoZd+3Gr5o7T0qopRFYnBZ7IEFY6Kz4v/Tnf1/qeC/cv0Gp5KzZf2JzsHBu9jtHNinPlXhQTVh83UaRCCCFE2lnprBhRdQTBPsGM3DmSpaeWZti1H7dq
ygboQmRtUuCJDBXsE8z8xvPJlS0XXdZ1Ycr+KclWHCuT1412FfMxe3s4+87dNl2gQgghRBpZ66yZUGMCFXJVYODWgaw9uzZDrvu4VdNKSaumEFmZFHgiw/k4+zCn4RzeLPgmUw9Mpcu6LtyNvvvk/V4NCpPT2Y5+fx3iUVzGLTcthBBCGIqtlS2Ta06meI7i9NrUi60Xt2bIdaVVUwghBZ4wCXu9PSOqjGBgxYHsuLyDVstbEXozYQVNR1s9w5oEcuzKfaZtOm3iSIUQQoi0cbB2YEqdKfi5+vHlhi/Zc3VPhlxXWjWFyNqkwBMmo5SiZUBLZjeYTWx8LO+vfJ/FJxcDULeoFw2L5+S7dSc5fT3CxJEKIYQQaeNs48xPdX4iZ7acdFnX5aX7whqKtGoKkbVJgSdMroRHCRa8uYDSXqUZtG0QQ7YNIToumiFvFsNWr6P/4kMZummsEEIIYUjZ7bMzvd50XGxc6LS2E2G3w4x+zaStmn9Iq6YQWYoUeCJTcLdz5+c6P/NJ8U9YdHIR7Va1I1Z3i/4NE5LTn7uf3VZBCCGEMBc5s+VkRr0ZWOus6bimI+fvnTf6NZ9sgL7qGOdvSaumEFmFFHgi07DSWdGtTDe+q/kd5+6do+XylnjnPkf5/O6MXHmU6/ejTR2iEEIIkWY+zj5MqzuNR/GP6LCmA1ceXDHq9R63auqUotdCadUUIquQAk9kOrXy1mJe43l4OnjSZV1nShXfRWTMI4YuM/68BSGEEMKY/Nz8+KnuT9yJvkPHNR25GXnTqNfL7WrPgEbSqilEVvLKAk8p9blSyi0jghHisXzO+fj9jd9pVKARc09Ox6/Enyw/fIr1x66aOjQhhHhCcqRIi2LZi/Fj7R+5HHGZTms7cS/mnlGv16qctGoKkZWk5hs8L2CXUmqBUqqBUkoZOyghIGF56VFVR9G/Qn8uxxzA1W8K/Zb9Q0R0rKlDE0KIxyRHijQJ8gpiUs1JhN0Jo/Pazjx8ZLzCS1o1hchaXlngaZr4fAH2AAAgAElEQVQ2APAHZgIfACeVUqOUUgVf9dnEZHdcKRWmlOr7gnNaKqVClVJHlFL/e834hYVTStGmcBtmNZiFs73iQY5JdF36s6nDEkIIIO05UvKjAKiSpwrjq4/n0I1DfLHhC6LjjDfXXFo1hcg6UjUHT0tYo/5K4hELuAELlVLjXvQZpZQV8CPwBlAUaKOUKpriHH+gH1BF07RiwJdpuQlh+Up6lGTx23/iYVOIPQ9/otuaAcTExZg6LCGEeO0cKflRJFUnXx2GVxnOzss76bmxJ4/iHxntWq3K+VDNP4e0agph4VIzB6+bUmoPMA7YChTXNO0zIAho9pKPlgfCNE07rWlaDDAPaJLinA7Aj5qm3QbQNO1aGu5BZBHZ7bPz1zuzsL5fm/WX/qbdqnZcjrhs6rCEEFlYGnOk5EeRzFsF36J/hf6EnA9hwJYBxMXHGeU6SinGNCuBTil6LzworZpCWKjUfIPnDjTVNK2+pml/apr2CEDTtHig8Us+lwdIusnLhcTXkioEFFJKbVVK7VBKNXiN2EUW5OZgz9ha/Yg8/z4nb52h5fKWbLu0zdRhCSGyrrTkSMmP4hltCrehW5lurDyzkhE7R5DwxbDh5Uls1dx++qa0agphodSr/gWilKoIHNE07X7i785AEU3Tdr7ic82BBpqmfZL4+/tABU3TuiY5ZznwCGgJeAObSHj6eSfFWB2BjgBeXl5B8+bNe+F1IyIicHR0fOk9ZWYSf+r8sC+KA3eu4OP/BzfirtLItRF1neuiU+nf+UP+BqZn7vcg8RtOzZo192iaVtbUcbxIWnKkIfNj4rmvzJGZ6W9qaJZ2b0tvL2XNvTXUcq7F265v8+DBA4Pfn6ZpfLM7mpN34hhRxR4PB9PsmmVpf7ukLPnewLLvz1zu7aX5UdO0lx7APhILwcTfdcDeVHyuErA6ye/9gH4pzvkJ+DDJ7+uAci8bNygoSHuZDRs2vPT9zE7iT52rdyO1wMH/aC1/DtF6hfTSAmcFal3XdtXuRt9N99jyNzA9c78Hid9wgN3aK/KNKY+05Ehj5UftJTkyM/1NDc3S7i0+Pl4buWOkFjgrUJu6f6rR7u/C7YdasUH/aK1/3q7FxcUb5RqvYml/u6Qs+d40zbLvz1zu7WX5MTWPbFTiII8LwnhAn4rP7QL8lVL5lVI2QGtgaYpzlgDBAEqpHCS0pJxOxdgii/N0tqPfG0XYeTqCIPuu9C3fly0Xt9B6eWuO3zpu6vCEEFlHWnKk5EfxQkop+pbvy1sF3+LH/T+y4d4Go1wnWavmf+eMcg0hhGmkpsA7rZT6QillnXh0IxVJRtO0WKArsBo4CizQNO2IUmqYUuqtxNNWAzeVUqHABqCXpmk303YrIqtpXc6H8r7ujFp1jHrezfm1wa9Ex0bTdmVblp1aZurwhBBZw2vnSMmP4lV0SsfQykOpm68uf93+i79O/mWU6zxZVXPlUVlVUwgLkpoCrxNQGbhIwkTwCiT2+r+KpmkrNU0rpGlaQU3TRia+NkjTtKWJP2uapvXQNK2opmnFNU178eQ6IVLQ6RSjmhYnMiaOYctDKeVZivlvzicwRyD9t/RnxI4RspWCEMLY0pQjJT+KV9Hr9IypNoYidkUYsm0I/5z5x+DXkFU1hbBMqdno/Jqmaa01TfPUNM1L07R3NVmuWWQSfp6OdK3lx7IDl9hw7Bo57HMwvd50Piz2IfOPz+fDfz7kyoMrpg5TCGGhJEcKY7KxsuETj08o7Vmafpv7sfH8RoNfI4+rPV9Lq6YQFiU1++DZKaW6KKWmKKV+eXxkRHBCpEanGgXx93RkwJLDPIiORa/T06NsD74N/pawO2G0XNaSHZd3mDpMIYQFkhwpjM1GZ8OPtX8kwD2AHiE9+O/yfwa/Rmtp1RTCoqSmRXMOkBOoD2wkYbnm+8YMSojXYaPXMaZZcS7djWTCv08XWKmbry5zG8/Fzc6NT9d8yoxDM4y2r5AQIsuSHCmMztHGkZ/q/ERe57x0Xd+VA9cPGHR8adUUwrKkpsDz0zRtIPBA07TZQCMS5hgIkWkE5XOnbYV8zNoWzv7zT7eJKuBSgLmN5lIvXz2+2/sd3TZ0436M/LeXEMJgJEeKDOFq58q0utPIYZ+Dz9Z+ZvAVo6VVUwjLkZoC71HiP+8opQIBF8DTeCEJkTa9GwTg5WRH30UHeRQX/+R1B2sHxlUfR59yfdh8YTOtl7fmxO0TJoxUCGFBJEeKDOPh4MH0etNx0DvQcU1Hwu+GG3R8adUUwjKkpsCbppRyAwaQsE9PKDDWqFEJkQZOdtYMa1KMY1fuM31z8lXKlVK0LdqWmfVn8jD2Ie+teI/lp5ebKFIhhAWRHCkyVB7HPEyvNx2ADms6cCniksHGllZNISzDSws8pZQOuKdp2m1N0zZpmlYgcaWwnzMoPiFeS71iOXkjMCffrT1J+I0Hz7xfxqsMCxovoGj2ovTb3I9RO0fxKO7Rc0YSQoiXkxwpTCW/S36m1Z3Gg0cP6PBvB25E3jDY2NKqKYT
5e2mBp2laPNA7g2IRwiCGvlUMG72O/osPPXdRFQ8HD2bUn0G7ou2Ye2wuH66WrRSEEK9PcqQwpQD3AKbUnsL1yOt0+LcDd6LuvPpDqSStmkKYt9S0aK5VSvVUSvkopdwfH0aPTIg08nS2o+8bhdl26iZ/7rnw3HOsddb0KteLCTUmcOL2CVotb2WUpaeFEBZPcqQwmVKepZhcazLn7p3js7WfERETYZBxk7Zq9lkkrZpCmJvUFHitgC7AJmBP4rHbmEEJkV5tyuWlvK87I1cc5fr96BeeV9+3PvMazcPF1oUOazrwy+FfZCsFIcTrkBwpTKpirop8E/wNR28dpev6rkTGRhpk3MetmttO3eR/0qophFl5ZYGnaVr+5xwFMiI4IdJKp1OMalqcyJg4hi0Pfem5BVwTtlKok7cOE/dMZOaNmQZ7CiqEsGySI0VmEOwTzKiqo9h7dS89QnoYbG65tGoKYZ5eWeAppdo978iI4IRIDz9PR7rU9GPZgUtsOHbtpedms87GhBoT6Fm2J4ceHqLNijaE3Q7LoEiFEOZKcqTILBoWaMigSoPYcnELfTb3ITY+Nt1jPm7VVNKqKYRZSU2LZrkkRzVgCPCWEWMSwmA+Cy6Iv6cjA5Yc5kH0y5OdUor2xdrzudfn3I+5z7sr32XVmVUZFKkQwkxJjhSZRvNCzelVthdrzq5hyLYhxGvxr/7QK0irphDmJzUtmp8nOToAZQBH44cmRPrZ6HWMaVaci3ci+ebf1G1u7mfnx4I3F1DYvTC9N/VmzH9jZCsFIcRzSY4UmU27Yu3oXLIzf5/6m3G7xhlkXrm0agphXlLzDV5KD4D8hg5ECGMJyudO24p5mbXtDAfOp24ZaU8HT2bWn0nbIm354+gffPzvx1x7+PI2TyGEQHKkyAQ6lexEu6Lt+OPoH/yw/4d0j5eyVVMWIxMic0vNHLxlSqmlicdy4Diw2PihCWE4vRsUxsPJlj6LDvIoLnUtK9Y6a/qU78P46uM5dusYLZe1ZNeVXUaOVAhhTiRHisxIKUXPsj1p5t+MaQen8cvhX9I9Zh5Xe/o3TGjV/GOntGoKkZnpU3HOhCQ/xwJnNU17/uZiQmRSznbWDGsSyKdz9jB982k6B/ul+rMN8jfAz9WP7iHd6fBvB7oHdadd0XYopYwYsRDCTEiOFJmSUoqBFQfy8NFDJu6ZSDZ9NloVbpWuMduU92HV4cuMXnmUGoU88HF3MFC0QghDSk2L5jlgp6ZpGzVN2wrcVEr5GjUqIYygfrGcNCiWk+/WniT8xoPX+qyfmx9zG82lpk9NJuyewFcbv+LBo4Qxwm6H8fbfb8uqm0JkTZIjRaZlpbNiZLWRBHsHM2LnCJadWpaunCWtmkKYh9QUeH8CSXva4hJfE8LsDG1SDBu9jq+XHHrtxORo48i3wd/yVdBXrDu3jjYr2hB6I5TO6zpz+s5puqzrwsNHMvlciCxGcqTI1Kx11kwInkCFnBUYsGUAH63+KF05S1o1hcj8UlPg6TVNi3n8S+LPNsYLSQjj8XK2o+8bhdkadpOFe16/i0opxQeBHzCj3gzuRt+lzco2XH94HQ2Nm5E3GbxtsBGiFkJkYpIjRaZna2XL5FqTcbJ14nb07XTnrDblfajqJ6tqCpFZpabAu66UerKnj1KqCXDDeCEJYVxtyuWlnK8bI1ce5UZEdJrGKJezHB8V+wg0iNUS9teLjo8m5HwIi0/K+gpCZCGSI4VZWB2+mujYpzkvPTkroVWzOEop+v4lrZpCZDapKfA6Af2VUueUUueAPsCnxg1LCOPR6RSjmxbnYXQcw5aFpnmcX478QjzJV+SMioti0t5J6Q1RCGE+JEcKszBp7ySi4qKSvZaenOXt5kD/hkXYGiYboAuR2aRmo/NTmqZVBIoCRTVNq6xpmqwmIcyan6cTnWsWZOmBS2w4nrb97b4s8yX2evtkr9lZ2dE9qLshQhRCmAHJkcJcPC9n6ZU+XTnrcavmqBXSqilEZpKaffBGKaVcNU2L0DQtQinlppQakRHBCWFMnwUXxM/TkQGLD/MgOva1P/+O/ztU966OrZXtk9fs9HY0KtDIkGEKITIxyZHCXKTMWTqlI06Lo5BboTSPKa2aQmROqWnRfEPTtDuPf9E07TbQ0HghCZExbPVWjGlanIt3Ivl2zYk0jTGs8jDc7dxRKNxs3bgTfYeJeyYaOFIhRCYmOVKYjaQ5y9Pekxx2Ofh6y9fExMW8+sMvIK2aQmQ+qSnwrJRST76iUErZA7YvOV8Is1HW1522FfPy69YzHDh/59UfSMHB2oEptadQwLUAv9T/hfeKvMec0DmsDl9thGiFEJmQ5EhhNpLmrKl1pjK0ylDC7oQxZf+UdI0rrZpCZC6pKfD+ANYppT5WSn0CrAFmGzcsITJO7waF8XCype9fh3gUF//qD6Tg5+bHkiZL8HPz46ugryjpUZJBWwdx+u5pI0QrhMhkJEcKs5I0Z1XzrkZT/6b8euRXDlw/kOYxH7dqAtKqKUQmkJpFVsYCI4AiQACwGshn5LiEyDDOdtYMfSuQo5fvMWPzmXSNZW1lzYQaE7C1sqXHhh6y8bkQFk5ypDB3vcr2wsvBiwFbBhAVG/XqD7yAt5sD/RtJq6YQmUFqvsEDuApoQAugFnDUaBEJYQINAnNSv5gXk9ae4OqD1/8WL6mc2XIyrsY4ztw7w5DtQ+RJphCWT3KkMFuONo4MrTyU8HvhfL/v+3SN9W75vNKqKUQm8MICTylVSCk1WCl1DPgeOAcoTdNqapr2Q4ZFKEQGGdYkEBsrHbNDo9NdlFXMVZEupbqw6swq5h2fZ6AIhRCZheRIYUkq5a5Eq4BWzAmdw56re9I8jrRqCpE5vOwbvGMkPIlsrGlaVU3TvgfiMiYsITKel7Mdfd4oTOjNeBbtvZju8T4p/gnVvaszbte4dM1tEEJkSpIjhUXpEdSDPI55GLh1YLqmF0irphCm97ICrylwGdiglJqulKoNqIwJSwjTeLd8XvxddYxYEcqNiOh0jaVTOkZVHYWXgxdfhXzFrahbBopSCJEJSI4UFsXB2oHhVYZz4f6FdG/3k7RV88JtadUUIqO9sMDTNG2JpmmtgcLABuBLwFMpNVUpVS+jAhQiI+l0ig8CbXkQHcvw5aHpHs/F1oVvg7/ldtRt+m7qS1y8POAXwhJIjhSWqGzOsrxX5D3mHZ/Hzss70zxOslbNRYekVVOIDJaaVTQfaJr2P03T3gS8gX1AH6NHJoSJ5HHU0TnYj7/3X2LD8WvpHq9o9qL0r9Cf7Ze3M/XAVANEKITILCRHCkvzRZkv8HX2ZdDWQUTERKR5nMetmlvCbkirphAZLLWraAKgadptTdOmaZpW21gBCZEZdK5ZED9PRwYsPsyD6Nh0j9fUvylv+73Nzwd/ZtOFTQaIUAiR2UiOFJbAXm/P8CrDufLwChN2T0jXWNKqKYRpvFaBJ0RWYau3YnTT4ly8E8m3a06kezylFF9X+JoAtwD6be7HxYj0L+IihBBCGEMpz1K0L9aeRScXseXiljSPI62aQp
iGFHhCvEA5X3feq5CXmVvOUG7kWvL3XUGVMetZsi9txZmd3o6JwRPRNI0eIT2IjkvfIi5CCCGEsXQp1YWCLgUZvG0w92LupXmcpK2apYetSXcuFUK8mhR4QrxEYB5nAK7fj0YDLt6JpN9fh9KcmHycfRhRdQShN0MZ898YA0YqhBBCGI6tlS0jq43kZuRNxv43Nl1jOVhboVNwJ/JRsly67dIjwwQrhEhGCjwhXuKH9aeeeS3yURzjVx9P85i18tbi48CPWXhiIX+H/Z2e8IQQQgijKZa9GJ8U/4Slp5ay4dyGNI8z4d8TxKfozox8FMeiE1LgCWEMUuAJ8RKX7kQ+9/WLdyKJS5mtXkPX0l0pn7M8w3cM5/ittBeLQgghhDF9WuJTAtwCGLp9KHei7qRpjBfl0ptRGrFx8ekJTwjxHFLgCfESuV3tX/hezQkhzNh8mruRr/8EUq/TM7b6WJxtnOkR0oP7MffTE6YQQghhFNZW1oysOpK7MXcZtXNUmsZ4WS6tMT6EmVvOEGGAFauFEAmkwBPiJXrVD8De2irZa3bWOtpXyoeXsy0jVhyl0uh1DFxymLBrr7dfUA77HHwT/A2XIi4xYMsAWV1MCCFEphTgHkCnEp1YFb6Kf8P/fe3PPy+X2lvrqJ/Pijxu9gxfHkrl0esY+88xrt6LMlTYQmRZelMHIERm9nbpPACMX32cS3ciye1qT6/6AU9eP3zxLrO2hTN/13nm7DhL9UIefFjZlxqFPNDp1CvHL+1Zmh5lezBu1zhmHZnFh4EfGvV+hBBCiLT4uPjHbDi/gRE7RhDkFUR2++yp/uyLcqnr3ZMEB1di37nbzNh8hp83nmLG5tM0KZWHDtUKEJDTyVi3I4RFkwJPiFd4u3SeJ8kppcA8LkxoUZK+bxRm3n/nmLPjLB/O2oVvdgfaV/aleZA3TnbWLx2/bZG27L+2n0l7JxGYI5ByOcsZ4zaEEEKINNPr9IysOpIWy1owfMdwJgZPRKlXP8h87Hm5NCTkJACl87rx43tunLv5kJlbTrNg9wUW7rlAcIAHHasVoFLB7K91LSGyOmnRFMIAcjja0rWWP1v61GJym9K4Z7Nh6LJQKo1ez5ClRzhz48ELP6uUYmjloeR1ykuvjb24/vB6BkYuhBBCpE5B14J0Ld2VdefWsfLMSoOPnze7A0ObBLKtby161ivE4Yv3eHfGThp/v4W/91/kkSzIIkSqSIEnhAFZW+l4q2Ru/upchb+7VKFeUS/+2HmWmhNC+PDX/9h44jrxz1l909HGkYnBE3kY+5CeG3vyKF6WjhZCCJH5tC/anpIeJRm1cxTXHl4zyjXcstkkPjStyZimxYl6FEe3efsJHp+wuJksyCLEy0mBJ4SRlPRx5dtWpdjatxZf1vHn0MV7tP/lP+pO3Mic7eE8SJGg/Nz8GFxpMHuv7eW7Pd+ZJmghhBDiJax0VoyoMoKYuBiGbh9q1AXC7KytaF0+L2u612Bm+7LkcbN/srjZmFXHuHJXFmQR4nmkwBPCyDyd7PiyTiG29a3FpFalcLTVM/DvI1QctY7hy0M5e/Np+2ajAo1oHdCa2aGzWXN2jQmjFkIIIZ7P18WXbmW6senCJpaELTH69XQ6Re0iXiz4tBJLulSheiEPpm06RbVx6/lqwQGOXbln9BiEMCdGLfCUUg2UUseVUmFKqb4vOa+ZUkpTSpU1ZjxCmJKNXsfbpfPwd9eq/NW5MjULezJ7WzjBE0L4ZPYutpy8gaZp9CrXixI5SjBw60DC74abOmwhhBFIfhTm7t0i7xLkFcS4XeO48uBKhl23lI8rP75bhpCeNXmvQj5WHrpMg0mbaf/Lf2wNuyFbDgmBEQs8pZQV8CPwBlAUaKOUKvqc85yAbsBOY8UiRGZTJq8bk9uUZmvfWnxe04995+7QduZO6k3cxJ+7LzOi8lisddZ0D+nOw0cPTR2uEMKAJD8KS6BTOoZXGU6cFsfgbYMzvLDKm92BIW8VY3u/WvSqH8CRS/d4TxZkEQIw7jd45YEwTdNOa5oWA8wDmjznvOHAWEAaqUWW4+VsR496AWztW4tvWpTE1lrH14sP0+S7I5Sy68qpO6cYvmO4PJEUwrJIfhQWwcfJh6+CvmLbpW38eeJPk8Tg6mBDl5p+bOlTk7HNni7IUmPcBmZsPs39KFm0TGQ9xizw8gDnk/x+IfG1J5RSZQAfTdNWGDEOITI9O2srmgV5s6xrVRZ2qkS1Qh6s/M+J6Ot1WH56OWO2/ipFnhCWQ/KjsBgtA1pSMVdFJuyewIX7F0wWh521Fa3KJSzI8ssHZfFxd2DEiqNUHrOe0auOyoIsIktRxvqPRqVUc6CBpmmfJP7+PlBB07Suib/rgPXAB5qmhSulQoCemqbtfs5YHYGOAF5eXkHz5s174XUjIiJwdHQ09O1kGInf9DLLPdyKimfduRg2xs4A+zAcr3/GG7kKUDG3HlurF2/4mlniTw9zvweJ33Bq1qy5R9M0i5p/Zsj8mHj+K3NkZvqbGpol3xuYx/3dir3F6Euj8bbx5nOvz9Gp1H1/YOx7O303jn/OPGLXlTh0Cirm0tMgvzU+TsZfY9Ac/m7pYcn3Zy739tL8qGmaUQ6gErA6ye/9gH5JfncBbgDhiUcUcAko+7Jxg4KCtJfZsGHDS9/P7CR+08ts93Dl/k2tyh+1tJK/VNd8+8/XSg5drY1eeVS7cPvhc8/PbPGnhbnfg8RvOMBuzUh5ylSHsfKj9pIcmZn+poZmyfemaeZzf3+d+EsLnBWo/R76e6o/k1H3du7mA23w34e1IgNXafn6LNfen7lT23ziuhYfH2+0a5rL3y2tLPn+zOXeXpYfjfkIYxfgr5TKr5SyAVoDSx+/qWnaXU3Tcmia5qtpmi+wA3hLe8ETSiGyKi9Hd36uNxmd/j4VKvxDxfxuTNt0iurjNtD5jz38d+aWtG8KYV4kPwqL87bf21TLU41JeyZx9t5ZU4eTjI97woIs2/omLMhy9PI92s7cSaPJW1iyTxZkEZbHaAWepmmxQFdgNXAUWKBp2hGl1DCl1FvGuq4QlqhYjmL0q9CPI7d3UbL4bjb1rkmHagXYGnaTlj9vp9HkLSzYfZ6oR3GmDlUI8QqSH4UlUkoxpPIQrK2sGbBlAHHxmS8fJV2QZVyzEsTExfPl/P1UH7eB6ZtkQRZhOfTGHFzTtJXAyhSvDXrBucHGjEUIc9fcvzn7r+1n6oGpFPcoTt83qtKttj9L9l9k1tZwei88yJhVx6jspRFQOpJcLvamDlkI8QKSH4Ul8nTwpF/5fvTf0p85oXP4IPADU4f0XLZ6K1qW86F5kDcbT1zn502nGLnyKJPXneTdCnn5oIqv5FBh1ow/y1QIYRBKKQZUHIC/mz99N/flUsQl7G2saFM+L/98WY3/dahA2XxurDj9iKpjN9D1f3vZc1baN4UQQmScxgUaU8unFt/v+55Td06ZOpyX0ukUNQt7Mq9jJZZ2rUJwYU9mbDlDtbEb6DF/P6GX7pk6RCHSRAo8IcyIvd6eicETiYuPo0dID2LiYoCE4
q9ywRxMa1eWcdXt+bhqfjaduE6zqdt564etLNpzgejYzNcuI4QQwrIopRhYaSAO1g4M2DKA2PhYU4eUKiW8Xfm+TWlCegbzfqV8/HPkCg0nb+b9mTvZfPK6PCwVZkUKPCHMTF7nvIyoOoIjN48wbte4Z973cNDRv2ERdvSvzYi3A4l8FMdXfx6gypj1fLvmBNfuyV5AQgghjCeHfQ4GVBzA4ZuH+fXwr6YO57X4uDsw+M1ibO9bm94NAjh25T7vz/yPhpO3sHjfBVmQRZgFKfCEMEO189bmw8APmX98PstOLXvuOQ42etpWzMea7tX5/eMKlPR25fv1J6k8Zj3d5u1j37nbGRy1EEKIrKK+b33q+9ZnyoEpHL913NThvDYXB2s6BycuyNK8BLFx8XSff4Dq4zYwbdMp7smCLCITkwJPCDP1RekvKOtVlmHbh3Hi9okXnqeUoqp/DmZ+UI4NXwXTrpIv649e450p22jy41b+3n+RmFh5IimEEMKwvq7wNc42zgzYOoBHceZZENnqrWhZ1od/u1fn1w/L4Zs9G6NWHqPy6PWMWnmUS3ciTR2iEM+QAk8IM6XX6RlfYzyONo70COlBREzEKz/jmyMbg94syvb+tRnWpBj3Ix/Rbd5+qoxdz3drT3L9fnQGRC6EECIrcLNzY3ClwRy7dYxph6aZOpx0UUpRM8CTuR0rsqxrVWoV9mTmljNUH7eB7rIgi8hkpMATwozlsM/BhBoTuHD/AgO3Dkz1JHBHWz3tKvmytkcNZn9UnmK5nZm49gRVxqynx/z9HLxwx8iRCyGEyApq5a3FmwXeZPrB6Ry5ecTU4RhEcW8XJrcpzcZeCV0xq5MsyLLphCzIIkxPCjwhzFyQVxDdg7qz9txafgv97bU+q9MpahTyYNaH5Vn/VQ3erZCX1Ueu8NYPW2k6ZSvLDlySCeVCCCHSpU/5PmS3y86ALQOerP5sCbzdHBK6YvrWpk+Dwhy/cp92v/zHG99tZuvFRzL9QZiMFHhCWIB2RdtRN19dJu6ZSFhUWJrGKODhyJC3irGjf20Gv1mUWw9i+HzuPqqOXc8P609yM0LaN4UQQrw+F1sXhlQeQtidMKbsn2LqcAzOxcGaz4ILsrlPTcY3L0G8pjH9UAzVx23g542yIIvIeFLgCWEBlFIMqzwMbydvfr3xKzcib6R5LCc7az6skp/1XwXzywdlKeTlxIR/T1BpzHp6/XmAwxfvGjByIYQQWUE172o09W/Kr0d+5cD1A6YOxyhs9Va0KOvD6i+r0yPIlgIe2Ri9KikSdlwAACAASURBVGFBlpErQmVBFpFhpMATwkI42jgyMXgiUfFR9NzYM92by+p0ilqFvZjzcQXW9qhOq7I+rDh0mcbfb6HlT9tZeegysdK+KYQQIpV6le2Fl4MXA7YMICrWcvdkVUpRwkPP/zpUZPnnValdxJNftoZTfdwGvpy3jyOX5EGpMC4p8ISwIP5u/rR2b82eq3uYvHeywcb183Ri+NuBbO9XmwGNinD5XiSd/9hL9XEbmBISxu0HljOnQgghhHE42jgyrMowwu+FM3mf4XJUZhaYx4XvWpdmU++atK/sy5rQqzSavIW2M3ayURZkEUYiBZ4QFqacYzlaBbTi1yO/su7sOoOO7WJvzSfVChDSsybT25Ulv0c2xv1znIqj19F30UGOXpZlooUQQrxYxVwVaRXQit9Df0/znHFzlMfVnoGNi7KtX236vlGYk9fu0z5xQZZFey7IgizCoKTAE8IC9S7Xm8DsgQzYOoCz984afHwrnaJuUS/++KQiq7+sTrMgb5bsv8gb322m9bTt/HP4CnHx8lRSCCHEs3oE9SCPYx7+uPkHDx89NHU4GcrF3ppONQqyuXctJrQoiabBV38eoNq49fy08RR3I2VBFpF+UuAJYYFsrGz4Nvhb9Do93UO6ExlrvIndATmdGPVOcXb0q02/Nwpz/lYknX7f82T1sDsPpX1TCCHEUw7WDgyvMpybsTeZuGeiqcMxCRu9juZB3vzzZTVmf1QeP09Hxqw6RpUx6xmxPJSLsiCLSAcp8ISwULkcczGm2hjCbocxYscIo/f5uzrY8GmNgmzsFcxPbYPwcbdn9KpjVBy9jv6LD3Hi6n2jXl8IIYT5KJuzLDWcajDv+Dx2XN5h6nBMRqmE/Wj/+CRhQZY6RTz5dVvCgizd5u2TlatFmkiBJ4QFq5KnCp+V/Iylp5ay8OTCDLmm3kpHg8CczOtYiZVfVKNJyTws2nOBehM38d6MHawJvSrtm0IIIXjT9U18nX0ZtHUQETERpg7H5ALzuDApcUGWDyv7sjb0Ko2/38J7M3YQcvyaLMgiUk0KPCEs3KclP6VKniqM3jmaIzeOZOi1i+Z2ZmzzEmzvV5veDQI4ff0BHX7bTc0JIczYfFrmGgghRBZmo7NhRNURXH14lQm7J5g6nEwjj6s9AxIXZOn3RmHCrkXwwa+7aDBpMwtlQRaRClLgCWHhdErHmKpjyGGfgx4hPbgTdSfDY3DPZkPnYD82967JlPfKkNPZjhErjlJp9DoGLjlM2DV5ciuEEFlRSY+StC/WnkUnF7Hl4hZTh5OpuNhb82nigizftCiJUtAzcUGWqSGyIIt4MSnwhMgCXO1c+Tb4W65HXqffln7Ea6Z5+qe30tGweC4WdKrE8s+r0rB4LubvOk+dbzfy/sydrD92lXhp3xRCiCylS6kuFHQpyOBtg7kXI9vtpGSj19EsyJtV3arx20fl8fd0Yuw/x6g8eh3Dl4dy4XbWWolUvJoUeEJkEYE5Aulbvi9bLm5h2sFppg6HwDwuTGhRkm39atGzXiFOXL3PR7N2U+ubENaEP+J+lDyZFEKIrMDWypaR1UZyM/ImY/8ba+pwMi2lFNULefD7JxVY8UVV6hXLyext4dQYH8IXc2VBFvGUFHhCZCEtCrXgzQJvMmX/FLZd3GbqcADI4WhL11r+bOlTi8ltSuOezYY/jsVQcdQ6hiw9wunr0r4phBCWrlj2YnxS/BOWnlrKhnMbTB1OplcstwsTW5ViU++afFTFl/XHrtH4+y28O30HG2RBlixPCjwhshClFAMrDaSga0H6bO7D5YjLpg7pCWsrHW+VzM1fnaswqJId9Yvl5I+dZ6n1zUY++PU/Qo5fk/ZNIYSwYJ+W+JTC7oUZun2oSeaLm6PcrvZ83ago2/rVon/Dwpy+/oAPf91F/Umb+HP3eaJj40wdojABKfCEyGLs9fZMDJ7Io/hHfLXxK2LiMt9G5AVcrPi2VSm29q1F9zqFOHLpHh/8uos6Ezfy2/ZwIqJjTR2iEEIIA7O2smZElRHcjbnLqJ2jTB2OWXG2s6Zj9YJs6l2Tb1uWRKcUvRYepNrYDUwJCZMFWbIYKfCEyIJ8XXwZUWUEh24cYvyu8aYO54U8nezoVsefrX1qMalVKZxs9Qz6+wiVRq1j2LJQzt58YOoQhRBCGFCAewCdSnRiVfgq/g3/19ThmB0bvY6mZZ4uyBKQ04lx/xyn8uiEvCkLsmQNelMHIIQwjTr56vBBsQ+YdWQWJT1L
0rhAY1OH9EI2eh1vl87D26XzsO/cbWZtC+e37eH8uu0MtQt78kHl/FTxy45SytShCiGESKePi3/MhvMbGLFjBEFeQWS3z27qkMzO4wVZqhfyIPTSPWZsPs1v28OZvT2chsVz0bFaAYp7u5g6TGEk8g2eEFlYtzLdKONZhmHbhxF2O8zU4aRK6bxufNe6NFv71uLzmn7sO3eHtjN3Um/iJn7fcZaHMdK+KYQQ5kyv0zOy6kgePHrA8B3DZcGQdCqa25lvExdk+bhqfjYcu8abP2yhzbQdbDgm89stkRR4QmRhep2eCTUm4KB3oHtIdyJizGfFSi9nO3rUC2Bbv4QNYG2tdQxYcpiKo9YxckUo529JG4oQQpirgq4F6Vq6K+vOrWPFmRWmDsci5Ha1p3/DImzrV4uvGxYh/OYDPpyVsCDLAlmQxaJIgSdEFufh4MGEGhM4f/88g7YNMrsnpbZ6K5oFebOsa1UWdqpEtUIe/LI1nBrjN9Dxt91sO3XD7O5JCCEEtCvajpIeJRm9czTXHl4zdTgWw9nOmg7VC7Cpd00mtiqJ3kpH74UHqTp2Az9uCOPuQ1mQxdxJgSeEoGzOsnxZ5kvWnF3DnNA5pg4nTZRSlPV158d3y7ClT00+Cy7IrvBbvDt9Jw0mbWbuf+eIjJGnk0IIYS6sdFaMqDKCmLgYhm4fKg/rDMzaSsc7pb1Z+UVV5nxcnsI5nRi/+jiVxqxj6LIj0gljxqTAE0IA0L5Ye2rnrc3EPRPZe3WvqcNJl1wu9vSqX5jt/WozrnkJdDpFv78OUWnMOsasOsbFO5GmDlEIIUQq+Lr40q1MNzZd2MSSsCWmDsciKaWo5u/BnI8rsKpbNRoE5mTO9rPUGP//9u47vIoy7eP4906BEGlSBUQQkKJIEaSXAIq4uogF27r2LquCYMAXBREFERB7wUVsiA3RtYAIhBqULiAKiKGtICKgdAjP+8cZ3BCTE4JJ5szJ73NduThnZs7M/cyQc+eeeeaZ6fQYt4hvNuqZhEGjAk9EgNAX/COtH6Fy8cr0ntGbX/b+4ndIf1lCfCyXN63KZ3e34Z1bW9CyRllenvkDbR+fxh1vLuSrtdt0RlhEJMJdXe9qmlZsyrD5w9i8e7Pf4US1epVKMvLyRsxK7sAtbWsw4/utdH12Dle+nMq077ZoQJaAUIEnIn8oUaQEI5NG8vuB37l/5v0cOhwdI1KaGc1rlOWFa5owK7kjt7arSerabVzx8jwueHo27y7YwL6D6r4pIhKJYiyGQa0Hke7SeWhO8O4VD6JKpYrRzxuQpf8F9Vi/bQ83jl1A51EzeXf+Bg6q0ItoKvBE5Ch1ytThwZYPMn/zfJ5Z/Izf4eS5KqWL0ff8uqT27cSQS84k/bDj/ve/odXQaTwx+Tt+2qnumyIikaZqiar0btqb1J9SeW/Ve36HU2iUSIjn5rY1mHF/B0Zd0Yj42Bju/+Abes/Yy3PT17BjzwG/Q5QsqMATkT/pWrMr3Wt3Z8zyMUxbP83vcPJFsSKxXNXsFCbd25ZxtzSnabUTeT7lB9o8Pp27xi1iQdqvOkssIhJButfuTotKLRi+YDgbf9/odziFSnxsDN0aV+Gzu9vw5k3NqVoihicmf0+rodMY+LEGZIk0KvBEJEvJzZI5o+wZ9J/dn/W/rfc7nHxjZrSqWY6Xr23KzD6hh8DOWrWVy15Mpeuzc/hg4UY9G0hEJAKYGYNaDSLWYnlwzoMcdof9DqnQMTPanFaO3k0T+PyetpxfvxJvfRUakOUuDcgSMVTgiUiWisYWZUTSCGJiYuiV0ot9h/b5HVK+q1omkQf+Vo95D3RicLf67D2Yzn3vLaX10GmM/OJ7tvwW/ftARCSSVSpeifvPvp8FWxbw9ndv+x1OoVavUklGXN6QWfd3DD1XzxuQ5YqXUpm6UgOy+EkFnohkq0rxKgxpM4RV21cxeN7gQtNlMbFIHNe0qMaUnu1486bmNKpammemr6H10Gnc/fZiFq/f7neIIiKFVrda3WhbpS2jFo4ibWea3+EUeieVSqDf+f8bkGXDr3u46bXQgCzvzF+vQcx8oAJPRMJqe3Jbbmt4Gx/98BETVk/wO5wCdaQryivXnU1K7ySua1Wd6d/9zMXPz+Wi5+YwcfEmDhxSFyERkYJkZgxsNZD42HgenPMg6YdVQESCjAOyPHVlI4rExpD8wTLaPD6dZ6et1oAsBUgFnojk6PYGt9Oqcise++oxVmxb4Xc4vqhW9gQevPB0Uh/oxKCLzuD3fQe5950ltH58GqO+XMXW3/f7HaKISKFRIbEC/Zr1Y8nWJbzx7Rt+hyMZxMfGcFGjKnx6dxveurk5Z1QuyfAvVtFySGhAlvXbNCBLflOBJyI5io2JZWjboZQpVob7Uu5j5/6dfofkm+JF47i2ZXW+7Nme125sxhmVSzLqy9W0GjqVXu8s0Q3mIiIF5MIaF9KxakeeWfwMP+z4we9wJBMzo3Wtcrx2YzMm3duWCxqEBmRJGj6du95axNINypf5RQWeiByTExNOZET7EWzZs4UHZj9Q6Ecvi4kx2tcuz9gbmjHtvvb8o3k1Jq/YTNdn5zB43l4+XvpfDqYX7n0kIpKfzIwHWz5IYnwi/Wf359DhQ36HJNmoe1JJhncPDchya7uazFy9lYuem8PlL6Xy5bcakCWvqcATkWPWoHwDks9OZubGmbyy7BW/w4kYNcoXZ2DXM5j3QCcG/P10fj/guPvtxbR5fBrPTF3Ntl3qvikikh/KFStH/xb9Wb5tOWOWj/E7HMnBSaUS6Ht+XVL7deLBC09n0/a93Pz6As59cgbjv9aALHlFBZ6I5MoVda7gghoX8OziZ0n9b6rf4USUEgnx3ND6VIa0Lcar159N7YolGDFlFS2HTqP3e0tZvqnwdm0VEckv51U/jy7Vu/DC0hf4/tfv/Q5HjkHxonHc1OZUUvok8dSVjUiIj6XvhGV/nBjdvlsDsvwVKvBEJFfMjIdaPETN0jVJnpnM5t2b/Q4p4sSY0aFuBd64qTlf9mrHFU2r8tmyn7jwmdl0f3Eun37zE4fUfVNEJM880PwBShYpSf85/TmYftDvcOQYHRmQ5ZN/tWHczc2pX6UUI6asotXQaQz4aLkGZDlOKvBEJNcS4xMZmTSS/en7uW/GfUqmYdSqUIJHutUntV8n+l9Qj82/7eOucYtoO2w6z6es4VedpRQR+ctOTDiRAS0H8N2v3/Hyspf9DkdyycxoVascY29oxuR723Fhg0qM+3o9ScOnc+dbC1miAVlyRQWeiByXU0udyiOtH+Gbrd8wfMFwv8OJeKWKhZ4PlNK7A6OvbUqN8icwbNL3tBwyleT3v2HlT7/5HaKISKB1PKUjf6/xd0Z/M7rQPtInGtQ5qQRPdG/I7OSO3Na+JrNW/0K35+Zw+YupTNGALMdEBZ6IHLfO1Tvzz9P/ybjvxvH5j5/7HU4gxMYY555ekbdubsHke9txaZOT+WjpJs5/ahZXvJTKpOWb1X1TROQ4JTdLpmxCWfrP7s+BdPWQCLKKJRNI7pJhQJYde7nl9QW
c8+QM3taALGHla4FnZl3M7HszW2NmfbOY38vMvjWzb8xsqplVy894RCTv9WzSk8YVGjNg7gA9hyiX6pxUgscuPpN5/TrR7/y6bNy+l9vfXEj7J1J4acYP7NijP06ilfKjSP4oVbQUD7d+mDU71vDckuf8DkfywJEBWWb0SeLpqxqTWCSWft6ALE9rQJYs5VuBZ2axwHPA+cDpwFVmdnqmxRYDTZ1zDYD3gWH5FY+I5I/4mHiGtx9Osbhi9Ezpye6Du/0OKXBKJxbhtvY1mdEniRevaULVMsUY8vl3tBgylX4TlvH95t/9DlHykPKjSP5qU6UNl552KWNXjGXp1qV+hyN5JC42hq4NK/OfHm0Yd0tzzqxSipFTVtFy6FQe+mg567bp748j8vMKXjNgjXNurXPuADAeuCjjAs656c65I8PjzANOzsd4RCSfVEiswPD2w1n32zoGzB2Ac+offzziYmPoUv8kxt/aks/vaUu3RlWYsGgj542aydWj5zHl2y2k696DaKD8KJLPejftTcXEivSf3Z99h/b5HY7kITOjVc1yvHpDM77o2Y6uDSsz/usNJA1P4Y43F7Jo/Xa/Q/RdfhZ4VYANGd5v9KZl5yZAN/GIBNTZJ53N3Y3vZnLaZMZ9N87vcAKvXqWSDL20AfP6deL+LnX48Zfd3PL6ApKGT+eVWWvZuVcjlwaY8qNIPitepDiDWg8i7bc0nl78tN/hSD6pXbEEwy5ryOzkDtzRviZz1vzCJc/PpfuLc/lixeZCOyCL5deZdjO7DOjinLvZe/9PoLlzrkcWy14D9ADaO+f2ZzH/VuBWgIoVKzYZP358ttvdtWsXxYsXz5tG+EDx+y/obfAzfucco7eOZsXeFdxT8R5qJNQ4rvXoGPxZ+mHHop/TmbLuIKu2H6ZoLLSuEsc5p8RTuXjenquLpP3foUOHhc65pn7HkZfyMj96y+SYIyPpmOa1aG4bRHf7CqJt7257l9m7ZnN3xbuplVArX7eVUTQfN4jc9u075Ji58RCT0w6ybZ/jpESjy6nxtKocR5FYO6Z1RGrbMgubH51z+fIDtAQmZ3jfD+iXxXLnACuBCsey3iZNmrhwpk+fHnZ+pFP8/gt6G/yOf+f+ne78D853Hd/p6H7Z88txrcPvNvxV+R3/so073H3vLnGnPfCZq5b8ibvmlXlu6srNLj39cJ6sP5L2P7DA5VOe8usnv/KjC5MjI+mY5rVobptz0d2+gmjb7gO7XZf3u7gu73dxuw/szvftHRHNx825yG/fwUPp7uMlm9yFT89y1ZI/cWcN+sKNmrLKbdu1P8fPRnrbjgiXH/Ozi+Z84DQzO9XMigBXAh9nXMDMGgMvAV2dcz/nYywiUkBKFinJk0lPsvPATpJnJpN+WMMY57X6VUoxvHtD5vbrSO/OtVm15XduHLuAjiNSGDP7R37fp+6bEU75UaSAJMYnMrjNYDbt2sSTC5/0OxwpIHGxMfy9YWU+7tGat29pQcOqpXnyy1W0GjqVBycuJ+2X6B6QJd8KPOfcIULdSiYTOgP5rnNuhZkNMrOu3mJPAMWB98xsiZl9nM3qRCRA6pSpQ/8W/flq81capjoflStelB4dT2N2ckeeuaoxZYsXZdAn39LisakM/HgFa7fu8jtEyYLyo0jBalKxCf+o9w/Gfz+eeT/N8zscKUBmRsuaZRlz/dlM8QZkeWf+BjqMSOH2N6J3QJa4/Fy5c+4z4LNM0x7K8Pqc/Ny+iPinW61uLPl5CaOXjaZB+QYkVU3yO6SoFe+dqfx7w8os3bCD1+am8dZX6xg7N42kOuW5vlV12p1WnpiYY7v/QPKf8qNIwbrnrHuYvWk2D815iAldJ1C8SOTfYyV56zRvQJbenevwWmoab85bz6QVm2la7URuaVeDc+tVjJo8ma8POheRwq1f837UK1OPB2Y9wIbfN+T8AfnLGlYtzcgrGjGnb0d6nlObFf/9jetfnc85I2fw2tw0du0/5HeIIiIFLiEugcFtBrNlzxaGLxjudzjiowolE+hzXl3m9u3IwL+fzubf9nHbGwvpNHIGb321jgPpwR95UwWeiOSborFFGZk0EjOjV0ovPYuoAFUokcA955zGnOSOjLqiESUS4hjw8QpaPjaVQf/5Vg+EFZFCp2H5hlx/xvV8sPoDZm2c5Xc44rMTisZxfetTSemdxLNXN6ZEQhz/9+Fy7kvZw6gvV7FtV5YDFweCCjwRyVcnlziZIW2H8N2v3zHk6yF+h1PoFImLoVvjKnzUow0f3tmKjvUq8HpqGknDU7hp7Hxmrd6qB9OLSKFxV6O7qFW6FgPnDmTn/p1+hyMRIC42hgsbVOaju1oz/tYW1Cgdy6gvV9Nq6DT6T1zGjwEckEUFnojku3Ynt+PWBrcyYfUEJqye4Hc4hVbjU07kqSsbM6dvR/7VoRZLNuzgn//+mnOfnMmb89ax54C6b4pIdCsSW4TBbQazbd82hs0f5nc4EkHMjBY1ytKzSQJf9mpHt0ZVeHf+RjqOSOG2NxawcF1wBmRRgSciBeLOhnfSolILHp33KCu3rfQ7nEKtYskEenWuw9x+HRnRvSEJ8TH0n7icFo9N5dFPv2XrnsN+hygikm/OKHsGN595Mx//8DHT10/3OxyJQLUqlODxyxowu28H7kqqxby1v3LpC3O59IW5TFq+mfTDkd3zRQWeiBSI2JhYHm/3OCcmnEivlF7qGhMBisbFcmmTk/lPjzZ8cEdL2tUuz5g5adw/cy+3vL6AuWt+UfdNEYlKtzW4jbpl6vJw6sPs2LfD73AkQlUokUDv8+qQ2i80IMvPv+/j9jcXcs7IGbw5bx37Dkbms35V4IlIgSmTUIYRSSPYvGcz/Wf357DTlaJIYGY0qVaGZ68+i9nJHbigRjwL0n7l6le+osuoWbz99Xr2HojMJCYicjziY+MZ3HowOw/s5NGvHvU7HIlwiUVCA7JMvy+J564+i5IJcfSfuJxWQ6fx5JTIG5BFBZ6IFKiG5RvSp2kfUjamMGb5GL/DkUwqlSrGZbWLkNqvE8Mua0BMjNFvwjJaDJnKkM9XsnH7Hr9DFBHJE3XK1OGOhncwKW0Sk9Mm+x2OBEBcbAwXNKjExLta886tLTjrlNI8NTU0IMv/fbiMtVt3+R0ikM8POhcRycpVda9iyc9LeGbxM9QvV58WlVr4HZJkkhAfy+VNq9K9ycnMT9vOq3N+ZPTMtYyeuZbOp5/E9a2r0/zUMphFx0NhRaRwurH+jUxbP43B8wbTpGITyhUr53dIEgBmRvMaZWleoyxrft7Fv2ev5b2FGxn39XrOrVeR29rXoEm1Mr7Fpyt4IlLgzIyBrQZSvWR1kmcms2X3Fr9DkmyYGc1OLcML1zRhVnJHbm1Xk3k/buPKl+fxt6dn8+78DRF7D4KISE7iYuJ4tM2j7Dm4h8HzBuu+Y8m1WhWKM+SSBsxJ7kiPDrX4Ou1XLn0hlUuen8Ok5T/5MiCLCjwR8UVifCJPdniSfYf2cd+M+ziYftDvkCQHVUoXo+/5dUnt24mhl5zJ4cOO+z/4hpZDpj
Js0nf8tHMvExdvovXQaZza91NaD53GxMWb/A5bRCSsmqVr0qNxD6aun8qnP37qdzgSUOVLFOW+znWY27cjD3c9g6279nP7m4voNCKFN+atY++B9ALLkeqiKSK+qVGqBg+3fpg+M/owcuFIkpsl+x2SHINiRWK5stkpXHF2VVLXbmPsnDRenPEDL6T8QIxBuneyctOOvfSbsAyAbo2r+BixiEh4155+LdPWT+Oxrx6j2UnNqJBYwe+QJKASi8RxXavqXNOiGpNXbOalmWt5cOJyhnz6LQfSHYe8K3r5mSN1BU9EfNWleheuqXcNb658k0lpk/wOR3LBzGhVsxwvX9uUGX06cELRuD+KuyP2Hkznicnf+xOgiMgxio2J5ZHWj3Aw/SAD5w5UV035y2JjjL+dWYmJd7bi3dtactjxR3F3RH7lSBV4IuK7Xk160ah8IwbMGcDaHWv9DkeOQ9UyiezefyjLef/dsbeAoxERyb3qpapzz1n3MGvTLCaumeh3OBIljtzLvv9Q1o+Gyo8cqQJPRHwXHxvP8PbDSYhLoGdKT/YfjqznycixqVy6WK6mi4hEmqvrXU3Tik0ZNn8Ym3dv9jsciSIFmSNV4IlIRKh4QkWGtRtG2m9pjNs2Ducca7avodtH3VizfY3f4ckx6HNeHYrFxx41rVh8LH3Oq+NTRCIiuRNjMQxqPYh0l85Dcx5i9fbVykOSJwoyR6rAE5GI0bxSc/7V+F8s2rOI11a8xp1T72TtjrXcNfUu9hzUA7YjXbfGVRhyyZlUKV0MIzTq5pBLztQAKyISKFVLVKV3096k/pTKDZNuUB6SPFGQOVKjaIpIRLmx/o1MXTmVkQtHEhcTh8Oxbe82BswdwBPtn/A7PMlBt8ZVVNCJSOB1r92d55c8z7Z92wCUhyRPFFSO1BU8EYkoMRZD3aJ1ATh4OPRsvP2H95OyIYUPV3/oZ2giIlJITFwzkd0Hd//xXnlIgkQFnohEnEm/TcJx9FDC+9L3MWrRKJ8iEhGRwmTUolHsS9931DTlIQkKFXgiEnG6lu5KsbijR5VKiE2gZ5OePkUkIiKFyb1n3as8JIGlAk9EIk7LEi1pd3I7isYWBaBoTFGSqibRrVY3nyMTEZHC4OLTLlYeksBSgSciEWlQq0GUSSiDYZQtVpaHWz3sd0giIlKIKA9JUKnAE5GIlBifyPOdnqdG6Ro81+k5EuMT/Q5JREQKEeUhCSo9JkFEIlatE2sx8aKJfochIiKFlPKQBJGu4ImIiIiIiEQJFXgiIiIiIiJRQgWeiIiIiIhIlFCBJyIiIiIiEiVU4ImIiIiIiEQJFXgiIiIiIiJRQgWeiIiIiIhIlDDnnN8x5IqZbQXWhVmkHPBLAYWTHxS//4LehqDHD8Fvg+LPO9Wcc+X9DiIowuTISDqmeS2a2wbR3T61LbiiuX1BaVu2+TFwBV5OzGyBc66p33EcL8Xvv6C3IejxQ/DboPgl0kTzMY3mtkF0t09tC65obl80tE1dNEVERERERKKECjwREREREZEoEY0F3st+B/AXKX7/Bb0NQY8fgt8GxS+Rc7okqQAADGZJREFUJpqPaTS3DaK7fWpbcEVz+wLftqi7B09ERERERKSwisYreCIiIiIiIoVS1BR4ZtbFzL43szVm1tfveI6HmaWZ2TIzW2JmC/yOJydmNsbMfjaz5RmmlTGzKWa22vv3RD9jzEk2bRhoZpu847DEzP7mZ4zhmFlVM5tuZt+a2Qozu8ebHojjECb+QBwDM0sws6/NbKkX/8Pe9FPN7Cvv++gdMyvid6zZCdOGsWb2Y4Zj0MjvWCX3oiE3hhO0vBlONOTUcIKeb8MJei4OJ+h5OifRkMezEhVdNM0sFlgFnAtsBOYDVznnvvU1sFwyszSgqXMuCM/ewMzaAbuA151z9b1pw4BfnXNDvT8mTnTOJfsZZzjZtGEgsMs5N9zP2I6FmVUCKjnnFplZCWAh0A24ngAchzDxX04AjoGZGXCCc26XmcUDs4F7gF7ABOfceDN7EVjqnHvBz1izE6YNtwOfOOfe9zVAOW7RkhvDCVreDCcacmo4Qc+34QQ9F4cT9Dydk2jI41mJlit4zYA1zrm1zrkDwHjgIp9jinrOuZnAr5kmXwS85r1+jdCXQMTKpg2B4Zz7yTm3yHv9O7ASqEJAjkOY+APBhezy3sZ7Pw7oCBwpjCJ2/0PYNkjwKTcGSDTk1HCCnm/DCXouDifoeTon0ZDHsxItBV4VYEOG9xsJ5n8+B3xhZgvN7Fa/gzlOFZ1zP3mvNwMV/QzmL+hhZt94XUoC0aXCzKoDjYGvCOBxyBQ/BOQYmFmsmS0BfgamAD8AO5xzh7xFIv77KHMbnHNHjsGj3jF40syK+hiiHJ9oyY3hREPeDCdw3+XHIRDf9ccq6Lk4nKDm6ZxEQx7PLFoKvGjRxjl3FnA+cJfXnSGwXKj/bxCvBLwA1AQaAT8BI/wNJ2dmVhz4ALjXOfdbxnlBOA5ZxB+YY+CcS3fONQJOJnTFpK7PIeVa5jaYWX2gH6G2nA2UAQLVrUgKjajKm+EE4bv8OATmu/5YBD0XhxPkPJ2TaMjjmUVLgbcJqJrh/cnetEBxzm3y/v0Z+JDQf7Kg2eL11z7Sb/tnn+PJNefcFu+X/TAwmgg/Dl6f8Q+At5xzE7zJgTkOWcUftGMA4JzbAUwHWgKlzSzOmxWY76MMbejidctxzrn9wKsE4BjIn0RFbgwnSvJmOIH5Lj8eQfyuz07Qc3E40ZKncxINefyIaCnw5gOneSPeFAGuBD72OaZcMbMTvJtXMbMTgM7A8vCfikgfA9d5r68DPvIxluNy5MvYczERfBy8m4P/Dax0zo3MMCsQxyG7+INyDMysvJmV9l4XIzSYxUpCCeIyb7GI3f+QbRu+y/BHiRG69yAij4GEFfjcGE4U5c1wAvFdfryC8l2fk6Dn4nCCnqdzEg15PCtRMYomgIWGZx0FxAJjnHOP+hxSrphZDUJnHwHigHGR3gYzextIAsoBW4ABwETgXeAUYB1wuXMuYm+qzqYNSYS6HDggDbgtQx/6iGJmbYBZwDLgsDf5AUL94yP+OISJ/yoCcAzMrAGhm69jCZ0we9c5N8j7fR5PqGvjYuAa70pYxAnThmlAecCAJcDtGW5El4AIem4MJ4h5M5xoyKnhBD3fhhP0XBxO0PN0TqIhj2clago8ERERERGRwi5aumiKiIiIiIgUeirwREREREREooQKPBERERERkSihAk9ERERERCRKqMATERERERGJEirwJCqZ2UlmNt7MfjCzhWb2mZnV9juu3DCz283s2jxaVyUz+8R7nWRmO81siffzpTd9oJn1zuKzJ5vZR2a22tufT3nP1Mq8rpVmNsCbnmhmb5nZMjNbbmazzay4mRUxs5kZHh4qIiJ5QHnvT+vKnPecmd2cYX4jb9qf8l6YdVY3s7DPezuWZTIt/9mR57DlhnesT8vt56RwUIEnUcd7KOeHQIpzrqZzrgnQD6hYQNvPk+LFOfeic+71vFgX0AsYneH9LOdcI+/nnOw+5O3LC
cBE59xpQG2gOJDxWVOznHONgKbANWZ2FnAPsMU5d6Zzrj5wE3DQOXcAmApckUftEhEp9JT3spQ57y0HLs/w/ipgaR5t67g55/7mnNtxHB99Abg/r+OR6KACT6JRB0LFxItHJjjnljrnZlnIE95VpWVmdgX8cXZvhnelaq2ZDTWzf5jZ195yNb3lxprZi2a2wMxWmdmF3vTrzexj7+HQU71pfcxsvpl9Y2YPe9NOMLNPzWypF8OR7Q81s2+9ZYd70/64ouadaZznzf/QzE70pqeY2eNenKvMrG02++RSYNJx7MuOwD7n3KvefkwHegI3mllixgWdc7uBhUAtoBKwKcO87zM8IHQi8I/jiEVERLKmvPdnmfPeOiDBzCp6BXEX4PMjM8Nsr4kX+1LgrgzLx3r79Uh7bwt3gCx0RXGmhXq8LD8St5mlmVk5C129PNKz5kczm+7N72xmqWa2yMzeM7Pi3ipnAeeYesRIFlTgSTSqT6jQyMolQCOgIXAO8ISZVfLmNQRuB+oB/wRqO+eaAa8A/8qwjupAM+AC4EUzS/CmnwVc5pxrb2adgdO85RoBTcysHaGE8l/nXEPvytYkMysLXAyc4ZxrAAzOIu7XgWRv/jJgQIZ5cV6c92aaDoCZnQpsz1BgAbTNkEj+L5t9BXAGmfalc+43YD2hQi7jdsoCLYAVwBgg2UtKg+3obiTLgbPDbFNERHJHeS+DbPIewPtAd6AVsAjIOD+77b0K/Ms51zDTum4CdjrnziaU027xtpudq4HJXo+XhsCSjDO9q5eNvHVtBEaaWTmgP3COc+4sYAGhK5M45w4Da7x1iRxFBZ4UNm2At51z6c65LcAM/ldszHfO/eQlhB+AL7zpywgltyPedc4dds6tBtYCdb3pU5xzv3qvO3s/iwklkbqEEt8y4Fzv7GNb59xOYCewD/i3mV0C7MkYsJmVAko752Z4k14D2mVYZIL378JMcR5RCdiaaVrGLpqPZvGZ3GhrZosJ7a+hzrkVzrklQA3gCaAMMN/M6sEfVwEPmFmJv7hdERHJmfJehnYQKvCuAt7OaXsWujeutHNupjf9jQzr6gxca2ZLgK+Asl57szMfuMHMBgJnOud+z2a5p4Bpzrn/EDppejowx9vOdUC1DMv+DFQOs00ppHRZV6LRCuCy4/hcxjN5hzO8P8zRvysu0+eOvN+dYZoBQ5xzL2XeiIXuUfsbMNjMpjrnBplZM6CTF3cPQl0jcxt3Oln/Tu8FErKYfiy+JdO+NLOSwCmEzhw2I1QsXpj5g865XYSS8AQzO0yozSu92UUJJXcREfnrlPeOlmXec85tNrODwLmE7hVvlYttZmaEruxNPmqiWfWsFnbOzfSuaF4AjDWzkZnvNzSz6wkVcD0ybGOKc+6qbGJIINRWkaPoCp5Eo2lAUTO79cgEM2vg9XefBVzh9Z0vT+iM4Ne5XH93M4vx7k+oAXyfxTKTCd2nVtzbfhUzq2BmlYE9zrk3CV3dOstbppRz7jNC97cd1d3CO9u5PcN9Bv8kdAb2WK0i6zOcx2IqkGjeqGZmFguMAMY65/Zk9yEza53h/oUihM5ArvPelwV+cc4dPM6YRETkaMp7RwuX9x4i1BUzPafteYOf7DCzNt70jPePTwbuMLN4r721zeyE7AIys2qEBh8bTagL7FmZ5jcBegPXeN0vAeYBrc2slrfMCXb0yKi1Cd32IHIUXcGTqOOcc2Z2MTDKzJIJXSlKI9RXfzbQktDIWQ643zujVze79WVhPaHkWBK43Tm3L3S/9lExfOF1SUz15u0CriF039oT3hWtg8AdQAngI++eBsPrX5/JdYTue0gk1D3mhmMN1jm320LDZtdyzq3JYfH+ZnZvhs+e7O3L583sQUInhT4DHshhPTWBFyzU+BjgU+ADb14H772IiOQB5b2jhct7zrm52Xwsu+3dAIwxM8f/urBCqEirDizyct1WoFuYsJKAPt4VxF1A5sdB9CB0S8N0b/8tcM7d7F3Ve9vMinrL9QdWmVlFYK9zbnOYbUohZc5lvuouItkxs7HAJ8659/2OJTe8xN/EOdc/AmKZAPR1zq3yOxYREQlPeS8ymVlP4Dfn3L/9jkUij67giRQCzrkPva6RvvK6a05UcSciIvkpUvJePtrB0YO+iPxBV/BERERERESihAZZERERERERiRIq8ERERERERKKECjwREREREZEooQJPREREREQkSqjAExERERERiRIq8ERERERERKLE/wNQk44kluVhpgAAAABJRU5ErkJggg==\n",
72 | "image/svg+xml": "\n\n\n\n",
73 | "text/plain": ""
74 | },
75 | "metadata": {
76 | "needs_background": "light"
77 | },
78 | "output_type": "display_data"
79 | }
80 | ],
81 | "source": [
82 | "plt.figure(figsize=(15,5))\n",
83 | "plt.subplot(1,2,1)\n",
84 | "marker = ['o', 's', 'd']\n",
85 | "for i, (key, results) in enumerate(experiments_results.items()):\n",
86 | " compression_flops = results[\"compression_flops\"]\n",
87 | " accuracy = results[\"accuracy\"]\n",
88 | "    # plot accuracy against FLOPs-based compression for this experiment\n",
89 | "    plt.plot(compression_flops, accuracy, marker=marker[i], label=key)\n",
90 | "plt.xlabel('Compression (FLOPs)')\n",
91 | "plt.ylabel('Accuracy')\n",
92 | "plt.title('LeNet-5 on MNIST: Accuracy vs theoretical speedup')\n",
93 | "plt.grid(True)\n",
94 | "plt.legend()\n",
95 | "\n",
96 | "plt.subplot(1,2,2)\n",
97 | "\n",
98 | "for i, (key, results) in enumerate(experiments_results.items()):\n",
99 | " compression_size = results[\"compression_size\"]\n",
100 | " accuracy = results[\"accuracy\"]\n",
101 | "    # plot accuracy against size-based compression for this experiment\n",
102 | "    plt.plot(compression_size, accuracy, marker=marker[i], label=key)\n",
103 | "plt.xlabel('Compression (Model size)')\n",
104 | "plt.ylabel('Accuracy')\n",
105 | "plt.title('LeNet-5 on MNIST: Accuracy vs model compression')\n",
106 | "plt.grid(True)\n",
107 | "plt.legend()\n",
108 | "\n",
109 | "plt.show()"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": []
118 | }
119 | ]
120 | }
121 |
--------------------------------------------------------------------------------