├── utils ├── progress │ ├── MANIFEST.in │ ├── demo.gif │ ├── LICENSE │ ├── setup.py │ ├── progress │ │ ├── spinner.py │ │ ├── counter.py │ │ ├── bar.py │ │ ├── helpers.py │ │ └── __init__.py │ ├── test_progress.py │ └── README.rst ├── images │ ├── cifar.png │ └── imagenet.png ├── test.txt ├── __init__.py ├── ramps.py ├── eval.py ├── misc.py ├── visualize.py ├── logger.py └── test.eps ├── models └── cifar │ ├── __init__.py │ └── resnet.py ├── loss ├── __init__.py └── kl_loss.py ├── scripts ├── Baseline_ResNet110.sh ├── Baseline_ResNet32.sh ├── ONE_ResNet110.sh └── ONE_ResNet32.sh ├── LICENSE ├── README.md ├── cifar_baseline.py └── cifar_one.py /utils/progress/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE 2 | -------------------------------------------------------------------------------- /models/cifar/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from .resnet import * 3 | -------------------------------------------------------------------------------- /utils/images/cifar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lan1991Xu/ONE_NeurIPS2018/HEAD/utils/images/cifar.png -------------------------------------------------------------------------------- /utils/progress/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lan1991Xu/ONE_NeurIPS2018/HEAD/utils/progress/demo.gif -------------------------------------------------------------------------------- /utils/images/imagenet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lan1991Xu/ONE_NeurIPS2018/HEAD/utils/images/imagenet.png -------------------------------------------------------------------------------- /loss/__init__.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from .kl_loss import KLLoss 3 | 4 | __all__ = [ 5 | 'KLLoss' 6 | ] 7 | -------------------------------------------------------------------------------- /utils/test.txt: -------------------------------------------------------------------------------- 1 | ONE ONE-E 2 | 31.180000 31.180000 3 | 27.280000 25.940000 4 | 26.680000 24.630000 5 | 26.800000 24.670000 6 | 26.470000 24.650000 7 | -------------------------------------------------------------------------------- /scripts/Baseline_ResNet110.sh: -------------------------------------------------------------------------------- 1 | python cifar_baseline.py -a resnet --dataset cifar100 --depth 110 --epochs 300 --gpu-id 5 --schedule 151 225 --gamma 0.1 --wd 1e-4 --checkpoint checkpoints/cifar100/baseline-32 2 | -------------------------------------------------------------------------------- /scripts/Baseline_ResNet32.sh: -------------------------------------------------------------------------------- 1 | python cifar_baseline.py -a resnet --dataset cifar100 --depth 32 --epochs 300 --gpu-id 5 --schedule 151 225 --gamma 0.1 --wd 1e-4 --checkpoint checkpoints/cifar100/baseline-32 2 | -------------------------------------------------------------------------------- /scripts/ONE_ResNet110.sh: -------------------------------------------------------------------------------- 1 | python cifar_one.py -a one_resnet --dataset cifar100 --depth 110 --epochs 300 --gpu-id 5 --schedule 151 225 --gamma 0.1 --wd 1e-4 --consistency_rampup 80 --checkpoint checkpoints/cifar100/ONE-32-rampup 2 | -------------------------------------------------------------------------------- /scripts/ONE_ResNet32.sh: -------------------------------------------------------------------------------- 1 | python cifar_one.py -a one_resnet --consistency_rampup 80 --dataset cifar100 --depth 32 --epochs 300 --consistency_rampup 80 --gpu-id 5 
--schedule 151 225 --gamma 0.1 --wd 1e-4 --checkpoint checkpoints/cifar100/ONE-32-rampup 2 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- 1 | """Useful utils 2 | """ 3 | from .misc import * 4 | from .logger import * 5 | from .visualize import * 6 | from .eval import * 7 | from .ramps import * 8 | # progress bar 9 | import os, sys 10 | sys.path.append(os.path.join(os.path.dirname(__file__), "progress")) 11 | from progress.bar import Bar as Bar 12 | -------------------------------------------------------------------------------- /utils/ramps.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def sigmoid_rampup(current, rampup_length): 5 | """Exponential rampup from https://arxiv.org/abs/1610.02242""" 6 | if rampup_length == 0: 7 | return 1.0 8 | else: 9 | current = np.clip(current, 0.0, rampup_length) 10 | phase = 1.0 - current / rampup_length 11 | return float(np.exp(-5.0 * phase * phase)) 12 | -------------------------------------------------------------------------------- /utils/eval.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, absolute_import 2 | 3 | __all__ = ['accuracy'] 4 | 5 | def accuracy(output, target, topk=(1,)): 6 | """Computes the precision@k for the specified values of k""" 7 | maxk = max(topk) 8 | batch_size = target.size(0) 9 | 10 | _, pred = output.topk(maxk, 1, True, True) 11 | pred = pred.t() 12 | correct = pred.eq(target.view(1, -1).expand_as(pred)) 13 | 14 | res = [] 15 | for k in topk: 16 | correct_k = correct[:k].view(-1).float().sum(0) 17 | res.append(correct_k.mul_(100.0 / batch_size)) 18 | return res -------------------------------------------------------------------------------- /loss/kl_loss.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import torch 4 | from torch import nn 5 | from torch.autograd import Variable 6 | import torch.nn.functional as F 7 | import pdb 8 | import numpy as np 9 | class KLLoss(nn.Module): 10 | def __init__(self): 11 | 12 | super(KLLoss, self).__init__() 13 | def forward(self, pred, label): 14 | # pred: 2D matrix (batch_size, num_classes) 15 | # label: 1D vector indicating class number 16 | T=3 17 | 18 | predict = F.log_softmax(pred/T,dim=1) 19 | target_data = F.softmax(label/T,dim=1) 20 | target_data =target_data+10**(-7) 21 | target = Variable(target_data.data.cuda(),requires_grad=False) 22 | loss=T*T*((target*(target.log()-predict)).sum(1).sum()/target.size()[0]) 23 | return loss 24 | 25 | -------------------------------------------------------------------------------- /utils/progress/LICENSE: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Giorgos Verigakis 2 | # 3 | # Permission to use, copy, modify, and distribute this software for any 4 | # purpose with or without fee is hereby granted, provided that the above 5 | # copyright notice and this permission notice appear in all copies. 6 | # 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
14 | -------------------------------------------------------------------------------- /utils/progress/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup 4 | 5 | import progress 6 | 7 | 8 | setup( 9 | name='progress', 10 | version=progress.__version__, 11 | description='Easy to use progress bars', 12 | long_description=open('README.rst').read(), 13 | author='Giorgos Verigakis', 14 | author_email='verigak@gmail.com', 15 | url='http://github.com/verigak/progress/', 16 | license='ISC', 17 | packages=['progress'], 18 | classifiers=[ 19 | 'Environment :: Console', 20 | 'Intended Audience :: Developers', 21 | 'License :: OSI Approved :: ISC License (ISCL)', 22 | 'Programming Language :: Python :: 2.6', 23 | 'Programming Language :: Python :: 2.7', 24 | 'Programming Language :: Python :: 3.3', 25 | 'Programming Language :: Python :: 3.4', 26 | 'Programming Language :: Python :: 3.5', 27 | 'Programming Language :: Python :: 3.6', 28 | ] 29 | ) 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Xu Lan 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /utils/progress/progress/spinner.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Giorgos Verigakis 4 | # 5 | # Permission to use, copy, modify, and distribute this software for any 6 | # purpose with or without fee is hereby granted, provided that the above 7 | # copyright notice and this permission notice appear in all copies. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | from __future__ import unicode_literals 18 | from . 
import Infinite 19 | from .helpers import WriteMixin 20 | 21 | 22 | class Spinner(WriteMixin, Infinite): 23 | message = '' 24 | phases = ('-', '\\', '|', '/') 25 | hide_cursor = True 26 | 27 | def update(self): 28 | i = self.index % len(self.phases) 29 | self.write(self.phases[i]) 30 | 31 | 32 | class PieSpinner(Spinner): 33 | phases = ['◷', '◶', '◵', '◴'] 34 | 35 | 36 | class MoonSpinner(Spinner): 37 | phases = ['◑', '◒', '◐', '◓'] 38 | 39 | 40 | class LineSpinner(Spinner): 41 | phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] 42 | 43 | class PixelSpinner(Spinner): 44 | phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] 45 | -------------------------------------------------------------------------------- /utils/progress/test_progress.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | 5 | import random 6 | import time 7 | 8 | from progress.bar import (Bar, ChargingBar, FillingSquaresBar, 9 | FillingCirclesBar, IncrementalBar, PixelBar, 10 | ShadyBar) 11 | from progress.spinner import (Spinner, PieSpinner, MoonSpinner, LineSpinner, 12 | PixelSpinner) 13 | from progress.counter import Counter, Countdown, Stack, Pie 14 | 15 | 16 | def sleep(): 17 | t = 0.01 18 | t += t * random.uniform(-0.1, 0.1) # Add some variance 19 | time.sleep(t) 20 | 21 | 22 | for bar_cls in (Bar, ChargingBar, FillingSquaresBar, FillingCirclesBar): 23 | suffix = '%(index)d/%(max)d [%(elapsed)d / %(eta)d / %(eta_td)s]' 24 | bar = bar_cls(bar_cls.__name__, suffix=suffix) 25 | for i in bar.iter(range(200)): 26 | sleep() 27 | 28 | for bar_cls in (IncrementalBar, PixelBar, ShadyBar): 29 | suffix = '%(percent)d%% [%(elapsed_td)s / %(eta)d / %(eta_td)s]' 30 | bar = bar_cls(bar_cls.__name__, suffix=suffix) 31 | for i in bar.iter(range(200)): 32 | sleep() 33 | 34 | for spin in (Spinner, PieSpinner, MoonSpinner, LineSpinner, PixelSpinner): 35 | for i in spin(spin.__name__ + ' ').iter(range(100)): 36 | 
sleep() 37 | print() 38 | 39 | for singleton in (Counter, Countdown, Stack, Pie): 40 | for i in singleton(singleton.__name__ + ' ').iter(range(100)): 41 | sleep() 42 | print() 43 | 44 | bar = IncrementalBar('Random', suffix='%(index)d') 45 | for i in range(100): 46 | bar.goto(random.randint(0, 100)) 47 | sleep() 48 | bar.finish() 49 | -------------------------------------------------------------------------------- /utils/progress/progress/counter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Giorgos Verigakis 4 | # 5 | # Permission to use, copy, modify, and distribute this software for any 6 | # purpose with or without fee is hereby granted, provided that the above 7 | # copyright notice and this permission notice appear in all copies. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | from __future__ import unicode_literals 18 | from . 
import Infinite, Progress 19 | from .helpers import WriteMixin 20 | 21 | 22 | class Counter(WriteMixin, Infinite): 23 | message = '' 24 | hide_cursor = True 25 | 26 | def update(self): 27 | self.write(str(self.index)) 28 | 29 | 30 | class Countdown(WriteMixin, Progress): 31 | hide_cursor = True 32 | 33 | def update(self): 34 | self.write(str(self.remaining)) 35 | 36 | 37 | class Stack(WriteMixin, Progress): 38 | phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') 39 | hide_cursor = True 40 | 41 | def update(self): 42 | nphases = len(self.phases) 43 | i = min(nphases - 1, int(self.progress * nphases)) 44 | self.write(self.phases[i]) 45 | 46 | 47 | class Pie(Stack): 48 | phases = ('○', '◔', '◑', '◕', '●') 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Knowledge Distillation by On the fly Native Ensemble (ONE)NeurIPS2018 2 | This is an [Pytorch](https://pytorch.org) implementation of [Xu et al. Knowledge Distillation On the Fly Native Ensemble (ONE) NeurIPS 2018](https://arxiv.org/pdf/1806.04606.pdf) on Python 2.7, Pytorch 2.0. 3 | You may refer to our [Vedio](http://www.eecs.qmul.ac.uk/~xl309/Doc/Publication/2018/NIPS2018/ONE-Slide-PPT.mp4) and [Poster](http://www.eecs.qmul.ac.uk/~xl309/Doc/Publication/2018/NIPS2018/Poster_landscape.pdf) for a quick overview. 4 | 5 | # ONE 6 | 7 | 8 | 9 | 10 | 11 | ## Getting Started 12 | 13 | ### Prerequisites: 14 | 15 | - Datasets: CIFAR100, CIFAR10 16 | - Python 2.7. 17 | - Pytorch version == 0.2.0. 18 | 19 | 20 | 21 | 22 | ## Running Experiments 23 | you may need change GPU-ID in scripts, “--gpu-id”, the default is 0. 24 | ### Training: 25 | 26 | 27 | For example, to train the ONE model using `ResNet-32` or `ResNet-110` on CIFAR100, run the the following scripts. 
 28 | ``` 29 | bash scripts/ONE_ResNet32.sh 30 | bash scripts/ONE_ResNet110.sh 31 | ``` 32 | To train baseline model using `ResNet-32` or `ResNet-110` on CIFAR100, run the following scripts. 33 | ``` 34 | bash scripts/Baseline_ResNet32.sh 35 | bash scripts/Baseline_ResNet110.sh 36 | ``` 37 | 38 | ## Tip for Stabilizing Model Training 39 | It may help to ramp up [https://arxiv.org/abs/1703.01780] the KL cost in the beginning over the first few epochs until the teacher network starts giving good predictions. 40 | ## Citation 41 | Please refer to the following if this repository is useful for your research. 42 | 43 | ### Bibtex: 44 | 45 | ``` 46 | @inproceedings{lan2018knowledge, 47 | title={Knowledge Distillation by On-the-Fly Native Ensemble}, 48 | author={Lan, Xu and Zhu, Xiatian and Gong, Shaogang}, 49 | booktitle={Advances in Neural Information Processing Systems}, 50 | pages={7527--7537}, 51 | year={2018} 52 | } 53 | ``` 54 | 55 | ## License 56 | 57 | This project is licensed under the MIT License - see the [LICENSE.md](https://github.com/Lan1991Xu/ONE_NeurIPS2018/blob/master/LICENSE) file for details. 58 | 59 | 60 | ## Acknowledgements 61 | 62 | This repository is partially built upon the [bearpaw/pytorch-classification](https://github.com/bearpaw/pytorch-classification) repository. 63 | -------------------------------------------------------------------------------- /utils/misc.py: -------------------------------------------------------------------------------- 1 | '''Some helper functions for PyTorch, including: 2 | - get_mean_and_std: calculate the mean and std value of dataset. 3 | - msr_init: net parameter initialization. 4 | - progress_bar: progress bar mimic xlua.progress.
5 | ''' 6 | import errno 7 | import os 8 | import sys 9 | import time 10 | import math 11 | 12 | import torch.nn as nn 13 | import torch.nn.init as init 14 | from torch.autograd import Variable 15 | 16 | __all__ = ['get_mean_and_std', 'init_params', 'mkdir_p', 'AverageMeter'] 17 | 18 | 19 | def get_mean_and_std(dataset): 20 | '''Compute the mean and std value of dataset.''' 21 | dataloader = trainloader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=True, num_workers=2) 22 | 23 | mean = torch.zeros(3) 24 | std = torch.zeros(3) 25 | print('==> Computing mean and std..') 26 | for inputs, targets in dataloader: 27 | for i in range(3): 28 | mean[i] += inputs[:,i,:,:].mean() 29 | std[i] += inputs[:,i,:,:].std() 30 | mean.div_(len(dataset)) 31 | std.div_(len(dataset)) 32 | return mean, std 33 | 34 | def init_params(net): 35 | '''Init layer parameters.''' 36 | for m in net.modules(): 37 | if isinstance(m, nn.Conv2d): 38 | init.kaiming_normal(m.weight, mode='fan_out') 39 | if m.bias: 40 | init.constant(m.bias, 0) 41 | elif isinstance(m, nn.BatchNorm2d): 42 | init.constant(m.weight, 1) 43 | init.constant(m.bias, 0) 44 | elif isinstance(m, nn.Linear): 45 | init.normal(m.weight, std=1e-3) 46 | if m.bias: 47 | init.constant(m.bias, 0) 48 | 49 | def mkdir_p(path): 50 | '''make dir if not exist''' 51 | try: 52 | os.makedirs(path) 53 | except OSError as exc: # Python >2.5 54 | if exc.errno == errno.EEXIST and os.path.isdir(path): 55 | pass 56 | else: 57 | raise 58 | 59 | class AverageMeter(object): 60 | """Computes and stores the average and current value 61 | Imported from https://github.com/pytorch/examples/blob/master/imagenet/main.py#L247-L262 62 | """ 63 | def __init__(self): 64 | self.reset() 65 | 66 | def reset(self): 67 | self.val = 0 68 | self.avg = 0 69 | self.sum = 0 70 | self.count = 0 71 | 72 | def update(self, val, n=1): 73 | self.val = val 74 | self.sum += val * n 75 | self.count += n 76 | self.avg = self.sum / self.count 
# ---- /utils/progress/progress/bar.py ----
# -*- coding: utf-8 -*-

# Copyright (c) 2012 Giorgos Verigakis
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import unicode_literals
from . import Progress
from .helpers import WritelnMixin


class Bar(WritelnMixin, Progress):
    """Classic text progress bar: ``message |#####     | suffix``."""
    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True

    def update(self):
        # The number of filled cells tracks overall progress.
        done = int(self.width * self.progress)
        segments = [
            self.message % self,
            self.bar_prefix,
            self.fill * done,
            self.empty_fill * (self.width - done),
            self.bar_suffix,
            self.suffix % self,
        ]
        self.writeln(''.join(segments))


class ChargingBar(Bar):
    """Bar drawn with solid blocks over dotted empty cells."""
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = '∙'
    fill = '█'


class FillingSquaresBar(ChargingBar):
    empty_fill = '▢'
    fill = '▣'


class FillingCirclesBar(ChargingBar):
    empty_fill = '◯'
    fill = '◉'


class IncrementalBar(Bar):
    """Bar whose leading cell steps through sub-cell phases for a smoother
    animation than one whole cell at a time."""
    phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')

    def update(self):
        exact = self.width * self.progress
        whole = int(exact)                              # fully filled cells
        step = int((exact - whole) * len(self.phases))  # phase of the edge cell
        edge = self.phases[step] if step > 0 else ''
        blanks = max(0, self.width - whole - len(edge))
        line = ''.join([
            self.message % self,
            self.bar_prefix,
            self.phases[-1] * whole,
            edge,
            self.empty_fill * blanks,
            self.bar_suffix,
            self.suffix % self,
        ])
        self.writeln(line)


class PixelBar(IncrementalBar):
    phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿')


class ShadyBar(IncrementalBar):
    phases = (' ', '░', '▒', '▓', '█')
-------------------------------------------------------------------------------- /utils/progress/progress/helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Giorgos Verigakis 2 | # 3 | # Permission to use, copy, modify, and distribute this software for any 4 | # purpose with or without fee is hereby granted, provided that the above 5 | # copyright notice and this permission notice appear in all copies. 6 | # 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
14 | 15 | from __future__ import print_function 16 | 17 | 18 | HIDE_CURSOR = '\x1b[?25l' 19 | SHOW_CURSOR = '\x1b[?25h' 20 | 21 | 22 | class WriteMixin(object): 23 | hide_cursor = False 24 | 25 | def __init__(self, message=None, **kwargs): 26 | super(WriteMixin, self).__init__(**kwargs) 27 | self._width = 0 28 | if message: 29 | self.message = message 30 | 31 | if self.file.isatty(): 32 | if self.hide_cursor: 33 | print(HIDE_CURSOR, end='', file=self.file) 34 | print(self.message, end='', file=self.file) 35 | self.file.flush() 36 | 37 | def write(self, s): 38 | if self.file.isatty(): 39 | b = '\b' * self._width 40 | c = s.ljust(self._width) 41 | print(b + c, end='', file=self.file) 42 | self._width = max(self._width, len(s)) 43 | self.file.flush() 44 | 45 | def finish(self): 46 | if self.file.isatty() and self.hide_cursor: 47 | print(SHOW_CURSOR, end='', file=self.file) 48 | 49 | 50 | class WritelnMixin(object): 51 | hide_cursor = False 52 | 53 | def __init__(self, message=None, **kwargs): 54 | super(WritelnMixin, self).__init__(**kwargs) 55 | if message: 56 | self.message = message 57 | 58 | if self.file.isatty() and self.hide_cursor: 59 | print(HIDE_CURSOR, end='', file=self.file) 60 | 61 | def clearln(self): 62 | if self.file.isatty(): 63 | print('\r\x1b[K', end='', file=self.file) 64 | 65 | def writeln(self, line): 66 | if self.file.isatty(): 67 | self.clearln() 68 | print(line, end='', file=self.file) 69 | self.file.flush() 70 | 71 | def finish(self): 72 | if self.file.isatty(): 73 | print(file=self.file) 74 | if self.hide_cursor: 75 | print(SHOW_CURSOR, end='', file=self.file) 76 | 77 | 78 | from signal import signal, SIGINT 79 | from sys import exit 80 | 81 | 82 | class SigIntMixin(object): 83 | """Registers a signal handler that calls finish on SIGINT""" 84 | 85 | def __init__(self, *args, **kwargs): 86 | super(SigIntMixin, self).__init__(*args, **kwargs) 87 | signal(SIGINT, self._sigint_handler) 88 | 89 | def _sigint_handler(self, signum, frame): 90 | 
self.finish() 91 | exit(0) 92 | -------------------------------------------------------------------------------- /utils/progress/README.rst: -------------------------------------------------------------------------------- 1 | Easy progress reporting for Python 2 | ================================== 3 | 4 | |pypi| 5 | 6 | |demo| 7 | 8 | .. |pypi| image:: https://img.shields.io/pypi/v/progress.svg 9 | .. |demo| image:: https://raw.github.com/verigak/progress/master/demo.gif 10 | :alt: Demo 11 | 12 | Bars 13 | ---- 14 | 15 | There are 7 progress bars to choose from: 16 | 17 | - ``Bar`` 18 | - ``ChargingBar`` 19 | - ``FillingSquaresBar`` 20 | - ``FillingCirclesBar`` 21 | - ``IncrementalBar`` 22 | - ``PixelBar`` 23 | - ``ShadyBar`` 24 | 25 | To use them, just call ``next`` to advance and ``finish`` to finish: 26 | 27 | .. code-block:: python 28 | 29 | from progress.bar import Bar 30 | 31 | bar = Bar('Processing', max=20) 32 | for i in range(20): 33 | # Do some work 34 | bar.next() 35 | bar.finish() 36 | 37 | The result will be a bar like the following: :: 38 | 39 | Processing |############# | 42/100 40 | 41 | To simplify the common case where the work is done in an iterator, you can 42 | use the ``iter`` method: 43 | 44 | .. code-block:: python 45 | 46 | for i in Bar('Processing').iter(it): 47 | # Do some work 48 | 49 | Progress bars are very customizable, you can change their width, their fill 50 | character, their suffix and more: 51 | 52 | .. 
code-block:: python 53 | 54 | bar = Bar('Loading', fill='@', suffix='%(percent)d%%') 55 | 56 | This will produce a bar like the following: :: 57 | 58 | Loading |@@@@@@@@@@@@@ | 42% 59 | 60 | You can use a number of template arguments in ``message`` and ``suffix``: 61 | 62 | ========== ================================ 63 | Name Value 64 | ========== ================================ 65 | index current value 66 | max maximum value 67 | remaining max - index 68 | progress index / max 69 | percent progress * 100 70 | avg simple moving average time per item (in seconds) 71 | elapsed elapsed time in seconds 72 | elapsed_td elapsed as a timedelta (useful for printing as a string) 73 | eta avg * remaining 74 | eta_td eta as a timedelta (useful for printing as a string) 75 | ========== ================================ 76 | 77 | Instead of passing all configuration options on instantiation, you can create 78 | your custom subclass: 79 | 80 | .. code-block:: python 81 | 82 | class FancyBar(Bar): 83 | message = 'Loading' 84 | fill = '*' 85 | suffix = '%(percent).1f%% - %(eta)ds' 86 | 87 | You can also override any of the arguments or create your own: 88 | 89 | .. code-block:: python 90 | 91 | class SlowBar(Bar): 92 | suffix = '%(remaining_hours)d hours remaining' 93 | @property 94 | def remaining_hours(self): 95 | return self.eta // 3600 96 | 97 | 98 | Spinners 99 | ======== 100 | 101 | For actions with an unknown number of steps you can use a spinner: 102 | 103 | ..
code-block:: python 104 | 105 | from progress.spinner import Spinner 106 | 107 | spinner = Spinner('Loading ') 108 | while state != 'FINISHED': 109 | # Do some work 110 | spinner.next() 111 | 112 | There are 5 predefined spinners: 113 | 114 | - ``Spinner`` 115 | - ``PieSpinner`` 116 | - ``MoonSpinner`` 117 | - ``LineSpinner`` 118 | - ``PixelSpinner`` 119 | 120 | 121 | Other 122 | ===== 123 | 124 | There are a number of other classes available too, please check the source or 125 | subclass one of them to create your own. 126 | 127 | 128 | License 129 | ======= 130 | 131 | progress is licensed under ISC 132 | -------------------------------------------------------------------------------- /utils/progress/progress/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Giorgos Verigakis 2 | # 3 | # Permission to use, copy, modify, and distribute this software for any 4 | # purpose with or without fee is hereby granted, provided that the above 5 | # copyright notice and this permission notice appear in all copies. 6 | # 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
14 | 15 | from __future__ import division 16 | 17 | from collections import deque 18 | from datetime import timedelta 19 | from math import ceil 20 | from sys import stderr 21 | from time import time 22 | 23 | 24 | __version__ = '1.3' 25 | 26 | 27 | class Infinite(object): 28 | file = stderr 29 | sma_window = 10 # Simple Moving Average window 30 | 31 | def __init__(self, *args, **kwargs): 32 | self.index = 0 33 | self.start_ts = time() 34 | self.avg = 0 35 | self._ts = self.start_ts 36 | self._xput = deque(maxlen=self.sma_window) 37 | for key, val in kwargs.items(): 38 | setattr(self, key, val) 39 | 40 | def __getitem__(self, key): 41 | if key.startswith('_'): 42 | return None 43 | return getattr(self, key, None) 44 | 45 | @property 46 | def elapsed(self): 47 | return int(time() - self.start_ts) 48 | 49 | @property 50 | def elapsed_td(self): 51 | return timedelta(seconds=self.elapsed) 52 | 53 | def update_avg(self, n, dt): 54 | if n > 0: 55 | self._xput.append(dt / n) 56 | self.avg = sum(self._xput) / len(self._xput) 57 | 58 | def update(self): 59 | pass 60 | 61 | def start(self): 62 | pass 63 | 64 | def finish(self): 65 | pass 66 | 67 | def next(self, n=1): 68 | now = time() 69 | dt = now - self._ts 70 | self.update_avg(n, dt) 71 | self._ts = now 72 | self.index = self.index + n 73 | self.update() 74 | 75 | def iter(self, it): 76 | try: 77 | for x in it: 78 | yield x 79 | self.next() 80 | finally: 81 | self.finish() 82 | 83 | 84 | class Progress(Infinite): 85 | def __init__(self, *args, **kwargs): 86 | super(Progress, self).__init__(*args, **kwargs) 87 | self.max = kwargs.get('max', 100) 88 | 89 | @property 90 | def eta(self): 91 | return int(ceil(self.avg * self.remaining)) 92 | 93 | @property 94 | def eta_td(self): 95 | return timedelta(seconds=self.eta) 96 | 97 | @property 98 | def percent(self): 99 | return self.progress * 100 100 | 101 | @property 102 | def progress(self): 103 | return min(1, self.index / self.max) 104 | 105 | @property 106 | def 
remaining(self): 107 | return max(self.max - self.index, 0) 108 | 109 | def start(self): 110 | self.update() 111 | 112 | def goto(self, index): 113 | incr = index - self.index 114 | self.next(incr) 115 | 116 | def iter(self, it): 117 | try: 118 | self.max = len(it) 119 | except TypeError: 120 | pass 121 | 122 | try: 123 | for x in it: 124 | yield x 125 | self.next() 126 | finally: 127 | self.finish() 128 | -------------------------------------------------------------------------------- /utils/visualize.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import torch 3 | import torch.nn as nn 4 | import torchvision 5 | import torchvision.transforms as transforms 6 | import numpy as np 7 | from .misc import * 8 | 9 | __all__ = ['make_image', 'show_batch', 'show_mask', 'show_mask_single'] 10 | 11 | # functions to show an image 12 | def make_image(img, mean=(0,0,0), std=(1,1,1)): 13 | for i in range(0, 3): 14 | img[i] = img[i] * std[i] + mean[i] # unnormalize 15 | npimg = img.numpy() 16 | return np.transpose(npimg, (1, 2, 0)) 17 | 18 | def gauss(x,a,b,c): 19 | return torch.exp(-torch.pow(torch.add(x,-b),2).div(2*c*c)).mul(a) 20 | 21 | def colorize(x): 22 | ''' Converts a one-channel grayscale image to a color heatmap image ''' 23 | if x.dim() == 2: 24 | torch.unsqueeze(x, 0, out=x) 25 | if x.dim() == 3: 26 | cl = torch.zeros([3, x.size(1), x.size(2)]) 27 | cl[0] = gauss(x,.5,.6,.2) + gauss(x,1,.8,.3) 28 | cl[1] = gauss(x,1,.5,.3) 29 | cl[2] = gauss(x,1,.2,.3) 30 | cl[cl.gt(1)] = 1 31 | elif x.dim() == 4: 32 | cl = torch.zeros([x.size(0), 3, x.size(2), x.size(3)]) 33 | cl[:,0,:,:] = gauss(x,.5,.6,.2) + gauss(x,1,.8,.3) 34 | cl[:,1,:,:] = gauss(x,1,.5,.3) 35 | cl[:,2,:,:] = gauss(x,1,.2,.3) 36 | return cl 37 | 38 | def show_batch(images, Mean=(2, 2, 2), Std=(0.5,0.5,0.5)): 39 | images = make_image(torchvision.utils.make_grid(images), Mean, Std) 40 | plt.imshow(images) 41 | plt.show() 42 | 43 | 44 | 
def show_mask_single(images, mask, Mean=(2, 2, 2), Std=(0.5,0.5,0.5)): 45 | im_size = images.size(2) 46 | 47 | # save for adding mask 48 | im_data = images.clone() 49 | for i in range(0, 3): 50 | im_data[:,i,:,:] = im_data[:,i,:,:] * Std[i] + Mean[i] # unnormalize 51 | 52 | images = make_image(torchvision.utils.make_grid(images), Mean, Std) 53 | plt.subplot(2, 1, 1) 54 | plt.imshow(images) 55 | plt.axis('off') 56 | 57 | # for b in range(mask.size(0)): 58 | # mask[b] = (mask[b] - mask[b].min())/(mask[b].max() - mask[b].min()) 59 | mask_size = mask.size(2) 60 | # print('Max %f Min %f' % (mask.max(), mask.min())) 61 | mask = (upsampling(mask, scale_factor=im_size/mask_size)) 62 | # mask = colorize(upsampling(mask, scale_factor=im_size/mask_size)) 63 | # for c in range(3): 64 | # mask[:,c,:,:] = (mask[:,c,:,:] - Mean[c])/Std[c] 65 | 66 | # print(mask.size()) 67 | mask = make_image(torchvision.utils.make_grid(0.3*im_data+0.7*mask.expand_as(im_data))) 68 | # mask = make_image(torchvision.utils.make_grid(0.3*im_data+0.7*mask), Mean, Std) 69 | plt.subplot(2, 1, 2) 70 | plt.imshow(mask) 71 | plt.axis('off') 72 | 73 | def show_mask(images, masklist, Mean=(2, 2, 2), Std=(0.5,0.5,0.5)): 74 | im_size = images.size(2) 75 | 76 | # save for adding mask 77 | im_data = images.clone() 78 | for i in range(0, 3): 79 | im_data[:,i,:,:] = im_data[:,i,:,:] * Std[i] + Mean[i] # unnormalize 80 | 81 | images = make_image(torchvision.utils.make_grid(images), Mean, Std) 82 | plt.subplot(1+len(masklist), 1, 1) 83 | plt.imshow(images) 84 | plt.axis('off') 85 | 86 | for i in range(len(masklist)): 87 | mask = masklist[i].data.cpu() 88 | # for b in range(mask.size(0)): 89 | # mask[b] = (mask[b] - mask[b].min())/(mask[b].max() - mask[b].min()) 90 | mask_size = mask.size(2) 91 | # print('Max %f Min %f' % (mask.max(), mask.min())) 92 | mask = (upsampling(mask, scale_factor=im_size/mask_size)) 93 | # mask = colorize(upsampling(mask, scale_factor=im_size/mask_size)) 94 | # for c in range(3): 95 | # 
mask[:,c,:,:] = (mask[:,c,:,:] - Mean[c])/Std[c] 96 | 97 | # print(mask.size()) 98 | mask = make_image(torchvision.utils.make_grid(0.3*im_data+0.7*mask.expand_as(im_data))) 99 | # mask = make_image(torchvision.utils.make_grid(0.3*im_data+0.7*mask), Mean, Std) 100 | plt.subplot(1+len(masklist), 1, i+2) 101 | plt.imshow(mask) 102 | plt.axis('off') 103 | 104 | 105 | 106 | # x = torch.zeros(1, 3, 3) 107 | # out = colorize(x) 108 | # out_im = make_image(out) 109 | # plt.imshow(out_im) 110 | # plt.show() -------------------------------------------------------------------------------- /utils/logger.py: -------------------------------------------------------------------------------- 1 | # A simple torch style logger 2 | # (C) Wei YANG 2017 3 | from __future__ import absolute_import 4 | import matplotlib.pyplot as plt 5 | plt.switch_backend('agg') 6 | import os 7 | import sys 8 | import numpy as np 9 | import pdb 10 | __all__ = ['Logger', 'LoggerMonitor', 'savefig'] 11 | 12 | def savefig(fname, dpi=None): 13 | dpi = 150 if dpi == None else dpi 14 | plt.savefig(fname, dpi=dpi) 15 | 16 | def plot_overlap(logger, names=None): 17 | names = logger.names if names == None else names 18 | numbers = logger.numbers 19 | for _, name in enumerate(names): 20 | x = np.arange(len(numbers[name])) 21 | plt.plot(x, np.asarray(numbers[name])) 22 | return [logger.title + '(' + name + ')' for name in names] 23 | 24 | class Logger(object): 25 | '''Save training process to log file with simple plot function.''' 26 | def __init__(self, fpath, title=None, resume=False): 27 | self.file = None 28 | self.resume = resume 29 | self.title = '' if title == None else title 30 | if fpath is not None: 31 | if resume: 32 | self.file = open(fpath, 'r') 33 | name = self.file.readline() 34 | self.names = name.rstrip().split('\t') 35 | self.numbers = {} 36 | for _, name in enumerate(self.names): 37 | self.numbers[name] = [] 38 | 39 | for numbers in self.file: 40 | numbers = numbers.rstrip().split('\t') 41 | for i 
in range(0, len(numbers)): 42 | self.numbers[self.names[i]].append(numbers[i]) 43 | self.file.close() 44 | self.file = open(fpath, 'a') 45 | else: 46 | self.file = open(fpath, 'w') 47 | 48 | def set_names(self, names): 49 | if self.resume: 50 | pass 51 | # initialize numbers as empty list 52 | self.numbers = {} 53 | self.names = names 54 | for _, name in enumerate(self.names): 55 | self.file.write(name) 56 | self.file.write('\t') 57 | self.numbers[name] = [] 58 | self.file.write('\n') 59 | self.file.flush() 60 | 61 | 62 | def append(self, numbers): 63 | assert len(self.names) == len(numbers), 'Numbers do not match names' 64 | for index, num in enumerate(numbers): 65 | self.file.write("{0:.6f}".format(num)) 66 | self.file.write('\t') 67 | self.numbers[self.names[index]].append(num) 68 | self.file.write('\n') 69 | self.file.flush() 70 | 71 | def plot(self, names=None): 72 | names = self.names if names == None else names 73 | numbers = self.numbers 74 | #pdb.set_trace() 75 | for _, name in enumerate(names): 76 | x = np.arange(len(numbers[name])) 77 | plt.plot(x, np.asarray(numbers[name])) 78 | plt.legend(['(' + name + ')' for name in names]) 79 | plt.grid(True) 80 | 81 | def close(self): 82 | if self.file is not None: 83 | self.file.close() 84 | 85 | class LoggerMonitor(object): 86 | '''Load and visualize multiple logs.''' 87 | def __init__ (self, paths): 88 | '''paths is a distionary with {name:filepath} pair''' 89 | self.loggers = [] 90 | for title, path in paths.items(): 91 | logger = Logger(path, title=title, resume=True) 92 | self.loggers.append(logger) 93 | 94 | def plot(self, names=None): 95 | plt.figure() 96 | plt.subplot(121) 97 | legend_text = [] 98 | for logger in self.loggers: 99 | legend_text += plot_overlap(logger, names) 100 | loc = 'best' 101 | plt.legend(legend_text, loc='upper right', ncol=2) 102 | 103 | #plt.legend(legend_text, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.) 
104 | plt.grid(True) 105 | 106 | if __name__ == '__main__': 107 | # # Example 108 | # logger = Logger('test.txt') 109 | # logger.set_names(['Train loss', 'Valid loss','Test loss']) 110 | # 111 | # length = 100 112 | # t = np.arange(length) 113 | # # train_loss = np.exp(-t / 10.0) + np.random.rand(length) * 0.1 114 | # # valid_loss = np.exp(-t / 10.0) + np.random.rand(length) * 0.1 115 | # # test_loss = np.exp(-t / 10.0) + np.random.rand(length) * 0.1 116 | # # 117 | # # for i in range(0, length): 118 | # # logger.append([train_loss[i], valid_loss[i], test_loss[i]]) 119 | # # logger.plot() 120 | # 121 | # #Example: logger monitor 122 | paths = { 123 | 'resadvnet20': '/home/wyang/code/pytorch-classification/checkpoint/cifar10/resadvnet20/log.txt', 124 | 'resadvnet32': '/home/wyang/code/pytorch-classification/checkpoint/cifar10/resadvnet32/log.txt', 125 | 'resadvnet44': '/home/wyang/code/pytorch-classification/checkpoint/cifar10/resadvnet44/log.txt', 126 | } 127 | 128 | field = ['Valid Acc.'] 129 | 130 | monitor = LoggerMonitor(paths) 131 | monitor.plot(names=field) 132 | savefig('test.eps') -------------------------------------------------------------------------------- /models/cifar/resnet.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | '''Resnet for cifar dataset. 
4 | Ported form 5 | https://github.com/facebook/fb.resnet.torch 6 | and 7 | https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py 8 | (c) YANG, Wei 9 | ''' 10 | import torch.nn as nn 11 | import math 12 | import pdb 13 | from torch.nn import init 14 | from torch.nn import functional as F 15 | __all__ = ['resnet','one_resnet'] 16 | 17 | def conv3x3(in_planes, out_planes, stride=1): 18 | "3x3 convolution with padding" 19 | return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, 20 | padding=1, bias=False) 21 | 22 | def weights_init_classifier(m): 23 | classname = m.__class__.__name__ 24 | if classname.find('Linear') != -1: 25 | init.normal(m.weight.data, std=0.001) 26 | init.constant(m.bias.data, 0.0) 27 | def weights_init_kaiming(m): 28 | classname = m.__class__.__name__ 29 | # print(classname) 30 | if classname.find('Conv') != -1: 31 | init.kaiming_normal(m.weight.data, a=0, mode='fan_in') 32 | elif classname.find('Linear') != -1: 33 | init.kaiming_normal(m.weight.data, a=0, mode='fan_out') 34 | init.constant(m.bias.data, 0.0) 35 | elif classname.find('BatchNorm1d') != -1: 36 | init.normal(m.weight.data, 1.0, 0.02) 37 | init.constant(m.bias.data, 0.0) 38 | class BasicBlock(nn.Module): 39 | expansion = 1 40 | 41 | def __init__(self, inplanes, planes, stride=1, downsample=None): 42 | super(BasicBlock, self).__init__() 43 | self.conv1 = conv3x3(inplanes, planes, stride) 44 | self.bn1 = nn.BatchNorm2d(planes) 45 | self.relu = nn.ReLU(inplace=True) 46 | self.conv2 = conv3x3(planes, planes) 47 | self.bn2 = nn.BatchNorm2d(planes) 48 | self.downsample = downsample 49 | self.stride = stride 50 | 51 | def forward(self, x): 52 | residual = x 53 | 54 | out = self.conv1(x) 55 | out = self.bn1(out) 56 | out = self.relu(out) 57 | 58 | out = self.conv2(out) 59 | out = self.bn2(out) 60 | 61 | if self.downsample is not None: 62 | residual = self.downsample(x) 63 | 64 | out += residual 65 | out = self.relu(out) 66 | 67 | return out 68 | 69 | 70 | 
class Bottleneck(nn.Module): 71 | expansion = 4 72 | 73 | def __init__(self, inplanes, planes, stride=1, downsample=None): 74 | super(Bottleneck, self).__init__() 75 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) 76 | self.bn1 = nn.BatchNorm2d(planes) 77 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, 78 | padding=1, bias=False) 79 | self.bn2 = nn.BatchNorm2d(planes) 80 | self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) 81 | self.bn3 = nn.BatchNorm2d(planes * 4) 82 | self.relu = nn.ReLU(inplace=True) 83 | self.downsample = downsample 84 | self.stride = stride 85 | 86 | def forward(self, x): 87 | residual = x 88 | 89 | out = self.conv1(x) 90 | out = self.bn1(out) 91 | out = self.relu(out) 92 | 93 | out = self.conv2(out) 94 | out = self.bn2(out) 95 | out = self.relu(out) 96 | 97 | out = self.conv3(out) 98 | out = self.bn3(out) 99 | 100 | if self.downsample is not None: 101 | residual = self.downsample(x) 102 | 103 | out += residual 104 | out = self.relu(out) 105 | 106 | return out 107 | 108 | 109 | class ResNet(nn.Module): 110 | def __init__(self, depth, num_classes=1000): 111 | super(ResNet, self).__init__() 112 | # Model type specifies number of layers for CIFAR-10 model 113 | assert (depth - 2) % 6 == 0, 'depth should be 6n+2' 114 | n = (depth - 2) // 6 115 | block = Bottleneck if depth >=44 else BasicBlock 116 | 117 | self.inplanes = 16 118 | self.conv1 = nn.Conv2d(3, 16, kernel_size=3, padding=1, 119 | bias=False) 120 | self.bn1 = nn.BatchNorm2d(16) 121 | self.relu = nn.ReLU(inplace=True) 122 | self.layer1 = self._make_layer(block, 16, n) 123 | self.layer2 = self._make_layer(block, 32, n, stride=2) 124 | self.layer3 = self._make_layer(block, 64, n, stride=2) 125 | self.avgpool = nn.AvgPool2d(8) 126 | self.fc = nn.Linear(64 * block.expansion, num_classes) 127 | for m in self.modules(): 128 | if isinstance(m, nn.Conv2d): 129 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 130 | 
m.weight.data.normal_(0, math.sqrt(2. / n)) 131 | elif isinstance(m, nn.BatchNorm2d): 132 | m.weight.data.fill_(1) 133 | m.bias.data.zero_() 134 | 135 | def _make_layer(self, block, planes, blocks, stride=1): 136 | downsample = None 137 | if stride != 1 or self.inplanes != planes * block.expansion: 138 | downsample = nn.Sequential( 139 | nn.Conv2d(self.inplanes, planes * block.expansion, 140 | kernel_size=1, stride=stride, bias=False), 141 | nn.BatchNorm2d(planes * block.expansion), 142 | ) 143 | 144 | layers = [] 145 | layers.append(block(self.inplanes, planes, stride, downsample)) 146 | self.inplanes = planes * block.expansion 147 | for i in range(1, blocks): 148 | layers.append(block(self.inplanes, planes)) 149 | 150 | return nn.Sequential(*layers) 151 | 152 | def forward(self, x): 153 | x = self.conv1(x) 154 | x = self.bn1(x) 155 | x = self.relu(x) # 32x32 156 | x = self.layer1(x) # 32x32 157 | x = self.layer2(x) # 16x16 158 | x = self.layer3(x) # 8x8 159 | x = self.avgpool(x) 160 | x = x.view(x.size(0), -1) 161 | x = self.fc(x) 162 | 163 | return x 164 | 165 | class ONE_ResNet(nn.Module): 166 | 167 | def __init__(self, depth, num_classes=1000): 168 | super(ONE_ResNet, self).__init__() 169 | # Model type specifies number of layers for CIFAR-10 model 170 | assert (depth - 2) % 6 == 0, 'depth should be 6n+2' 171 | n = (depth - 2) // 6 172 | 173 | block = Bottleneck if depth >=44 else BasicBlock 174 | 175 | self.inplanes = 16 176 | self.conv1 = nn.Conv2d(3, 16, kernel_size=3, padding=1, 177 | bias=False) 178 | self.bn1 = nn.BatchNorm2d(16) 179 | self.relu = nn.ReLU(inplace=True) 180 | self.layer1 = self._make_layer(block, 16, n) 181 | self.layer2 = self._make_layer(block, 32, n, stride=2) 182 | 183 | fix_inplanes=self.inplanes 184 | self.layer3_1 = self._make_layer(block, 64, n, stride=2) 185 | self.inplanes = fix_inplanes ##reuse self.inplanes 186 | self.layer3_2 = self._make_layer(block, 64, n, stride=2) 187 | self.inplanes = fix_inplanes 188 | self.layer3_3 = 
self._make_layer(block, 64, n, stride=2) 189 | 190 | 191 | self.control_v1 = nn.Linear(fix_inplanes, 3) 192 | self.bn_v1 = nn.BatchNorm1d(3) 193 | 194 | self.avgpool = nn.AvgPool2d(8) 195 | 196 | self.avgpool_c = nn.AvgPool2d(16) 197 | 198 | self.classfier3_1=nn.Linear(64 * block.expansion, num_classes) 199 | self.classfier3_2=nn.Linear(64 * block.expansion,num_classes) 200 | self.classfier3_3=nn.Linear(64 * block.expansion, num_classes) 201 | for m in self.modules(): 202 | if isinstance(m, nn.Conv2d): 203 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 204 | m.weight.data.normal_(0, math.sqrt(2. / n)) 205 | elif isinstance(m, nn.BatchNorm2d): 206 | m.weight.data.fill_(1) 207 | m.bias.data.zero_() 208 | 209 | def _make_layer(self, block, planes, blocks, stride=1): 210 | downsample = None 211 | if stride != 1 or self.inplanes != planes * block.expansion: 212 | downsample = nn.Sequential( 213 | nn.Conv2d(self.inplanes, planes * block.expansion, 214 | kernel_size=1, stride=stride, bias=False), 215 | nn.BatchNorm2d(planes * block.expansion), 216 | ) 217 | 218 | layers = [] 219 | layers.append(block(self.inplanes, planes, stride, downsample)) 220 | self.inplanes = planes * block.expansion 221 | for i in range(1, blocks): 222 | layers.append(block(self.inplanes, planes)) 223 | 224 | return nn.Sequential(*layers) 225 | 226 | 227 | return x 228 | def forward(self, x): 229 | 230 | x = self.conv1(x) 231 | x = self.bn1(x) 232 | x = self.relu(x) # 32x32 233 | 234 | x = self.layer1(x) # 32x32 235 | x = self.layer2(x) # 16x16 236 | 237 | x_c = self.avgpool_c(x) 238 | x_c = x_c.view(x_c.size(0), -1) 239 | 240 | x_c=self.control_v1(x_c) 241 | x_c=self.bn_v1(x_c) 242 | x_c=F.relu(x_c) 243 | x_c = F.softmax(x_c,dim=1) 244 | 245 | x_3_1 = self.layer3_1(x) # 8x8 246 | x_3_2 = self.layer3_2(x) 247 | x_3_3 = self.layer3_3(x) 248 | 249 | 250 | x_3_1 = self.avgpool(x_3_1) 251 | x_3_1 = x_3_1.view(x_3_1.size(0), -1) 252 | x_3_2 = self.avgpool(x_3_2) 253 | x_3_2 = 
x_3_2.view(x_3_2.size(0), -1) 254 | x_3_3 = self.avgpool(x_3_3) 255 | x_3_3 = x_3_3.view(x_3_3.size(0), -1) 256 | 257 | x_3_1 = self.classfier3_1(x_3_1) 258 | x_3_2 = self.classfier3_2(x_3_2) 259 | x_3_3 = self.classfier3_3(x_3_3) 260 | x_c_1=x_c[:,0].repeat(x_3_1.size()[1], 1).transpose(0,1) 261 | x_c_2=x_c[:,1].repeat(x_3_1.size()[1], 1).transpose(0,1) 262 | x_c_3=x_c[:,2].repeat(x_3_1.size()[1], 1).transpose(0,1) 263 | x_m=x_c_1*x_3_1+x_c_2*x_3_2+x_c_3*x_3_3 264 | return x_3_1,x_3_2,x_3_3,x_m 265 | 266 | def resnet(**kwargs): 267 | """ 268 | Constructs a ResNet model. 269 | """ 270 | return ResNet(**kwargs) 271 | 272 | def one_resnet(**kwargs): 273 | """ 274 | Constructs a ResNet model. 275 | """ 276 | return ONE_ResNet(**kwargs) -------------------------------------------------------------------------------- /cifar_baseline.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Training script for CIFAR-10/100 3 | Copyright (c) Wei YANG, 2017 4 | ''' 5 | from __future__ import print_function 6 | 7 | import argparse 8 | import os 9 | import shutil 10 | import time 11 | import random 12 | 13 | import torch 14 | import torch.nn as nn 15 | import torch.nn.parallel 16 | import torch.backends.cudnn as cudnn 17 | import torch.optim as optim 18 | import torch.utils.data as data 19 | import torchvision.transforms as transforms 20 | import torchvision.datasets as datasets 21 | import models.cifar as models 22 | 23 | from utils import Bar, Logger, AverageMeter, accuracy, mkdir_p, savefig 24 | 25 | 26 | model_names = sorted(name for name in models.__dict__ 27 | if name.islower() and not name.startswith("__") 28 | and callable(models.__dict__[name])) 29 | 30 | parser = argparse.ArgumentParser(description='PyTorch CIFAR10/100 Training') 31 | # Datasets 32 | parser.add_argument('-d', '--dataset', default='cifar10', type=str) 33 | parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', 34 | help='number of data loading 
workers (default: 4)') 35 | # Optimization options 36 | parser.add_argument('--epochs', default=300, type=int, metavar='N', 37 | help='number of total epochs to run') 38 | parser.add_argument('--start-epoch', default=0, type=int, metavar='N', 39 | help='manual epoch number (useful on restarts)') 40 | parser.add_argument('--train-batch', default=128, type=int, metavar='N', 41 | help='train batchsize') 42 | parser.add_argument('--test-batch', default=100, type=int, metavar='N', 43 | help='test batchsize') 44 | parser.add_argument('--lr', '--learning-rate', default=0.1, type=float, 45 | metavar='LR', help='initial learning rate') 46 | parser.add_argument('--drop', '--dropout', default=0, type=float, 47 | metavar='Dropout', help='Dropout ratio') 48 | parser.add_argument('--schedule', type=int, nargs='+', default=[150, 225], 49 | help='Decrease learning rate at these epochs.') 50 | parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma on schedule.') 51 | parser.add_argument('--momentum', default=0.9, type=float, metavar='M', 52 | help='momentum') 53 | parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float, 54 | metavar='W', help='weight decay (default: 1e-4)') 55 | # Checkpoints 56 | parser.add_argument('-c', '--checkpoint', default='checkpoint', type=str, metavar='PATH', 57 | help='path to save checkpoint (default: checkpoint)') 58 | parser.add_argument('--resume', default='', type=str, metavar='PATH', 59 | help='path to latest checkpoint (default: none)') 60 | # Architecture 61 | parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet20', 62 | choices=model_names, 63 | help='model architecture: ' + 64 | ' | '.join(model_names) + 65 | ' (default: resnet18)') 66 | parser.add_argument('--depth', type=int, default=29, help='Model depth.') 67 | parser.add_argument('--cardinality', type=int, default=8, help='Model cardinality (group).') 68 | parser.add_argument('--widen-factor', type=int, default=4, help='Widen 
factor. 4 -> 64, 8 -> 128, ...') 69 | parser.add_argument('--growthRate', type=int, default=12, help='Growth rate for DenseNet.') 70 | parser.add_argument('--compressionRate', type=int, default=2, help='Compression Rate (theta) for DenseNet.') 71 | # Miscs 72 | parser.add_argument('--manualSeed', type=int, help='manual seed') 73 | parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true', 74 | help='evaluate model on validation set') 75 | #Device options 76 | parser.add_argument('--gpu-id', default='0', type=str, 77 | help='id(s) for CUDA_VISIBLE_DEVICES') 78 | 79 | args = parser.parse_args() 80 | state = {k: v for k, v in args._get_kwargs()} 81 | 82 | # Validate dataset 83 | assert args.dataset == 'cifar10' or args.dataset == 'cifar100', 'Dataset can only be cifar10 or cifar100.' 84 | 85 | # Use CUDA 86 | os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id 87 | use_cuda = torch.cuda.is_available() 88 | 89 | # Random seed 90 | if args.manualSeed is None: 91 | args.manualSeed = random.randint(1, 10000) 92 | random.seed(args.manualSeed) 93 | torch.manual_seed(args.manualSeed) 94 | if use_cuda: 95 | torch.cuda.manual_seed_all(args.manualSeed) 96 | 97 | best_acc = 0 # best test accuracy 98 | 99 | def main(): 100 | global best_acc 101 | start_epoch = args.start_epoch # start from epoch 0 or last checkpoint epoch 102 | 103 | if not os.path.isdir(args.checkpoint): 104 | mkdir_p(args.checkpoint) 105 | 106 | 107 | 108 | # Data 109 | print('==> Preparing dataset %s' % args.dataset) 110 | transform_train = transforms.Compose([ 111 | transforms.RandomCrop(32, padding=4), 112 | transforms.RandomHorizontalFlip(), 113 | transforms.ToTensor(), 114 | transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)), 115 | ]) 116 | 117 | transform_test = transforms.Compose([ 118 | transforms.ToTensor(), 119 | transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)), 120 | ]) 121 | if args.dataset == 'cifar10': 122 | dataloader = 
datasets.CIFAR10 123 | num_classes = 10 124 | else: 125 | dataloader = datasets.CIFAR100 126 | num_classes = 100 127 | 128 | 129 | trainset = dataloader(root='./data', train=True, download=True, transform=transform_train) 130 | trainloader = data.DataLoader(trainset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers) 131 | 132 | testset = dataloader(root='./data', train=False, download=False, transform=transform_test) 133 | testloader = data.DataLoader(testset, batch_size=args.test_batch, shuffle=False, num_workers=args.workers) 134 | 135 | # Model 136 | print("==> creating model '{}'".format(args.arch)) 137 | if args.arch.startswith('resnext'): 138 | model = models.__dict__[args.arch]( 139 | cardinality=args.cardinality, 140 | num_classes=num_classes, 141 | depth=args.depth, 142 | widen_factor=args.widen_factor, 143 | dropRate=args.drop, 144 | ) 145 | elif args.arch.startswith('densenet'): 146 | model = models.__dict__[args.arch]( 147 | num_classes=num_classes, 148 | depth=args.depth, 149 | growthRate=args.growthRate, 150 | compressionRate=args.compressionRate, 151 | dropRate=args.drop, 152 | ) 153 | elif args.arch.startswith('wrn'): 154 | model = models.__dict__[args.arch]( 155 | num_classes=num_classes, 156 | depth=args.depth, 157 | widen_factor=args.widen_factor, 158 | dropRate=args.drop, 159 | ) 160 | elif args.arch.endswith('resnet'): 161 | model = models.__dict__[args.arch]( 162 | num_classes=num_classes, 163 | depth=args.depth, 164 | ) 165 | else: 166 | model = models.__dict__[args.arch](num_classes=num_classes) 167 | 168 | model = torch.nn.DataParallel(model).cuda() 169 | cudnn.benchmark = True 170 | print(' Total params: %.2fM' % (sum(p.numel() for p in model.parameters())/1000000.0)) 171 | 172 | criterion = nn.CrossEntropyLoss(dim=1) 173 | optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) 174 | 175 | # Resume 176 | title = 'cifar-10-' + args.arch 177 | if args.resume: 178 | # 
Load checkpoint. 179 | print('==> Resuming from checkpoint..') 180 | assert os.path.isfile(args.resume), 'Error: no checkpoint directory found!' 181 | args.checkpoint = os.path.dirname(args.resume) 182 | checkpoint = torch.load(args.resume) 183 | best_acc = checkpoint['best_acc'] 184 | start_epoch = checkpoint['epoch'] 185 | model.load_state_dict(checkpoint['state_dict']) 186 | optimizer.load_state_dict(checkpoint['optimizer']) 187 | logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title, resume=True) 188 | else: 189 | logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title) 190 | logger.set_names(['Learning Rate', 'Train Loss', 'Valid Loss', 'Train Acc.', 'Valid Acc.']) 191 | 192 | 193 | if args.evaluate: 194 | print('\nEvaluation only') 195 | test_loss, test_acc = test(testloader, model, criterion, start_epoch, use_cuda) 196 | print(' Test Loss: %.8f, Test Acc: %.2f' % (test_loss, test_acc)) 197 | return 198 | 199 | # Train and val 200 | for epoch in range(start_epoch, args.epochs): 201 | adjust_learning_rate(optimizer, epoch) 202 | 203 | print('\nEpoch: [%d | %d] LR: %f' % (epoch + 1, args.epochs, state['lr'])) 204 | 205 | train_loss, train_acc = train(trainloader, model, criterion, optimizer, epoch, use_cuda) 206 | test_loss, test_acc = test(testloader, model, criterion, epoch, use_cuda) 207 | 208 | # append logger file 209 | logger.append([state['lr'], train_loss, test_loss, train_acc, test_acc]) 210 | 211 | # save model 212 | is_best = test_acc > best_acc 213 | best_acc = max(test_acc, best_acc) 214 | save_checkpoint({ 215 | 'epoch': epoch + 1, 216 | 'state_dict': model.state_dict(), 217 | 'acc': test_acc, 218 | 'best_acc': best_acc, 219 | 'optimizer' : optimizer.state_dict(), 220 | }, is_best, checkpoint=args.checkpoint) 221 | 222 | logger.close() 223 | logger.plot() 224 | savefig(os.path.join(args.checkpoint, 'log.eps')) 225 | 226 | print('Best acc:') 227 | print(best_acc) 228 | 229 | def train(trainloader, model, criterion, 
optimizer, epoch, use_cuda): 230 | # switch to train mode 231 | model.train() 232 | 233 | batch_time = AverageMeter() 234 | data_time = AverageMeter() 235 | losses = AverageMeter() 236 | top1 = AverageMeter() 237 | top5 = AverageMeter() 238 | end = time.time() 239 | 240 | bar = Bar('Processing', max=len(trainloader)) 241 | for batch_idx, (inputs, targets) in enumerate(trainloader): 242 | # measure data loading time 243 | data_time.update(time.time() - end) 244 | 245 | if use_cuda: 246 | inputs, targets = inputs.cuda(), targets.cuda(async=True) 247 | inputs, targets = torch.autograd.Variable(inputs), torch.autograd.Variable(targets) 248 | 249 | # compute output 250 | outputs = model(inputs) 251 | loss = criterion(outputs, targets) 252 | 253 | # measure accuracy and record loss 254 | prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5)) 255 | losses.update(loss.data[0], inputs.size(0)) 256 | top1.update(prec1[0], inputs.size(0)) 257 | top5.update(prec5[0], inputs.size(0)) 258 | 259 | # compute gradient and do SGD step 260 | optimizer.zero_grad() 261 | loss.backward() 262 | optimizer.step() 263 | 264 | # measure elapsed time 265 | batch_time.update(time.time() - end) 266 | end = time.time() 267 | 268 | # plot progress 269 | bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format( 270 | batch=batch_idx + 1, 271 | size=len(trainloader), 272 | data=data_time.avg, 273 | bt=batch_time.avg, 274 | total=bar.elapsed_td, 275 | eta=bar.eta_td, 276 | loss=losses.avg, 277 | top1=top1.avg, 278 | top5=top5.avg, 279 | ) 280 | bar.next() 281 | bar.finish() 282 | return (losses.avg, top1.avg) 283 | 284 | def test(testloader, model, criterion, epoch, use_cuda): 285 | global best_acc 286 | 287 | batch_time = AverageMeter() 288 | data_time = AverageMeter() 289 | losses = AverageMeter() 290 | top1 = AverageMeter() 291 | top5 = AverageMeter() 292 | 293 | # switch to 
evaluate mode 294 | model.eval() 295 | 296 | end = time.time() 297 | bar = Bar('Processing', max=len(testloader)) 298 | for batch_idx, (inputs, targets) in enumerate(testloader): 299 | # measure data loading time 300 | data_time.update(time.time() - end) 301 | 302 | if use_cuda: 303 | inputs, targets = inputs.cuda(), targets.cuda() 304 | inputs, targets = torch.autograd.Variable(inputs, volatile=True), torch.autograd.Variable(targets) 305 | 306 | # compute output 307 | outputs = model(inputs) 308 | loss = criterion(outputs, targets) 309 | 310 | # measure accuracy and record loss 311 | prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5)) 312 | losses.update(loss.data[0], inputs.size(0)) 313 | top1.update(prec1[0], inputs.size(0)) 314 | top5.update(prec5[0], inputs.size(0)) 315 | 316 | # measure elapsed time 317 | batch_time.update(time.time() - end) 318 | end = time.time() 319 | 320 | # plot progress 321 | bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format( 322 | batch=batch_idx + 1, 323 | size=len(testloader), 324 | data=data_time.avg, 325 | bt=batch_time.avg, 326 | total=bar.elapsed_td, 327 | eta=bar.eta_td, 328 | loss=losses.avg, 329 | top1=top1.avg, 330 | top5=top5.avg, 331 | ) 332 | bar.next() 333 | bar.finish() 334 | return (losses.avg, top1.avg) 335 | 336 | def save_checkpoint(state, is_best, checkpoint='checkpoint', filename='checkpoint.pth.tar'): 337 | filepath = os.path.join(checkpoint, filename) 338 | torch.save(state, filepath) 339 | if is_best: 340 | shutil.copyfile(filepath, os.path.join(checkpoint, 'model_best.pth.tar')) 341 | 342 | def adjust_learning_rate(optimizer, epoch): 343 | global state 344 | if epoch in args.schedule: 345 | state['lr'] *= args.gamma 346 | for param_group in optimizer.param_groups: 347 | param_group['lr'] = state['lr'] 348 | 349 | if __name__ == '__main__': 350 | main() 
-------------------------------------------------------------------------------- /cifar_one.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Training script for CIFAR-10/100 3 | Copyright (c) Wei YANG, 2017 4 | ''' 5 | from __future__ import print_function 6 | 7 | import argparse 8 | import os 9 | import shutil 10 | import time 11 | import random 12 | import torch 13 | import torch.nn as nn 14 | import torch.nn.parallel 15 | import torch.backends.cudnn as cudnn 16 | import torch.optim as optim 17 | import torch.utils.data as data 18 | import torchvision.transforms as transforms 19 | import torchvision.datasets as datasets 20 | import models.cifar as models 21 | import torch.utils.data 22 | from loss import KLLoss 23 | from torch.utils.data import ConcatDataset 24 | from utils import Bar, Logger, AverageMeter, accuracy, mkdir_p, savefig,ramps 25 | import pdb 26 | 27 | model_names = sorted(name for name in models.__dict__ 28 | if name.islower() and not name.startswith("__") 29 | and callable(models.__dict__[name])) 30 | 31 | parser = argparse.ArgumentParser(description='PyTorch CIFAR10/100 Training') 32 | # Datasets 33 | parser.add_argument('-d', '--dataset', default='cifar10', type=str) 34 | parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', 35 | help='number of data loading workers (default: 4)') 36 | # Optimization options 37 | parser.add_argument('--epochs', default=300, type=int, metavar='N', 38 | help='number of total epochs to run') 39 | parser.add_argument('--start-epoch', default=0, type=int, metavar='N', 40 | help='manual epoch number (useful on restarts)') 41 | parser.add_argument('--train-batch', default=128, type=int, metavar='N', 42 | help='train batchsize') 43 | parser.add_argument('--consistency_rampup', '--consistency_rampup', default=80, type=float, 44 | metavar='consistency_rampup', help='consistency_rampup ratio') 45 | parser.add_argument('--test-batch', default=100, type=int, 
metavar='N', 46 | help='test batchsize') 47 | parser.add_argument('--lr', '--learning-rate', default=0.1, type=float, 48 | metavar='LR', help='initial learning rate') 49 | parser.add_argument('--drop', '--dropout', default=0, type=float, 50 | metavar='Dropout', help='Dropout ratio') 51 | 52 | parser.add_argument('--schedule', type=int, nargs='+', default=[150, 225], 53 | help='Decrease learning rate at these epochs.') 54 | parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma on schedule.') 55 | parser.add_argument('--momentum', default=0.9, type=float, metavar='M', 56 | help='momentum') 57 | parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float, 58 | metavar='W', help='weight decay (default: 1e-4)') 59 | # Checkpoints 60 | parser.add_argument('-c', '--checkpoint', default='checkpoint', type=str, metavar='PATH', 61 | help='path to save checkpoint (default: checkpoint)') 62 | parser.add_argument('--resume', 63 | default='', 64 | type=str, metavar='PATH', 65 | help='path to latest checkpoint (default: none)') 66 | # Architecture 67 | parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet20', 68 | choices=model_names, 69 | help='model architecture: ' + 70 | ' | '.join(model_names) + 71 | ' (default: resnet18)') 72 | parser.add_argument('--depth', type=int, default=29, help='Model depth.') 73 | parser.add_argument('--cardinality', type=int, default=8, help='Model cardinality (group).') 74 | parser.add_argument('--widen-factor', type=int, default=4, help='Widen factor. 
4 -> 64, 8 -> 128, ...') 75 | parser.add_argument('--growthRate', type=int, default=12, help='Growth rate for DenseNet.') 76 | parser.add_argument('--compressionRate', type=int, default=2, help='Compression Rate (theta) for DenseNet.') 77 | parser.add_argument('--layerensemble', type=bool, default=False, help='Using layer ensembel') 78 | 79 | # Miscs 80 | parser.add_argument('--manualSeed', type=int, help='manual seed') 81 | parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true', 82 | help='evaluate model on validation set') 83 | # Device options 84 | parser.add_argument('--gpu-id', default='0', type=str, 85 | help='id(s) for CUDA_VISIBLE_DEVICES') 86 | 87 | args = parser.parse_args() 88 | 89 | state = {k: v for k, v in args._get_kwargs()} 90 | 91 | # Validate dataset 92 | assert args.dataset == 'cifar10' or args.dataset == 'cifar100' or args.dataset == 'svhn', 'Dataset can only be cifar10 or cifar100.' 93 | 94 | # Use CUDA 95 | os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id 96 | use_cuda = torch.cuda.is_available() 97 | 98 | # Random seed 99 | if args.manualSeed is None: 100 | args.manualSeed = random.randint(1, 10000) 101 | random.seed(args.manualSeed) 102 | torch.manual_seed(args.manualSeed) 103 | if use_cuda: 104 | torch.cuda.manual_seed_all(args.manualSeed) 105 | 106 | best_acc = 0 # best test accuracy 107 | 108 | 109 | def main(): 110 | global best_acc 111 | start_epoch = args.start_epoch # start from epoch 0 or last checkpoint epoch 112 | 113 | if not os.path.isdir(args.checkpoint): 114 | mkdir_p(args.checkpoint) 115 | 116 | # Data 117 | print('==> Preparing dataset %s' % args.dataset) 118 | transform_train = transforms.Compose([ 119 | transforms.RandomCrop(32, padding=4), 120 | transforms.RandomHorizontalFlip(), 121 | transforms.ToTensor(), 122 | transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)), 123 | ]) 124 | 125 | transform_test = transforms.Compose([ 126 | transforms.ToTensor(), 127 | 
def main():
    """Build the data loaders, model, losses and optimizer, then either
    evaluate a resumed checkpoint (--evaluate) or run the full training
    loop, logging per-branch accuracies and checkpointing the best model.
    """
    global best_acc
    start_epoch = args.start_epoch  # start from epoch 0 or last checkpoint epoch

    if not os.path.isdir(args.checkpoint):
        mkdir_p(args.checkpoint)

    # Data
    print('==> Preparing dataset %s' % args.dataset)
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])

    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])

    if args.dataset == 'cifar10':
        dataloader = datasets.CIFAR10
        num_classes = 10
    elif args.dataset == 'cifar100':
        dataloader = datasets.CIFAR100
        num_classes = 100
    elif args.dataset == 'svhn':
        dataloader = datasets.SVHN
        num_classes = 10

    if args.dataset.startswith('cifar'):
        trainset = dataloader(root='./data', train=True, download=True, transform=transform_train)
        trainloader = data.DataLoader(trainset, batch_size=args.train_batch,
                                      shuffle=True, num_workers=args.workers)
        testset = dataloader(root='./data', train=False, download=False, transform=transform_test)
        testloader = data.DataLoader(testset, batch_size=args.test_batch,
                                     shuffle=False, num_workers=args.workers)
    elif args.dataset == 'svhn':
        # SVHN ships a large 'extra' split; train on train+extra concatenated.
        svhn_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.2, 0.2, 0.2)),
        ])
        train_dataset = dataloader(root='./data', split='train', download=True,
                                   transform=svhn_transform)
        train_extra_dataset = dataloader(root='./data', split='extra', download=True,
                                         transform=svhn_transform)
        trainset = ConcatDataset([train_dataset, train_extra_dataset])
        trainloader = data.DataLoader(trainset, batch_size=args.train_batch,
                                      shuffle=True, num_workers=args.workers)
        testset = dataloader(root='./data', split='test', download=True,
                             transform=svhn_transform)
        testloader = data.DataLoader(testset, batch_size=args.test_batch,
                                     shuffle=False, num_workers=args.workers)
    else:
        print("not support dataset")

    # Model: dispatch constructor kwargs on the architecture family.
    print("==> creating model '{}'".format(args.arch))
    if args.arch.endswith('resnext'):
        model = models.__dict__[args.arch](
            cardinality=args.cardinality,
            num_classes=num_classes,
            depth=args.depth,
            widen_factor=args.widen_factor,
            dropRate=args.drop,
        )
    elif args.arch.endswith('densenet'):
        model = models.__dict__[args.arch](
            num_classes=num_classes,
            depth=args.depth,
            growthRate=args.growthRate,
            compressionRate=args.compressionRate,
            dropRate=args.drop,
        )
    elif args.arch.startswith('wrn'):
        model = models.__dict__[args.arch](
            num_classes=num_classes,
            depth=args.depth,
            widen_factor=args.widen_factor,
            dropRate=args.drop,
        )
    elif args.arch.endswith('resnet'):
        model = models.__dict__[args.arch](
            num_classes=num_classes,
            depth=args.depth,
        )
    else:
        model = models.__dict__[args.arch](num_classes=num_classes)

    model = torch.nn.DataParallel(model).cuda()
    cudnn.benchmark = True
    print('    Total params: %.2fM' % (sum(p.numel() for p in model.parameters()) / 1000000.0))

    criterion = nn.CrossEntropyLoss()
    criterion_kl = KLLoss().cuda()
    optimizer = optim.SGD(model.parameters(), lr=args.lr,
                          momentum=args.momentum, weight_decay=args.weight_decay)

    # Resume
    title = 'cifar' + args.arch
    if args.resume or args.evaluate:
        # Load checkpoint.
        print('==> Resuming from checkpoint..')
        assert os.path.isfile(args.resume), 'Error: no checkpoint directory found!'
        args.checkpoint = os.path.dirname(args.resume)
        checkpoint = torch.load(args.resume)
        best_acc = checkpoint['best_acc']
        start_epoch = checkpoint['epoch']
        model.load_state_dict(checkpoint['state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title, resume=True)
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title)
        logger.set_names(['TAcc_1', 'VAcc_1', 'TAcc_2', 'VAcc_2',
                          'TAcc_3', 'VAcc_3', 'TAcc_e', 'VAcc_e'])

    if args.evaluate:
        print('\nEvaluation only')
        # BUG FIX: test() returns a 5-tuple (loss + 4 accuracies); the
        # original 2-value unpacking raised ValueError here.
        test_loss, test_acc_1, test_acc_2, test_acc_3, test_acc_en = \
            test(testloader, model, use_cuda)
        print(' Test Loss: %.8f, Test Acc (branch1/2/3/ensemble): '
              '%.2f / %.2f / %.2f / %.2f'
              % (test_loss, test_acc_1, test_acc_2, test_acc_3, test_acc_en))
        return

    # Train and val
    for epoch in range(start_epoch, args.epochs):
        adjust_learning_rate(optimizer, epoch)

        print('\nEpoch: [%d | %d] LR: %f' % (epoch + 1, args.epochs, state['lr']))

        train_loss, train_acc_1, train_acc_2, train_acc_3, train_acc_en = train(
            trainloader, model, criterion, criterion_kl, optimizer, epoch, use_cuda)
        test_loss, test_acc_1, test_acc_2, test_acc_3, test_acc_en = test(
            testloader, model, use_cuda)

        # append logger file
        logger.append([train_acc_1, test_acc_1, train_acc_2, test_acc_2,
                       train_acc_3, test_acc_3, train_acc_en, test_acc_en])

        # Save model; "best" is judged on branch-1 top-1 accuracy.
        is_best = test_acc_1 > best_acc
        best_acc = max(test_acc_1, best_acc)
        save_checkpoint({
            'epoch': epoch + 1,
            'state_dict': model.state_dict(),
            'acc': test_acc_1,
            'best_acc': best_acc,
            'optimizer': optimizer.state_dict(),
        }, is_best, checkpoint=args.checkpoint)

    logger.close()
    logger.plot()
    savefig(os.path.join(args.checkpoint, 'log.eps'))

    print('Best acc:')
    print(best_acc)
def train(trainloader, model, criterion, criterion_kl, optimizer, epoch, use_cuda):
    """Run one training epoch.

    The model returns four logit tensors: three branch classifiers
    (outputs1..3) and the gated ensemble (outputs4). The loss is the sum of
    cross-entropy on every head plus a ramped-up KL term distilling each
    branch toward the ensemble.

    Returns:
        (avg_loss, top1_branch1, top1_branch2, top1_branch3, top1_ensemble)
    """
    # switch to train mode
    model.train()

    losses = AverageMeter()
    losses_kl = AverageMeter()
    top1_c1 = AverageMeter()
    top5_c1 = AverageMeter()
    top1_c2 = AverageMeter()
    top5_c2 = AverageMeter()
    top1_c3 = AverageMeter()
    top5_c3 = AverageMeter()
    top1_t = AverageMeter()
    top5_t = AverageMeter()

    bar = Bar('Processing', max=len(trainloader))
    # KL weight follows a sigmoid ramp-up over args.consistency_rampup epochs.
    consistency_weight = get_current_consistency_weight(epoch)
    for batch_idx, (inputs, targets) in enumerate(trainloader):
        if use_cuda:
            # BUG FIX: `async=True` is a SyntaxError on Python >= 3.7
            # (`async` became a keyword); the modern spelling is
            # `non_blocking=True`.
            inputs, targets = inputs.cuda(), targets.cuda(non_blocking=True)

        outputs1, outputs2, outputs3, outputs4 = model(inputs)

        # Supervised loss on every head (branches + ensemble).
        loss_cross = (criterion(outputs1, targets) + criterion(outputs2, targets)
                      + criterion(outputs3, targets) + criterion(outputs4, targets))

        # Distill each branch toward the ensemble prediction.
        loss_kl = consistency_weight * (criterion_kl(outputs1, outputs4)
                                        + criterion_kl(outputs2, outputs4)
                                        + criterion_kl(outputs3, outputs4))
        loss = loss_cross + loss_kl

        prec1_t, prec5_t = accuracy(outputs4.data, targets.data, topk=(1, 5))
        prec1_c1, prec5_c1 = accuracy(outputs1.data, targets.data, topk=(1, 5))
        prec1_c2, prec5_c2 = accuracy(outputs2.data, targets.data, topk=(1, 5))
        prec1_c3, prec5_c3 = accuracy(outputs3.data, targets.data, topk=(1, 5))
        # BUG FIX: `.data[0]` fails on 0-dim tensors in PyTorch >= 0.5;
        # `.item()` is the supported scalar extraction.
        losses_kl.update(loss_kl.item(), inputs.size(0))
        losses.update(loss.item(), inputs.size(0))
        top1_c1.update(prec1_c1[0], inputs.size(0))
        top5_c1.update(prec5_c1[0], inputs.size(0))
        top1_c2.update(prec1_c2[0], inputs.size(0))
        top5_c2.update(prec5_c2[0], inputs.size(0))
        top1_c3.update(prec1_c3[0], inputs.size(0))
        top5_c3.update(prec5_c3[0], inputs.size(0))
        top1_t.update(prec1_t[0], inputs.size(0))
        top5_t.update(prec5_t[0], inputs.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        bar.suffix = '({batch}/{size}) || Loss: {loss:.4f} |LossKL: {losses_kl:.4f} | top1_C1: {top1_C1: .4f} | top1_C2: {top1_C2: .4f}|top1_C3: {top1_C3: .4f}| top1_t: {top1_t: .4f} '.format(
            batch=batch_idx + 1,
            size=len(trainloader),
            loss=losses.avg,
            losses_kl=losses_kl.avg,
            top1_C1=top1_c1.avg,
            top1_C2=top1_c2.avg,
            top1_C3=top1_c3.avg,
            top1_t=top1_t.avg,
        )
        bar.next()
    bar.finish()
    return (losses.avg, top1_c1.avg, top1_c2.avg, top1_c3.avg, top1_t.avg)
| top1_t.update(prec1_en[0], inputs.size(0)) 371 | # measure elapsed time 372 | batch_time.update(time.time() - end) 373 | end = time.time() 374 | 375 | # plot progress 376 | bar.suffix = '({batch}/{size})| Loss: {loss: .4f} | top1_C1: {top1_C1: .4f} |top1_C2: {top1_C2: .4f}|top1_C3: {top1_C3:.4f} |top1_t: {top1_t: .4f}'.format( 377 | batch=batch_idx + 1, 378 | size=len(testloader), 379 | loss=losses.avg, 380 | top1_C1=top1_c1.avg, 381 | top1_C2=top1_c2.avg, 382 | top1_C3=top1_c3.avg, 383 | top1_t=top1_t.avg, 384 | ) 385 | bar.next() 386 | 387 | bar.finish() 388 | return (losses.avg, top1_c1.avg,top1_c2.avg,top1_c3.avg,top1_t.avg) 389 | 390 | 391 | def save_checkpoint(state, is_best, checkpoint='checkpoint', filename='checkpoint.pth.tar'): 392 | filepath = os.path.join(checkpoint, filename) 393 | torch.save(state, filepath) 394 | if is_best: 395 | shutil.copyfile(filepath, os.path.join(checkpoint, 'model_best.pth.tar')) 396 | 397 | def get_current_consistency_weight(epoch): 398 | # Consistency ramp-up from https://arxiv.org/abs/1610.02242 399 | return ramps.sigmoid_rampup(epoch, args.consistency_rampup) 400 | 401 | def adjust_learning_rate(optimizer, epoch): 402 | global state 403 | if epoch in args.schedule: 404 | state['lr'] *= args.gamma 405 | for param_group in optimizer.param_groups: 406 | param_group['lr'] = state['lr'] 407 | 408 | 409 | if __name__ == '__main__': 410 | main() 411 | -------------------------------------------------------------------------------- /utils/test.eps: -------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%Title: test.eps 3 | %%Creator: matplotlib version 1.3.1, http://matplotlib.org/ 4 | %%CreationDate: Thu May 17 16:15:41 2018 5 | %%Orientation: portrait 6 | %%BoundingBox: 18 216 594 576 7 | %%EndComments 8 | %%BeginProlog 9 | /mpldict 8 dict def 10 | mpldict begin 11 | /m { moveto } bind def 12 | /l { lineto } bind def 13 | /r { rlineto } bind def 14 | /c { curveto } 
bind def 15 | /cl { closepath } bind def 16 | /box { 17 | m 18 | 1 index 0 r 19 | 0 exch r 20 | neg 0 r 21 | cl 22 | } bind def 23 | /clipbox { 24 | box 25 | clip 26 | newpath 27 | } bind def 28 | %!PS-Adobe-3.0 Resource-Font 29 | %%Title: Bitstream Vera Sans 30 | %%Copyright: Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. 31 | %%Creator: Converted from TrueType to type 3 by PPR 32 | 25 dict begin 33 | /_d{bind def}bind def 34 | /_m{moveto}_d 35 | /_l{lineto}_d 36 | /_cl{closepath eofill}_d 37 | /_c{curveto}_d 38 | /_sc{7 -1 roll{setcachedevice}{pop pop pop pop pop pop}ifelse}_d 39 | /_e{exec}_d 40 | /FontName /BitstreamVeraSans-Roman def 41 | /PaintType 0 def 42 | /FontMatrix[.001 0 0 .001 0 0]def 43 | /FontBBox[-183 -236 1287 928]def 44 | /FontType 3 def 45 | /Encoding [ /space /percent /parenleft /parenright /hyphen /zero /one /two /three /four /five /six /seven /eight /nine /B /E /N /O /a /b /c /e /f /h /m /n /o /r /u ] def 46 | /FontInfo 10 dict dup begin 47 | /FamilyName (Bitstream Vera Sans) def 48 | /FullName (Bitstream Vera Sans) def 49 | /Notice (Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is a trademark of Bitstream, Inc.) 
def 50 | /Weight (Roman) def 51 | /Version (Release 1.10) def 52 | /ItalicAngle 0.0 def 53 | /isFixedPitch false def 54 | /UnderlinePosition -213 def 55 | /UnderlineThickness 143 def 56 | end readonly def 57 | /CharStrings 30 dict dup begin 58 | /space{318 0 0 0 0 0 _sc 59 | }_d 60 | /percent{{950 0 55 -13 895 742 _sc 61 | 727 321 _m 62 | 699 321 676 309 660 285 _c 63 | 644 261 636 227 636 184 _c 64 | 636 142 644 108 660 84 _c 65 | 676 60 699 48 727 48 _c 66 | 755 48 777 60 793 84 _c 67 | 809 108 817 142 817 184 _c 68 | 817 226 809 260 793 284 _c 69 | 777 308 755 321 727 321 _c 70 | 727 383 _m 71 | 778 383 819 365 849 329 _c 72 | 879 293 895 244 895 184 _c 73 | 895 123 879 75 849 40 _c 74 | 819 4 778 -13 727 -13 _c 75 | }_e{675 -13 633 4 603 40 _c 76 | 573 75 558 123 558 184 _c 77 | 558 245 573 293 603 329 _c 78 | 633 365 675 383 727 383 _c 79 | 223 680 _m 80 | 195 680 173 667 157 643 _c 81 | 141 619 133 586 133 544 _c 82 | 133 500 141 467 157 443 _c 83 | 173 419 195 407 223 407 _c 84 | 251 407 274 419 290 443 _c 85 | 306 467 314 500 314 544 _c 86 | 314 586 305 619 289 643 _c 87 | 273 667 251 680 223 680 _c 88 | 664 742 _m 89 | 742 742 _l 90 | 286 -13 _l 91 | }_e{208 -13 _l 92 | 664 742 _l 93 | 223 742 _m 94 | 274 742 315 724 346 688 _c 95 | 376 652 392 604 392 544 _c 96 | 392 482 376 434 346 398 _c 97 | 316 362 275 345 223 345 _c 98 | 171 345 130 362 100 398 _c 99 | 70 434 55 482 55 544 _c 100 | 55 604 70 652 100 688 _c 101 | 130 724 171 742 223 742 _c 102 | _cl}_e}_d 103 | /parenleft{390 0 86 -131 310 759 _sc 104 | 310 759 _m 105 | 266 683 234 609 213 536 _c 106 | 191 463 181 389 181 314 _c 107 | 181 238 191 164 213 91 _c 108 | 234 17 266 -56 310 -131 _c 109 | 232 -131 _l 110 | 183 -54 146 20 122 94 _c 111 | 98 168 86 241 86 314 _c 112 | 86 386 98 459 122 533 _c 113 | 146 607 182 682 232 759 _c 114 | 310 759 _l 115 | _cl}_d 116 | /parenright{390 0 80 -131 304 759 _sc 117 | 80 759 _m 118 | 158 759 _l 119 | 206 682 243 607 267 533 _c 120 | 291 459 304 386 304 314 
_c 121 | 304 241 291 168 267 94 _c 122 | 243 20 206 -54 158 -131 _c 123 | 80 -131 _l 124 | 123 -56 155 17 177 91 _c 125 | 198 164 209 238 209 314 _c 126 | 209 389 198 463 177 536 _c 127 | 155 609 123 683 80 759 _c 128 | _cl}_d 129 | /hyphen{361 0 49 234 312 314 _sc 130 | 49 314 _m 131 | 312 314 _l 132 | 312 234 _l 133 | 49 234 _l 134 | 49 314 _l 135 | _cl}_d 136 | /zero{636 0 66 -13 570 742 _sc 137 | 318 664 _m 138 | 267 664 229 639 203 589 _c 139 | 177 539 165 464 165 364 _c 140 | 165 264 177 189 203 139 _c 141 | 229 89 267 64 318 64 _c 142 | 369 64 407 89 433 139 _c 143 | 458 189 471 264 471 364 _c 144 | 471 464 458 539 433 589 _c 145 | 407 639 369 664 318 664 _c 146 | 318 742 _m 147 | 399 742 461 709 505 645 _c 148 | 548 580 570 486 570 364 _c 149 | 570 241 548 147 505 83 _c 150 | 461 19 399 -13 318 -13 _c 151 | 236 -13 173 19 130 83 _c 152 | 87 147 66 241 66 364 _c 153 | 66 486 87 580 130 645 _c 154 | 173 709 236 742 318 742 _c 155 | _cl}_d 156 | /one{636 0 110 0 544 729 _sc 157 | 124 83 _m 158 | 285 83 _l 159 | 285 639 _l 160 | 110 604 _l 161 | 110 694 _l 162 | 284 729 _l 163 | 383 729 _l 164 | 383 83 _l 165 | 544 83 _l 166 | 544 0 _l 167 | 124 0 _l 168 | 124 83 _l 169 | _cl}_d 170 | /two{{636 0 73 0 536 742 _sc 171 | 192 83 _m 172 | 536 83 _l 173 | 536 0 _l 174 | 73 0 _l 175 | 73 83 _l 176 | 110 121 161 173 226 239 _c 177 | 290 304 331 346 348 365 _c 178 | 380 400 402 430 414 455 _c 179 | 426 479 433 504 433 528 _c 180 | 433 566 419 598 392 622 _c 181 | 365 646 330 659 286 659 _c 182 | 255 659 222 653 188 643 _c 183 | 154 632 117 616 78 594 _c 184 | 78 694 _l 185 | 118 710 155 722 189 730 _c 186 | 223 738 255 742 284 742 _c 187 | }_e{359 742 419 723 464 685 _c 188 | 509 647 532 597 532 534 _c 189 | 532 504 526 475 515 449 _c 190 | 504 422 484 390 454 354 _c 191 | 446 344 420 317 376 272 _c 192 | 332 227 271 164 192 83 _c 193 | _cl}_e}_d 194 | /three{{636 0 76 -13 556 742 _sc 195 | 406 393 _m 196 | 453 383 490 362 516 330 _c 197 | 542 298 556 258 556 212 _c 
198 | 556 140 531 84 482 45 _c 199 | 432 6 362 -13 271 -13 _c 200 | 240 -13 208 -10 176 -4 _c 201 | 144 1 110 10 76 22 _c 202 | 76 117 _l 203 | 103 101 133 89 166 81 _c 204 | 198 73 232 69 268 69 _c 205 | 330 69 377 81 409 105 _c 206 | 441 129 458 165 458 212 _c 207 | 458 254 443 288 413 312 _c 208 | 383 336 341 349 287 349 _c 209 | }_e{202 349 _l 210 | 202 430 _l 211 | 291 430 _l 212 | 339 430 376 439 402 459 _c 213 | 428 478 441 506 441 543 _c 214 | 441 580 427 609 401 629 _c 215 | 374 649 336 659 287 659 _c 216 | 260 659 231 656 200 650 _c 217 | 169 644 135 635 98 623 _c 218 | 98 711 _l 219 | 135 721 170 729 203 734 _c 220 | 235 739 266 742 296 742 _c 221 | 370 742 429 725 473 691 _c 222 | 517 657 539 611 539 553 _c 223 | 539 513 527 479 504 451 _c 224 | 481 423 448 403 406 393 _c 225 | _cl}_e}_d 226 | /four{636 0 49 0 580 729 _sc 227 | 378 643 _m 228 | 129 254 _l 229 | 378 254 _l 230 | 378 643 _l 231 | 352 729 _m 232 | 476 729 _l 233 | 476 254 _l 234 | 580 254 _l 235 | 580 172 _l 236 | 476 172 _l 237 | 476 0 _l 238 | 378 0 _l 239 | 378 172 _l 240 | 49 172 _l 241 | 49 267 _l 242 | 352 729 _l 243 | _cl}_d 244 | /five{{636 0 77 -13 549 729 _sc 245 | 108 729 _m 246 | 495 729 _l 247 | 495 646 _l 248 | 198 646 _l 249 | 198 467 _l 250 | 212 472 227 476 241 478 _c 251 | 255 480 270 482 284 482 _c 252 | 365 482 429 459 477 415 _c 253 | 525 370 549 310 549 234 _c 254 | 549 155 524 94 475 51 _c 255 | 426 8 357 -13 269 -13 _c 256 | 238 -13 207 -10 175 -6 _c 257 | 143 -1 111 6 77 17 _c 258 | 77 116 _l 259 | 106 100 136 88 168 80 _c 260 | 199 72 232 69 267 69 _c 261 | }_e{323 69 368 83 401 113 _c 262 | 433 143 450 183 450 234 _c 263 | 450 284 433 324 401 354 _c 264 | 368 384 323 399 267 399 _c 265 | 241 399 214 396 188 390 _c 266 | 162 384 135 375 108 363 _c 267 | 108 729 _l 268 | _cl}_e}_d 269 | /six{{636 0 70 -13 573 742 _sc 270 | 330 404 _m 271 | 286 404 251 388 225 358 _c 272 | 199 328 186 286 186 234 _c 273 | 186 181 199 139 225 109 _c 274 | 251 79 286 64 330 64 _c 275 
| 374 64 409 79 435 109 _c 276 | 461 139 474 181 474 234 _c 277 | 474 286 461 328 435 358 _c 278 | 409 388 374 404 330 404 _c 279 | 526 713 _m 280 | 526 623 _l 281 | 501 635 476 644 451 650 _c 282 | 425 656 400 659 376 659 _c 283 | 310 659 260 637 226 593 _c 284 | }_e{192 549 172 482 168 394 _c 285 | 187 422 211 444 240 459 _c 286 | 269 474 301 482 336 482 _c 287 | 409 482 467 459 509 415 _c 288 | 551 371 573 310 573 234 _c 289 | 573 159 550 99 506 54 _c 290 | 462 9 403 -13 330 -13 _c 291 | 246 -13 181 19 137 83 _c 292 | 92 147 70 241 70 364 _c 293 | 70 479 97 571 152 639 _c 294 | 206 707 280 742 372 742 _c 295 | 396 742 421 739 447 735 _c 296 | 472 730 498 723 526 713 _c 297 | _cl}_e}_d 298 | /seven{636 0 82 0 551 729 _sc 299 | 82 729 _m 300 | 551 729 _l 301 | 551 687 _l 302 | 286 0 _l 303 | 183 0 _l 304 | 432 646 _l 305 | 82 646 _l 306 | 82 729 _l 307 | _cl}_d 308 | /eight{{636 0 68 -13 568 742 _sc 309 | 318 346 _m 310 | 271 346 234 333 207 308 _c 311 | 180 283 167 249 167 205 _c 312 | 167 161 180 126 207 101 _c 313 | 234 76 271 64 318 64 _c 314 | 364 64 401 76 428 102 _c 315 | 455 127 469 161 469 205 _c 316 | 469 249 455 283 429 308 _c 317 | 402 333 365 346 318 346 _c 318 | 219 388 _m 319 | 177 398 144 418 120 447 _c 320 | 96 476 85 511 85 553 _c 321 | 85 611 105 657 147 691 _c 322 | 188 725 245 742 318 742 _c 323 | }_e{390 742 447 725 489 691 _c 324 | 530 657 551 611 551 553 _c 325 | 551 511 539 476 515 447 _c 326 | 491 418 459 398 417 388 _c 327 | 464 377 501 355 528 323 _c 328 | 554 291 568 251 568 205 _c 329 | 568 134 546 80 503 43 _c 330 | 459 5 398 -13 318 -13 _c 331 | 237 -13 175 5 132 43 _c 332 | 89 80 68 134 68 205 _c 333 | 68 251 81 291 108 323 _c 334 | 134 355 171 377 219 388 _c 335 | 183 544 _m 336 | 183 506 194 476 218 455 _c 337 | }_e{242 434 275 424 318 424 _c 338 | 360 424 393 434 417 455 _c 339 | 441 476 453 506 453 544 _c 340 | 453 582 441 611 417 632 _c 341 | 393 653 360 664 318 664 _c 342 | 275 664 242 653 218 632 _c 343 | 194 611 183 582 183 
544 _c 344 | _cl}_e}_d 345 | /nine{{636 0 63 -13 566 742 _sc 346 | 110 15 _m 347 | 110 105 _l 348 | 134 93 159 84 185 78 _c 349 | 210 72 235 69 260 69 _c 350 | 324 69 374 90 408 134 _c 351 | 442 178 462 244 468 334 _c 352 | 448 306 424 284 396 269 _c 353 | 367 254 335 247 300 247 _c 354 | 226 247 168 269 126 313 _c 355 | 84 357 63 417 63 494 _c 356 | 63 568 85 628 129 674 _c 357 | 173 719 232 742 306 742 _c 358 | 390 742 455 709 499 645 _c 359 | 543 580 566 486 566 364 _c 360 | }_e{566 248 538 157 484 89 _c 361 | 429 21 356 -13 264 -13 _c 362 | 239 -13 214 -10 189 -6 _c 363 | 163 -2 137 5 110 15 _c 364 | 306 324 _m 365 | 350 324 385 339 411 369 _c 366 | 437 399 450 441 450 494 _c 367 | 450 546 437 588 411 618 _c 368 | 385 648 350 664 306 664 _c 369 | 262 664 227 648 201 618 _c 370 | 175 588 162 546 162 494 _c 371 | 162 441 175 399 201 369 _c 372 | 227 339 262 324 306 324 _c 373 | _cl}_e}_d 374 | /B{{686 0 98 0 615 729 _sc 375 | 197 348 _m 376 | 197 81 _l 377 | 355 81 _l 378 | 408 81 447 92 473 114 _c 379 | 498 136 511 169 511 215 _c 380 | 511 260 498 293 473 315 _c 381 | 447 337 408 348 355 348 _c 382 | 197 348 _l 383 | 197 648 _m 384 | 197 428 _l 385 | 343 428 _l 386 | 391 428 426 437 450 455 _c 387 | 474 473 486 500 486 538 _c 388 | 486 574 474 602 450 620 _c 389 | 426 638 391 648 343 648 _c 390 | 197 648 _l 391 | 98 729 _m 392 | 350 729 _l 393 | }_e{425 729 483 713 524 682 _c 394 | 564 650 585 606 585 549 _c 395 | 585 504 574 468 553 442 _c 396 | 532 416 502 399 462 393 _c 397 | 510 382 548 360 575 327 _c 398 | 601 294 615 253 615 204 _c 399 | 615 138 592 88 548 53 _c 400 | 504 17 441 0 360 0 _c 401 | 98 0 _l 402 | 98 729 _l 403 | _cl}_e}_d 404 | /E{632 0 98 0 568 729 _sc 405 | 98 729 _m 406 | 559 729 _l 407 | 559 646 _l 408 | 197 646 _l 409 | 197 430 _l 410 | 544 430 _l 411 | 544 347 _l 412 | 197 347 _l 413 | 197 83 _l 414 | 568 83 _l 415 | 568 0 _l 416 | 98 0 _l 417 | 98 729 _l 418 | _cl}_d 419 | /N{748 0 98 0 650 729 _sc 420 | 98 729 _m 421 | 231 729 _l 422 | 
554 119 _l 423 | 554 729 _l 424 | 650 729 _l 425 | 650 0 _l 426 | 517 0 _l 427 | 194 610 _l 428 | 194 0 _l 429 | 98 0 _l 430 | 98 729 _l 431 | _cl}_d 432 | /O{787 0 56 -13 731 742 _sc 433 | 394 662 _m 434 | 322 662 265 635 223 582 _c 435 | 181 528 160 456 160 364 _c 436 | 160 272 181 199 223 146 _c 437 | 265 92 322 66 394 66 _c 438 | 465 66 522 92 564 146 _c 439 | 606 199 627 272 627 364 _c 440 | 627 456 606 528 564 582 _c 441 | 522 635 465 662 394 662 _c 442 | 394 742 _m 443 | 496 742 577 707 639 639 _c 444 | 700 571 731 479 731 364 _c 445 | 731 248 700 157 639 89 _c 446 | 577 21 496 -13 394 -13 _c 447 | 291 -13 209 21 148 89 _c 448 | 86 157 56 248 56 364 _c 449 | 56 479 86 571 148 639 _c 450 | 209 707 291 742 394 742 _c 451 | _cl}_d 452 | /a{{613 0 60 -13 522 560 _sc 453 | 343 275 _m 454 | 270 275 220 266 192 250 _c 455 | 164 233 150 205 150 165 _c 456 | 150 133 160 107 181 89 _c 457 | 202 70 231 61 267 61 _c 458 | 317 61 357 78 387 114 _c 459 | 417 149 432 196 432 255 _c 460 | 432 275 _l 461 | 343 275 _l 462 | 522 312 _m 463 | 522 0 _l 464 | 432 0 _l 465 | 432 83 _l 466 | 411 49 385 25 355 10 _c 467 | 325 -5 287 -13 243 -13 _c 468 | 187 -13 142 2 109 33 _c 469 | 76 64 60 106 60 159 _c 470 | }_e{60 220 80 266 122 298 _c 471 | 163 329 224 345 306 345 _c 472 | 432 345 _l 473 | 432 354 _l 474 | 432 395 418 427 391 450 _c 475 | 364 472 326 484 277 484 _c 476 | 245 484 215 480 185 472 _c 477 | 155 464 127 453 100 439 _c 478 | 100 522 _l 479 | 132 534 164 544 195 550 _c 480 | 226 556 256 560 286 560 _c 481 | 365 560 424 539 463 498 _c 482 | 502 457 522 395 522 312 _c 483 | _cl}_e}_d 484 | /b{{635 0 91 -13 580 760 _sc 485 | 487 273 _m 486 | 487 339 473 390 446 428 _c 487 | 418 466 381 485 334 485 _c 488 | 286 485 249 466 222 428 _c 489 | 194 390 181 339 181 273 _c 490 | 181 207 194 155 222 117 _c 491 | 249 79 286 61 334 61 _c 492 | 381 61 418 79 446 117 _c 493 | 473 155 487 207 487 273 _c 494 | 181 464 _m 495 | 199 496 223 520 252 536 _c 496 | 281 552 316 560 356 560 _c 
497 | 422 560 476 533 518 481 _c 498 | 559 428 580 359 580 273 _c 499 | }_e{580 187 559 117 518 65 _c 500 | 476 13 422 -13 356 -13 _c 501 | 316 -13 281 -5 252 10 _c 502 | 223 25 199 49 181 82 _c 503 | 181 0 _l 504 | 91 0 _l 505 | 91 760 _l 506 | 181 760 _l 507 | 181 464 _l 508 | _cl}_e}_d 509 | /c{{550 0 55 -13 488 560 _sc 510 | 488 526 _m 511 | 488 442 _l 512 | 462 456 437 466 411 473 _c 513 | 385 480 360 484 334 484 _c 514 | 276 484 230 465 198 428 _c 515 | 166 391 150 339 150 273 _c 516 | 150 206 166 154 198 117 _c 517 | 230 80 276 62 334 62 _c 518 | 360 62 385 65 411 72 _c 519 | 437 79 462 90 488 104 _c 520 | 488 21 _l 521 | 462 9 436 0 410 -5 _c 522 | 383 -10 354 -13 324 -13 _c 523 | 242 -13 176 12 128 64 _c 524 | }_e{79 115 55 185 55 273 _c 525 | 55 362 79 432 128 483 _c 526 | 177 534 244 560 330 560 _c 527 | 358 560 385 557 411 551 _c 528 | 437 545 463 537 488 526 _c 529 | _cl}_e}_d 530 | /e{{615 0 55 -13 562 560 _sc 531 | 562 296 _m 532 | 562 252 _l 533 | 149 252 _l 534 | 153 190 171 142 205 110 _c 535 | 238 78 284 62 344 62 _c 536 | 378 62 412 66 444 74 _c 537 | 476 82 509 95 541 113 _c 538 | 541 28 _l 539 | 509 14 476 3 442 -3 _c 540 | 408 -9 373 -13 339 -13 _c 541 | 251 -13 182 12 131 62 _c 542 | 80 112 55 181 55 268 _c 543 | 55 357 79 428 127 481 _c 544 | 175 533 241 560 323 560 _c 545 | 397 560 455 536 498 489 _c 546 | }_e{540 441 562 377 562 296 _c 547 | 472 322 _m 548 | 471 371 457 410 431 440 _c 549 | 404 469 368 484 324 484 _c 550 | 274 484 234 469 204 441 _c 551 | 174 413 156 373 152 322 _c 552 | 472 322 _l 553 | _cl}_e}_d 554 | /f{352 0 23 0 371 760 _sc 555 | 371 760 _m 556 | 371 685 _l 557 | 285 685 _l 558 | 253 685 230 678 218 665 _c 559 | 205 652 199 629 199 595 _c 560 | 199 547 _l 561 | 347 547 _l 562 | 347 477 _l 563 | 199 477 _l 564 | 199 0 _l 565 | 109 0 _l 566 | 109 477 _l 567 | 23 477 _l 568 | 23 547 _l 569 | 109 547 _l 570 | 109 585 _l 571 | 109 645 123 690 151 718 _c 572 | 179 746 224 760 286 760 _c 573 | 371 760 _l 574 | _cl}_d 575 | 
/h{634 0 91 0 549 760 _sc 576 | 549 330 _m 577 | 549 0 _l 578 | 459 0 _l 579 | 459 327 _l 580 | 459 379 448 417 428 443 _c 581 | 408 469 378 482 338 482 _c 582 | 289 482 251 466 223 435 _c 583 | 195 404 181 362 181 309 _c 584 | 181 0 _l 585 | 91 0 _l 586 | 91 760 _l 587 | 181 760 _l 588 | 181 462 _l 589 | 202 494 227 519 257 535 _c 590 | 286 551 320 560 358 560 _c 591 | 420 560 468 540 500 501 _c 592 | 532 462 549 405 549 330 _c 593 | _cl}_d 594 | /m{{974 0 91 0 889 560 _sc 595 | 520 442 _m 596 | 542 482 569 511 600 531 _c 597 | 631 550 668 560 711 560 _c 598 | 767 560 811 540 842 500 _c 599 | 873 460 889 403 889 330 _c 600 | 889 0 _l 601 | 799 0 _l 602 | 799 327 _l 603 | 799 379 789 418 771 444 _c 604 | 752 469 724 482 686 482 _c 605 | 639 482 602 466 575 435 _c 606 | 548 404 535 362 535 309 _c 607 | 535 0 _l 608 | 445 0 _l 609 | 445 327 _l 610 | 445 379 435 418 417 444 _c 611 | 398 469 369 482 331 482 _c 612 | }_e{285 482 248 466 221 435 _c 613 | 194 404 181 362 181 309 _c 614 | 181 0 _l 615 | 91 0 _l 616 | 91 547 _l 617 | 181 547 _l 618 | 181 462 _l 619 | 201 495 226 520 255 536 _c 620 | 283 552 317 560 357 560 _c 621 | 397 560 430 550 458 530 _c 622 | 486 510 506 480 520 442 _c 623 | _cl}_e}_d 624 | /n{634 0 91 0 549 560 _sc 625 | 549 330 _m 626 | 549 0 _l 627 | 459 0 _l 628 | 459 327 _l 629 | 459 379 448 417 428 443 _c 630 | 408 469 378 482 338 482 _c 631 | 289 482 251 466 223 435 _c 632 | 195 404 181 362 181 309 _c 633 | 181 0 _l 634 | 91 0 _l 635 | 91 547 _l 636 | 181 547 _l 637 | 181 462 _l 638 | 202 494 227 519 257 535 _c 639 | 286 551 320 560 358 560 _c 640 | 420 560 468 540 500 501 _c 641 | 532 462 549 405 549 330 _c 642 | _cl}_d 643 | /o{612 0 55 -13 557 560 _sc 644 | 306 484 _m 645 | 258 484 220 465 192 427 _c 646 | 164 389 150 338 150 273 _c 647 | 150 207 163 156 191 118 _c 648 | 219 80 257 62 306 62 _c 649 | 354 62 392 80 420 118 _c 650 | 448 156 462 207 462 273 _c 651 | 462 337 448 389 420 427 _c 652 | 392 465 354 484 306 484 _c 653 | 306 560 _m 654 
| 384 560 445 534 490 484 _c 655 | 534 433 557 363 557 273 _c 656 | 557 183 534 113 490 63 _c 657 | 445 12 384 -13 306 -13 _c 658 | 227 -13 165 12 121 63 _c 659 | 77 113 55 183 55 273 _c 660 | 55 363 77 433 121 484 _c 661 | 165 534 227 560 306 560 _c 662 | _cl}_d 663 | /r{411 0 91 0 411 560 _sc 664 | 411 463 _m 665 | 401 469 390 473 378 476 _c 666 | 366 478 353 480 339 480 _c 667 | 288 480 249 463 222 430 _c 668 | 194 397 181 350 181 288 _c 669 | 181 0 _l 670 | 91 0 _l 671 | 91 547 _l 672 | 181 547 _l 673 | 181 462 _l 674 | 199 495 224 520 254 536 _c 675 | 284 552 321 560 365 560 _c 676 | 371 560 378 559 386 559 _c 677 | 393 558 401 557 411 555 _c 678 | 411 463 _l 679 | _cl}_d 680 | /u{634 0 85 -13 543 547 _sc 681 | 85 216 _m 682 | 85 547 _l 683 | 175 547 _l 684 | 175 219 _l 685 | 175 167 185 129 205 103 _c 686 | 225 77 255 64 296 64 _c 687 | 344 64 383 79 411 110 _c 688 | 439 141 453 183 453 237 _c 689 | 453 547 _l 690 | 543 547 _l 691 | 543 0 _l 692 | 453 0 _l 693 | 453 84 _l 694 | 431 50 405 26 377 10 _c 695 | 348 -5 315 -13 277 -13 _c 696 | 214 -13 166 6 134 45 _c 697 | 101 83 85 140 85 216 _c 698 | _cl}_d 699 | end readonly def 700 | 701 | /BuildGlyph 702 | {exch begin 703 | CharStrings exch 704 | 2 copy known not{pop /.notdef}if 705 | true 3 1 roll get exec 706 | end}_d 707 | 708 | /BuildChar { 709 | 1 index /Encoding get exch get 710 | 1 index /BuildGlyph get exec 711 | }_d 712 | 713 | FontName currentdict end definefont pop 714 | end 715 | %%EndProlog 716 | mpldict begin 717 | 18 216 translate 718 | 576 360 0 0 clipbox 719 | gsave 720 | 0 0 m 721 | 576 0 l 722 | 576 360 l 723 | 0 360 l 724 | cl 725 | 1.000 setgray 726 | fill 727 | grestore 728 | gsave 729 | 72 36 m 730 | 518.4 36 l 731 | 518.4 324 l 732 | 72 324 l 733 | cl 734 | 1.000 setgray 735 | fill 736 | grestore 737 | 1.000 setlinewidth 738 | 1 setlinejoin 739 | 2 setlinecap 740 | [] 0 setdash 741 | 0.839 0.153 0.157 setrgbcolor 742 | gsave 743 | 446.4 288 72 36 clipbox 744 | 72 294.48 m 745 | 183.6 
154.08 l 746 | 295.2 132.48 l 747 | 406.8 136.8 l 748 | 518.4 124.92 l 749 | stroke 750 | grestore 751 | 0.500 setlinewidth 752 | 0 setlinecap 753 | 0.000 setgray 754 | gsave 755 | 446.4 288 72 36 clipbox 756 | /o { 757 | gsave 758 | newpath 759 | translate 760 | 0.5 setlinewidth 761 | 1 setlinejoin 762 | 0 setlinecap 763 | 0 -4 m 764 | 1.06081 -4 2.07832 -3.57853 2.82843 -2.82843 c 765 | 3.57853 -2.07832 4 -1.06081 4 0 c 766 | 4 1.06081 3.57853 2.07832 2.82843 2.82843 c 767 | 2.07832 3.57853 1.06081 4 0 4 c 768 | -1.06081 4 -2.07832 3.57853 -2.82843 2.82843 c 769 | -3.57853 2.07832 -4 1.06081 -4 0 c 770 | -4 -1.06081 -3.57853 -2.07832 -2.82843 -2.82843 c 771 | -2.07832 -3.57853 -1.06081 -4 0 -4 c 772 | cl 773 | gsave 774 | 0.839 0.153 0.157 setrgbcolor 775 | fill 776 | grestore 777 | stroke 778 | grestore 779 | } bind def 780 | 72 294.48 o 781 | 183.6 154.08 o 782 | 295.2 132.48 o 783 | 406.8 136.8 o 784 | 518.4 124.92 o 785 | grestore 786 | 1.000 setlinewidth 787 | 2 setlinecap 788 | 1.000 0.498 0.055 setrgbcolor 789 | gsave 790 | 446.4 288 72 36 clipbox 791 | 72 294.48 m 792 | 183.6 105.84 l 793 | 295.2 58.68 l 794 | 406.8 60.12 l 795 | 518.4 59.4 l 796 | stroke 797 | grestore 798 | 0.500 setlinewidth 799 | 0 setlinejoin 800 | 0 setlinecap 801 | 0.000 setgray 802 | gsave 803 | 446.4 288 72 36 clipbox 804 | /o { 805 | gsave 806 | newpath 807 | translate 808 | 0.5 setlinewidth 809 | 0 setlinejoin 810 | 0 setlinecap 811 | -4.44089e-16 -5.65685 m 812 | 5.65685 0 l 813 | 4.44089e-16 5.65685 l 814 | -5.65685 8.88178e-16 l 815 | cl 816 | gsave 817 | 1.000 0.498 0.055 setrgbcolor 818 | fill 819 | grestore 820 | stroke 821 | grestore 822 | } bind def 823 | 72 294.48 o 824 | 183.6 105.84 o 825 | 295.2 58.68 o 826 | 406.8 60.12 o 827 | 518.4 59.4 o 828 | grestore 829 | 1 setlinejoin 830 | [1 3] 0 setdash 831 | gsave 832 | 446.4 288 72 36 clipbox 833 | 72 36 m 834 | 72 324 l 835 | stroke 836 | grestore 837 | [] 0 setdash 838 | gsave 839 | /o { 840 | gsave 841 | newpath 842 
| translate 843 | 0.5 setlinewidth 844 | 1 setlinejoin 845 | 0 setlinecap 846 | 0 0 m 847 | 0 4 l 848 | gsave 849 | 0.000 setgray 850 | fill 851 | grestore 852 | stroke 853 | grestore 854 | } bind def 855 | 72 36 o 856 | grestore 857 | gsave 858 | /o { 859 | gsave 860 | newpath 861 | translate 862 | 0.5 setlinewidth 863 | 1 setlinejoin 864 | 0 setlinecap 865 | 0 0 m 866 | 0 -4 l 867 | gsave 868 | 0.000 setgray 869 | fill 870 | grestore 871 | stroke 872 | grestore 873 | } bind def 874 | 72 324 o 875 | grestore 876 | /BitstreamVeraSans-Roman findfont 877 | 16.000 scalefont 878 | setfont 879 | gsave 880 | 68.531250 19.843750 translate 881 | 0.000000 rotate 882 | 0.000000 0.000000 m /one glyphshow 883 | grestore 884 | [1 3] 0 setdash 885 | gsave 886 | 446.4 288 72 36 clipbox 887 | 183.6 36 m 888 | 183.6 324 l 889 | stroke 890 | grestore 891 | [] 0 setdash 892 | gsave 893 | /o { 894 | gsave 895 | newpath 896 | translate 897 | 0.5 setlinewidth 898 | 1 setlinejoin 899 | 0 setlinecap 900 | 0 0 m 901 | 0 4 l 902 | gsave 903 | 0.000 setgray 904 | fill 905 | grestore 906 | stroke 907 | grestore 908 | } bind def 909 | 183.6 36 o 910 | grestore 911 | gsave 912 | /o { 913 | gsave 914 | newpath 915 | translate 916 | 0.5 setlinewidth 917 | 1 setlinejoin 918 | 0 setlinecap 919 | 0 0 m 920 | 0 -4 l 921 | gsave 922 | 0.000 setgray 923 | fill 924 | grestore 925 | stroke 926 | grestore 927 | } bind def 928 | 183.6 324 o 929 | grestore 930 | gsave 931 | 179.896875 19.843750 translate 932 | 0.000000 rotate 933 | 0.000000 0.000000 m /two glyphshow 934 | grestore 935 | [1 3] 0 setdash 936 | gsave 937 | 446.4 288 72 36 clipbox 938 | 295.2 36 m 939 | 295.2 324 l 940 | stroke 941 | grestore 942 | [] 0 setdash 943 | gsave 944 | /o { 945 | gsave 946 | newpath 947 | translate 948 | 0.5 setlinewidth 949 | 1 setlinejoin 950 | 0 setlinecap 951 | 0 0 m 952 | 0 4 l 953 | gsave 954 | 0.000 setgray 955 | fill 956 | grestore 957 | stroke 958 | grestore 959 | } bind def 960 | 295.2 36 o 961 | grestore 
962 | gsave 963 | /o { 964 | gsave 965 | newpath 966 | translate 967 | 0.5 setlinewidth 968 | 1 setlinejoin 969 | 0 setlinecap 970 | 0 0 m 971 | 0 -4 l 972 | gsave 973 | 0.000 setgray 974 | fill 975 | grestore 976 | stroke 977 | grestore 978 | } bind def 979 | 295.2 324 o 980 | grestore 981 | gsave 982 | 291.356250 19.843750 translate 983 | 0.000000 rotate 984 | 0.000000 0.000000 m /three glyphshow 985 | grestore 986 | [1 3] 0 setdash 987 | gsave 988 | 446.4 288 72 36 clipbox 989 | 406.8 36 m 990 | 406.8 324 l 991 | stroke 992 | grestore 993 | [] 0 setdash 994 | gsave 995 | /o { 996 | gsave 997 | newpath 998 | translate 999 | 0.5 setlinewidth 1000 | 1 setlinejoin 1001 | 0 setlinecap 1002 | 0 0 m 1003 | 0 4 l 1004 | gsave 1005 | 0.000 setgray 1006 | fill 1007 | grestore 1008 | stroke 1009 | grestore 1010 | } bind def 1011 | 406.8 36 o 1012 | grestore 1013 | gsave 1014 | /o { 1015 | gsave 1016 | newpath 1017 | translate 1018 | 0.5 setlinewidth 1019 | 1 setlinejoin 1020 | 0 setlinecap 1021 | 0 0 m 1022 | 0 -4 l 1023 | gsave 1024 | 0.000 setgray 1025 | fill 1026 | grestore 1027 | stroke 1028 | grestore 1029 | } bind def 1030 | 406.8 324 o 1031 | grestore 1032 | gsave 1033 | 402.550000 19.843750 translate 1034 | 0.000000 rotate 1035 | 0.000000 0.000000 m /four glyphshow 1036 | grestore 1037 | [1 3] 0 setdash 1038 | gsave 1039 | 446.4 288 72 36 clipbox 1040 | 518.4 36 m 1041 | 518.4 324 l 1042 | stroke 1043 | grestore 1044 | [] 0 setdash 1045 | gsave 1046 | /o { 1047 | gsave 1048 | newpath 1049 | translate 1050 | 0.5 setlinewidth 1051 | 1 setlinejoin 1052 | 0 setlinecap 1053 | 0 0 m 1054 | 0 4 l 1055 | gsave 1056 | 0.000 setgray 1057 | fill 1058 | grestore 1059 | stroke 1060 | grestore 1061 | } bind def 1062 | 518.4 36 o 1063 | grestore 1064 | gsave 1065 | /o { 1066 | gsave 1067 | newpath 1068 | translate 1069 | 0.5 setlinewidth 1070 | 1 setlinejoin 1071 | 0 setlinecap 1072 | 0 0 m 1073 | 0 -4 l 1074 | gsave 1075 | 0.000 setgray 1076 | fill 1077 | grestore 1078 | stroke 
1079 | grestore 1080 | } bind def 1081 | 518.4 324 o 1082 | grestore 1083 | gsave 1084 | 514.626562 19.843750 translate 1085 | 0.000000 rotate 1086 | 0.000000 0.000000 m /five glyphshow 1087 | grestore 1088 | gsave 1089 | 222.700000 -0.640625 translate 1090 | 0.000000 rotate 1091 | 0.000000 0.000000 m /N glyphshow 1092 | 11.968750 0.000000 m /u glyphshow 1093 | 22.109375 0.000000 m /m glyphshow 1094 | 37.695312 0.000000 m /b glyphshow 1095 | 47.851562 0.000000 m /e glyphshow 1096 | 57.695312 0.000000 m /r glyphshow 1097 | 64.273438 0.000000 m /space glyphshow 1098 | 69.359375 0.000000 m /o glyphshow 1099 | 79.148438 0.000000 m /f glyphshow 1100 | 84.781250 0.000000 m /space glyphshow 1101 | 89.867188 0.000000 m /B glyphshow 1102 | 100.843750 0.000000 m /r glyphshow 1103 | 107.421875 0.000000 m /a glyphshow 1104 | 117.226562 0.000000 m /n glyphshow 1105 | 127.367188 0.000000 m /c glyphshow 1106 | 136.164062 0.000000 m /h glyphshow 1107 | grestore 1108 | [1 3] 0 setdash 1109 | gsave 1110 | 446.4 288 72 36 clipbox 1111 | 72 36 m 1112 | 518.4 36 l 1113 | stroke 1114 | grestore 1115 | [] 0 setdash 1116 | gsave 1117 | /o { 1118 | gsave 1119 | newpath 1120 | translate 1121 | 0.5 setlinewidth 1122 | 1 setlinejoin 1123 | 0 setlinecap 1124 | 0 0 m 1125 | 4 0 l 1126 | gsave 1127 | 0.000 setgray 1128 | fill 1129 | grestore 1130 | stroke 1131 | grestore 1132 | } bind def 1133 | 72 36 o 1134 | grestore 1135 | gsave 1136 | /o { 1137 | gsave 1138 | newpath 1139 | translate 1140 | 0.5 setlinewidth 1141 | 1 setlinejoin 1142 | 0 setlinecap 1143 | 0 0 m 1144 | -4 0 l 1145 | gsave 1146 | 0.000 setgray 1147 | fill 1148 | grestore 1149 | stroke 1150 | grestore 1151 | } bind def 1152 | 518.4 36 o 1153 | grestore 1154 | gsave 1155 | 49.703125 31.585938 translate 1156 | 0.000000 rotate 1157 | 0.000000 0.000000 m /two glyphshow 1158 | 10.179688 0.000000 m /four glyphshow 1159 | grestore 1160 | [1 3] 0 setdash 1161 | gsave 1162 | 446.4 288 72 36 clipbox 1163 | 72 72 m 1164 | 518.4 72 l 1165 | 
stroke 1166 | grestore 1167 | [] 0 setdash 1168 | gsave 1169 | /o { 1170 | gsave 1171 | newpath 1172 | translate 1173 | 0.5 setlinewidth 1174 | 1 setlinejoin 1175 | 0 setlinecap 1176 | 0 0 m 1177 | 4 0 l 1178 | gsave 1179 | 0.000 setgray 1180 | fill 1181 | grestore 1182 | stroke 1183 | grestore 1184 | } bind def 1185 | 72 72 o 1186 | grestore 1187 | gsave 1188 | /o { 1189 | gsave 1190 | newpath 1191 | translate 1192 | 0.5 setlinewidth 1193 | 1 setlinejoin 1194 | 0 setlinecap 1195 | 0 0 m 1196 | -4 0 l 1197 | gsave 1198 | 0.000 setgray 1199 | fill 1200 | grestore 1201 | stroke 1202 | grestore 1203 | } bind def 1204 | 518.4 72 o 1205 | grestore 1206 | gsave 1207 | 50.203125 67.585938 translate 1208 | 0.000000 rotate 1209 | 0.000000 0.000000 m /two glyphshow 1210 | 10.179688 0.000000 m /five glyphshow 1211 | grestore 1212 | [1 3] 0 setdash 1213 | gsave 1214 | 446.4 288 72 36 clipbox 1215 | 72 108 m 1216 | 518.4 108 l 1217 | stroke 1218 | grestore 1219 | [] 0 setdash 1220 | gsave 1221 | /o { 1222 | gsave 1223 | newpath 1224 | translate 1225 | 0.5 setlinewidth 1226 | 1 setlinejoin 1227 | 0 setlinecap 1228 | 0 0 m 1229 | 4 0 l 1230 | gsave 1231 | 0.000 setgray 1232 | fill 1233 | grestore 1234 | stroke 1235 | grestore 1236 | } bind def 1237 | 72 108 o 1238 | grestore 1239 | gsave 1240 | /o { 1241 | gsave 1242 | newpath 1243 | translate 1244 | 0.5 setlinewidth 1245 | 1 setlinejoin 1246 | 0 setlinecap 1247 | 0 0 m 1248 | -4 0 l 1249 | gsave 1250 | 0.000 setgray 1251 | fill 1252 | grestore 1253 | stroke 1254 | grestore 1255 | } bind def 1256 | 518.4 108 o 1257 | grestore 1258 | gsave 1259 | 49.812500 103.585938 translate 1260 | 0.000000 rotate 1261 | 0.000000 0.000000 m /two glyphshow 1262 | 10.179688 0.000000 m /six glyphshow 1263 | grestore 1264 | [1 3] 0 setdash 1265 | gsave 1266 | 446.4 288 72 36 clipbox 1267 | 72 144 m 1268 | 518.4 144 l 1269 | stroke 1270 | grestore 1271 | [] 0 setdash 1272 | gsave 1273 | /o { 1274 | gsave 1275 | newpath 1276 | translate 1277 | 0.5 
setlinewidth 1278 | 1 setlinejoin 1279 | 0 setlinecap 1280 | 0 0 m 1281 | 4 0 l 1282 | gsave 1283 | 0.000 setgray 1284 | fill 1285 | grestore 1286 | stroke 1287 | grestore 1288 | } bind def 1289 | 72 144 o 1290 | grestore 1291 | gsave 1292 | /o { 1293 | gsave 1294 | newpath 1295 | translate 1296 | 0.5 setlinewidth 1297 | 1 setlinejoin 1298 | 0 setlinecap 1299 | 0 0 m 1300 | -4 0 l 1301 | gsave 1302 | 0.000 setgray 1303 | fill 1304 | grestore 1305 | stroke 1306 | grestore 1307 | } bind def 1308 | 518.4 144 o 1309 | grestore 1310 | gsave 1311 | 50.171875 139.585938 translate 1312 | 0.000000 rotate 1313 | 0.000000 0.000000 m /two glyphshow 1314 | 10.179688 0.000000 m /seven glyphshow 1315 | grestore 1316 | [1 3] 0 setdash 1317 | gsave 1318 | 446.4 288 72 36 clipbox 1319 | 72 180 m 1320 | 518.4 180 l 1321 | stroke 1322 | grestore 1323 | [] 0 setdash 1324 | gsave 1325 | /o { 1326 | gsave 1327 | newpath 1328 | translate 1329 | 0.5 setlinewidth 1330 | 1 setlinejoin 1331 | 0 setlinecap 1332 | 0 0 m 1333 | 4 0 l 1334 | gsave 1335 | 0.000 setgray 1336 | fill 1337 | grestore 1338 | stroke 1339 | grestore 1340 | } bind def 1341 | 72 180 o 1342 | grestore 1343 | gsave 1344 | /o { 1345 | gsave 1346 | newpath 1347 | translate 1348 | 0.5 setlinewidth 1349 | 1 setlinejoin 1350 | 0 setlinecap 1351 | 0 0 m 1352 | -4 0 l 1353 | gsave 1354 | 0.000 setgray 1355 | fill 1356 | grestore 1357 | stroke 1358 | grestore 1359 | } bind def 1360 | 518.4 180 o 1361 | grestore 1362 | gsave 1363 | 49.890625 175.585938 translate 1364 | 0.000000 rotate 1365 | 0.000000 0.000000 m /two glyphshow 1366 | 10.179688 0.000000 m /eight glyphshow 1367 | grestore 1368 | [1 3] 0 setdash 1369 | gsave 1370 | 446.4 288 72 36 clipbox 1371 | 72 216 m 1372 | 518.4 216 l 1373 | stroke 1374 | grestore 1375 | [] 0 setdash 1376 | gsave 1377 | /o { 1378 | gsave 1379 | newpath 1380 | translate 1381 | 0.5 setlinewidth 1382 | 1 setlinejoin 1383 | 0 setlinecap 1384 | 0 0 m 1385 | 4 0 l 1386 | gsave 1387 | 0.000 setgray 1388 | 
fill 1389 | grestore 1390 | stroke 1391 | grestore 1392 | } bind def 1393 | 72 216 o 1394 | grestore 1395 | gsave 1396 | /o { 1397 | gsave 1398 | newpath 1399 | translate 1400 | 0.5 setlinewidth 1401 | 1 setlinejoin 1402 | 0 setlinecap 1403 | 0 0 m 1404 | -4 0 l 1405 | gsave 1406 | 0.000 setgray 1407 | fill 1408 | grestore 1409 | stroke 1410 | grestore 1411 | } bind def 1412 | 518.4 216 o 1413 | grestore 1414 | gsave 1415 | 49.921875 211.585938 translate 1416 | 0.000000 rotate 1417 | 0.000000 0.000000 m /two glyphshow 1418 | 10.179688 0.000000 m /nine glyphshow 1419 | grestore 1420 | [1 3] 0 setdash 1421 | gsave 1422 | 446.4 288 72 36 clipbox 1423 | 72 252 m 1424 | 518.4 252 l 1425 | stroke 1426 | grestore 1427 | [] 0 setdash 1428 | gsave 1429 | /o { 1430 | gsave 1431 | newpath 1432 | translate 1433 | 0.5 setlinewidth 1434 | 1 setlinejoin 1435 | 0 setlinecap 1436 | 0 0 m 1437 | 4 0 l 1438 | gsave 1439 | 0.000 setgray 1440 | fill 1441 | grestore 1442 | stroke 1443 | grestore 1444 | } bind def 1445 | 72 252 o 1446 | grestore 1447 | gsave 1448 | /o { 1449 | gsave 1450 | newpath 1451 | translate 1452 | 0.5 setlinewidth 1453 | 1 setlinejoin 1454 | 0 setlinecap 1455 | 0 0 m 1456 | -4 0 l 1457 | gsave 1458 | 0.000 setgray 1459 | fill 1460 | grestore 1461 | stroke 1462 | grestore 1463 | } bind def 1464 | 518.4 252 o 1465 | grestore 1466 | gsave 1467 | 49.906250 247.585938 translate 1468 | 0.000000 rotate 1469 | 0.000000 0.000000 m /three glyphshow 1470 | 10.179688 0.000000 m /zero glyphshow 1471 | grestore 1472 | [1 3] 0 setdash 1473 | gsave 1474 | 446.4 288 72 36 clipbox 1475 | 72 288 m 1476 | 518.4 288 l 1477 | stroke 1478 | grestore 1479 | [] 0 setdash 1480 | gsave 1481 | /o { 1482 | gsave 1483 | newpath 1484 | translate 1485 | 0.5 setlinewidth 1486 | 1 setlinejoin 1487 | 0 setlinecap 1488 | 0 0 m 1489 | 4 0 l 1490 | gsave 1491 | 0.000 setgray 1492 | fill 1493 | grestore 1494 | stroke 1495 | grestore 1496 | } bind def 1497 | 72 288 o 1498 | grestore 1499 | gsave 1500 | 
/o { 1501 | gsave 1502 | newpath 1503 | translate 1504 | 0.5 setlinewidth 1505 | 1 setlinejoin 1506 | 0 setlinecap 1507 | 0 0 m 1508 | -4 0 l 1509 | gsave 1510 | 0.000 setgray 1511 | fill 1512 | grestore 1513 | stroke 1514 | grestore 1515 | } bind def 1516 | 518.4 288 o 1517 | grestore 1518 | gsave 1519 | 50.328125 283.585938 translate 1520 | 0.000000 rotate 1521 | 0.000000 0.000000 m /three glyphshow 1522 | 10.179688 0.000000 m /one glyphshow 1523 | grestore 1524 | [1 3] 0 setdash 1525 | gsave 1526 | 446.4 288 72 36 clipbox 1527 | 72 324 m 1528 | 518.4 324 l 1529 | stroke 1530 | grestore 1531 | [] 0 setdash 1532 | gsave 1533 | /o { 1534 | gsave 1535 | newpath 1536 | translate 1537 | 0.5 setlinewidth 1538 | 1 setlinejoin 1539 | 0 setlinecap 1540 | 0 0 m 1541 | 4 0 l 1542 | gsave 1543 | 0.000 setgray 1544 | fill 1545 | grestore 1546 | stroke 1547 | grestore 1548 | } bind def 1549 | 72 324 o 1550 | grestore 1551 | gsave 1552 | /o { 1553 | gsave 1554 | newpath 1555 | translate 1556 | 0.5 setlinewidth 1557 | 1 setlinejoin 1558 | 0 setlinecap 1559 | 0 0 m 1560 | -4 0 l 1561 | gsave 1562 | 0.000 setgray 1563 | fill 1564 | grestore 1565 | stroke 1566 | grestore 1567 | } bind def 1568 | 518.4 324 o 1569 | grestore 1570 | gsave 1571 | 50.453125 319.585938 translate 1572 | 0.000000 rotate 1573 | 0.000000 0.000000 m /three glyphshow 1574 | 10.179688 0.000000 m /two glyphshow 1575 | grestore 1576 | gsave 1577 | 41.375000 148.117188 translate 1578 | 90.000000 rotate 1579 | 0.000000 0.000000 m /E glyphshow 1580 | 10.109375 0.000000 m /r glyphshow 1581 | 16.437500 0.000000 m /r glyphshow 1582 | 22.640625 0.000000 m /o glyphshow 1583 | 32.429688 0.000000 m /r glyphshow 1584 | 39.007812 0.000000 m /parenleft glyphshow 1585 | 45.250000 0.000000 m /percent glyphshow 1586 | 60.453125 0.000000 m /parenright glyphshow 1587 | grestore 1588 | 1.000 setlinewidth 1589 | gsave 1590 | 72 324 m 1591 | 518.4 324 l 1592 | stroke 1593 | grestore 1594 | gsave 1595 | 518.4 36 m 1596 | 518.4 324 l 
1597 | stroke 1598 | grestore 1599 | gsave 1600 | 72 36 m 1601 | 518.4 36 l 1602 | stroke 1603 | grestore 1604 | gsave 1605 | 72 36 m 1606 | 72 324 l 1607 | stroke 1608 | grestore 1609 | gsave 1610 | 367.133 252.659 m 1611 | 508.8 252.659 l 1612 | 508.8 314.4 l 1613 | 367.133 314.4 l 1614 | 367.133 252.659 l 1615 | cl 1616 | gsave 1617 | 1.000 setgray 1618 | fill 1619 | grestore 1620 | stroke 1621 | grestore 1622 | 2 setlinecap 1623 | 0.839 0.153 0.157 setrgbcolor 1624 | gsave 1625 | 380.573 299.002 m 1626 | 407.453 299.002 l 1627 | stroke 1628 | grestore 1629 | 0.500 setlinewidth 1630 | 0 setlinecap 1631 | 0.000 setgray 1632 | gsave 1633 | /o { 1634 | gsave 1635 | newpath 1636 | translate 1637 | 0.5 setlinewidth 1638 | 1 setlinejoin 1639 | 0 setlinecap 1640 | 0 -4 m 1641 | 1.06081 -4 2.07832 -3.57853 2.82843 -2.82843 c 1642 | 3.57853 -2.07832 4 -1.06081 4 0 c 1643 | 4 1.06081 3.57853 2.07832 2.82843 2.82843 c 1644 | 2.07832 3.57853 1.06081 4 0 4 c 1645 | -1.06081 4 -2.07832 3.57853 -2.82843 2.82843 c 1646 | -3.57853 2.07832 -4 1.06081 -4 0 c 1647 | -4 -1.06081 -3.57853 -2.07832 -2.82843 -2.82843 c 1648 | -2.07832 -3.57853 -1.06081 -4 0 -4 c 1649 | cl 1650 | gsave 1651 | 0.839 0.153 0.157 setrgbcolor 1652 | fill 1653 | grestore 1654 | stroke 1655 | grestore 1656 | } bind def 1657 | 380.573 299.002 o 1658 | 407.453 299.002 o 1659 | grestore 1660 | /BitstreamVeraSans-Roman findfont 1661 | 19.200 scalefont 1662 | setfont 1663 | gsave 1664 | 428.573125 292.282500 translate 1665 | 0.000000 rotate 1666 | 0.000000 0.000000 m /parenleft glyphshow 1667 | 7.510132 0.000000 m /O glyphshow 1668 | 22.661987 0.000000 m /N glyphshow 1669 | 37.061890 0.000000 m /E glyphshow 1670 | 49.224731 0.000000 m /parenright glyphshow 1671 | grestore 1672 | 1.000 setlinewidth 1673 | 2 setlinecap 1674 | 1.000 0.498 0.055 setrgbcolor 1675 | gsave 1676 | 380.573 271.012 m 1677 | 407.453 271.012 l 1678 | stroke 1679 | grestore 1680 | 0.500 setlinewidth 1681 | 0 setlinejoin 1682 | 0 setlinecap 
1683 | 0.000 setgray 1684 | gsave 1685 | /o { 1686 | gsave 1687 | newpath 1688 | translate 1689 | 0.5 setlinewidth 1690 | 0 setlinejoin 1691 | 0 setlinecap 1692 | -4.44089e-16 -5.65685 m 1693 | 5.65685 0 l 1694 | 4.44089e-16 5.65685 l 1695 | -5.65685 8.88178e-16 l 1696 | cl 1697 | gsave 1698 | 1.000 0.498 0.055 setrgbcolor 1699 | fill 1700 | grestore 1701 | stroke 1702 | grestore 1703 | } bind def 1704 | 380.573 271.012 o 1705 | 407.453 271.012 o 1706 | grestore 1707 | gsave 1708 | 428.573125 264.291875 translate 1709 | 0.000000 rotate 1710 | 0.000000 0.000000 m /parenleft glyphshow 1711 | 7.510132 0.000000 m /O glyphshow 1712 | 22.661987 0.000000 m /N glyphshow 1713 | 37.061890 0.000000 m /E glyphshow 1714 | 49.224731 0.000000 m /hyphen glyphshow 1715 | 56.170898 0.000000 m /E glyphshow 1716 | 68.333740 0.000000 m /parenright glyphshow 1717 | grestore 1718 | 1719 | end 1720 | showpage 1721 | --------------------------------------------------------------------------------