├── .gitattributes ├── .gitignore ├── LICENSE ├── Leaky.py ├── README.md ├── data_utils.py ├── dropout.py ├── feature_used.py ├── fine_grained_nn.csv ├── fine_grained_tune.py ├── initialization.py ├── naive_nn.csv ├── neural_net.py ├── paper ├── 1212.0901v2.pdf ├── AnalysisSingleLayerUnsupervisedFeatureLearning.pdf ├── DropConnect Neural Network.html ├── DropConnect Neural Network_files │ ├── MathJax.js │ ├── convergenceCost.jpeg │ ├── dropRate400_400.jpeg │ ├── modelSize.jpeg │ ├── nn.jpg │ ├── nn_dc.jpg │ └── nn_do.jpg ├── PReLU.pdf ├── Unsupervised Feature Learning with C-SVDDNet.pdf ├── adam.pdf ├── co-adaptation of feature detectors.pdf ├── dropout.pdf └── effcient backprop.pdf ├── redo.py ├── report ├── IEEEtran.cls ├── dropout accuracy.eps ├── dropout loss_history.eps ├── dropout-eps-converted-to.pdf ├── dropout.eps ├── kmeans_acc-eps-converted-to.pdf ├── kmeans_acc.eps ├── kmeans_his-eps-converted-to.pdf ├── kmeans_his.eps ├── loss-eps-converted-to.pdf ├── loss.eps ├── mp1_Yihui He.aux ├── mp1_Yihui He.dvi ├── mp1_Yihui He.pdf ├── mp1_Yihui He.tex ├── mp1_Yihui He.tex.backup ├── parameters └── plot_tf.pdf ├── tryhere.ipynb ├── tune_naive.py └── zca.py /.gitattributes: -------------------------------------------------------------------------------- 1 | report/* linguist-vendored 2 | paper/* linguist-vendored 3 | kmeans-demo/* linguist-vendored 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | *.pickle 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | #dataset 61 | datasets/ 62 | kmeans_demo/cifar-10-batches-mat/ 63 | #docs 64 | *.csv 65 | #vim 66 | *.swp 67 | 68 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Yihui He 何宜晖 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Leaky.py: -------------------------------------------------------------------------------- 1 | from data_utils import load_CIFAR10 2 | from neural_net import * 3 | import matplotlib.pyplot as plt 4 | 5 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000): 6 | """ 7 | Load the CIFAR-10 dataset from disk and perform preprocessing to prepare 8 | it for the two-layer neural net classifier. These are the same steps as 9 | we used for the SVM, but condensed to a single function. 10 | """ 11 | # Load the raw CIFAR-10 data 12 | cifar10_dir = './datasets/cifar-10-batches-py' 13 | X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir) 14 | 15 | # Subsample the data 16 | mask = range(num_training, num_training + num_validation) 17 | X_val = X_train[mask] 18 | y_val = y_train[mask] 19 | mask = range(num_training) 20 | X_train = X_train[mask] 21 | y_train = y_train[mask] 22 | mask = range(num_test) 23 | X_test = X_test[mask] 24 | y_test = y_test[mask] 25 | 26 | # Normalize the data: subtract the mean image 27 | mean_image = np.mean(X_train, axis=0) 28 | X_train -= mean_image 29 | X_val -= mean_image 30 | X_test -= mean_image 31 | 32 | # Reshape data to rows 33 | X_train = X_train.reshape(num_training, -1) 34 | X_val = X_val.reshape(num_validation, -1) 35 | X_test = X_test.reshape(num_test, -1) 36 | 37 | return X_train, y_train, X_val, y_val, X_test, y_test 38 | 39 | 40 | # Invoke the above function to get our data. 
41 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data()
42 | print 'Train data shape: ', X_train.shape
43 | print 'Train labels shape: ', y_train.shape
44 | print 'Validation data shape: ', X_val.shape
45 | print 'Validation labels shape: ', y_val.shape
46 | print 'Test data shape: ', X_test.shape
47 | print 'Test labels shape: ', y_test.shape
48 | 
49 | input_size = 32 * 32 * 3
50 | hidden_size = 50
51 | num_classes = 10
52 | 
53 | # for method in ["Nesterov momentum","rmsprop"]:
54 | # net = TwoLayerNet(input_size, 500, num_classes,1e-5)
55 | # stats = net.train(X_train, y_train, X_val, y_val,
56 | # num_iters=1000, batch_size=100,
57 | # learning_rate=1e-4, learning_rate_decay=0.95,
58 | # reg=0.8, verbose=True,update="momentum",arg=0.9)
59 | # val_acc = (net.predict(X_train) == y_train).mean()
60 | # print 'Train accuracy: ', val_acc
61 | # val_acc = (net.predict(X_val) == y_val).mean()
62 | # print 'Validation accuracy: ', val_acc
63 | # val_acc = (net.predict(X_test) == y_test).mean()
64 | # print 'Test accuracy: ', val_acc
65 | 
66 | net = TwoLayerNet(input_size, 500, num_classes,1e-5)
67 | stats = net.train(X_train, y_train, X_val, y_val,
68 |         num_iters=10000, batch_size=100,
69 |         learning_rate=1e-4, learning_rate_decay=0.95,
70 |         reg=0, verbose=True,update="momentum",arg=0.9,dropout=0.5,activation='leaky')
71 | val_acc = (net.predict(X_train) == y_train).mean()
72 | print 'Train accuracy: ', val_acc
73 | val_acc = (net.predict(X_val) == y_val).mean()
74 | print 'Validation accuracy: ', val_acc
75 | val_acc = (net.predict(X_test) == y_test).mean()
76 | print 'Test accuracy: ', val_acc
77 | 
78 | #Plot the loss function and train / validation accuracies
79 | # plt.plot(stats['loss_history'])
80 | # plt.title('Loss history')
81 | # plt.xlabel('Iteration')
82 | # plt.ylabel('Loss')
83 | # plt.savefig("dropout loss_history.eps")
84 | 
85 | # plt.subplot(2, 1, 2)
86 | # plt.plot(stats['train_acc_history'], label='train')
87 | # plt.plot(stats['val_acc_history'], label='val')
88 | # plt.title('Classification accuracy history')
89 | # plt.xlabel('Epoch')
90 | # plt.ylabel('Classification accuracy')
91 | # plt.savefig('dropout accuracy.eps')
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # http://yihui-he.github.io/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/
--------------------------------------------------------------------------------
/data_utils.py:
--------------------------------------------------------------------------------
1 | import cPickle as pickle
2 | import numpy as np
3 | import os
4 | from scipy.misc import imread
5 | 
6 | def load_CIFAR_batch(filename):
7 |     """ load single batch of cifar """
8 |     with open(filename, 'rb') as f:
9 |         datadict = pickle.load(f)
10 |         X = datadict['data']
11 |         Y = datadict['labels']
12 |         X = X.reshape(10000, 3, 32, 32).transpose(0,2,3,1).astype("float")
13 |         Y = np.array(Y)
14 |         return X, Y
15 | 
16 | def load_CIFAR10(ROOT):
17 |     """ load all of cifar """
18 |     xs = []
19 |     ys = []
20 |     for b in range(1,6):
21 |         f = os.path.join(ROOT, 'data_batch_%d' % (b, ))
22 |         X, Y = load_CIFAR_batch(f)
23 |         xs.append(X)
24 |         ys.append(Y)
25 |     Xtr = np.concatenate(xs)
26 |     Ytr = np.concatenate(ys)
27 |     del X, Y
28 |     Xte, Yte = load_CIFAR_batch(os.path.join(ROOT, 'test_batch'))
29 |     return Xtr, Ytr, Xte, Yte
30 | 
31 | 
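# A minimal usage sketch (editor's addition, not part of the original file),
# assuming the python-version CIFAR-10 archive is unpacked under ./datasets
# as the other scripts expect. load_CIFAR10 stacks the five training batches
# and the held-out test batch:
#
#   Xtr, Ytr, Xte, Yte = load_CIFAR10('./datasets/cifar-10-batches-py')
#   print Xtr.shape   # (50000, 32, 32, 3)
#   print Ytr.shape   # (50000,)
#   print Xte.shape   # (10000, 32, 32, 3)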
-------------------------------------------------------------------------------- /dropout.py: -------------------------------------------------------------------------------- 1 | from data_utils import load_CIFAR10 2 | from neural_net import * 3 | import matplotlib.pyplot as plt 4 | 5 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000): 6 | """ 7 | Load the CIFAR-10 dataset from disk and perform preprocessing to prepare 8 | it for the two-layer neural net classifier. These are the same steps as 9 | we used for the SVM, but condensed to a single function. 10 | """ 11 | # Load the raw CIFAR-10 data 12 | cifar10_dir = './datasets/cifar-10-batches-py' 13 | X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir) 14 | 15 | # Subsample the data 16 | mask = range(num_training, num_training + num_validation) 17 | X_val = X_train[mask] 18 | y_val = y_train[mask] 19 | mask = range(num_training) 20 | X_train = X_train[mask] 21 | y_train = y_train[mask] 22 | mask = range(num_test) 23 | X_test = X_test[mask] 24 | y_test = y_test[mask] 25 | 26 | # Normalize the data: subtract the mean image 27 | mean_image = np.mean(X_train, axis=0) 28 | X_train -= mean_image 29 | X_val -= mean_image 30 | X_test -= mean_image 31 | 32 | # Reshape data to rows 33 | X_train = X_train.reshape(num_training, -1) 34 | X_val = X_val.reshape(num_validation, -1) 35 | X_test = X_test.reshape(num_test, -1) 36 | 37 | return X_train, y_train, X_val, y_val, X_test, y_test 38 | 39 | 40 | # Invoke the above function to get our data. 41 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data() 42 | print 'Train data shape: ', X_train.shape 43 | print 'Train labels shape: ', y_train.shape 44 | print 'Validation data shape: ', X_val.shape 45 | print 'Validation labels shape: ', y_val.shape 46 | print 'Test data shape: ', X_test.shape 47 | print 'Test labels shape: ', y_test.shape 48 | 49 | input_size = 32 * 32 * 3 50 | hidden_size = 50 51 | num_classes = 10 52 | 53 | # for method in ["Nesterov momentum","rmsprop"]: 54 | # net = TwoLayerNet(input_size, 500, num_classes,1e-5) 55 | # stats = net.train(X_train, y_train, X_val, y_val, 56 | # num_iters=1000, batch_size=100, 57 | # learning_rate=1e-4, learning_rate_decay=0.95, 58 | # reg=0.8, verbose=True,update="momentum",arg=0.9) 59 | # val_acc = (net.predict(X_train) == y_train).mean() 60 | # print 'Train accuracy: ', val_acc 61 | # val_acc = (net.predict(X_val) == y_val).mean() 62 | # print 'Validation accuracy: ', val_acc 63 | # val_acc = (net.predict(X_test) == y_test).mean() 64 | # print 'Test accuracy: ', val_acc 65 | 66 | net = TwoLayerNet(input_size, 500, num_classes,1e-5) 67 | stats = net.train(X_train, y_train, X_val, y_val, 68 | num_iters=20000, batch_size=200, 69 | learning_rate=1e-4, learning_rate_decay=0.95, 70 | reg=0, verbose=True,update="momentum",arg=0.9,dropout=0.7) 71 | val_acc = (net.predict(X_train) == y_train).mean() 72 | print 'Train accuracy: ', val_acc 73 | val_acc = (net.predict(X_val) == y_val).mean() 74 | print 'Validation accuracy: ', val_acc 75 | val_acc = (net.predict(X_test) == y_test).mean() 76 | print 'Test accuracy: ', val_acc 77 | 78 | #Plot the loss function and train / validation accuracies 79 | plt.plot(stats['loss_history']) 80 | plt.title('Loss history') 81 | plt.xlabel('Iteration') 82 | plt.ylabel('Loss') 83 | plt.savefig("dropout loss_history.eps") 84 | 85 | plt.subplot(2, 1, 2) 86 | plt.plot(stats['train_acc_history'], label='train') 87 | plt.plot(stats['val_acc_history'], label='val') 88 | 
plt.title('Classification accuracy history')
89 | plt.xlabel('Epoch')
90 | plt.ylabel('Classification accuracy')
91 | plt.savefig('dropout accuracy.eps')
92 | 
--------------------------------------------------------------------------------
/feature_used.py:
--------------------------------------------------------------------------------
1 | # quick sanity check: load the precomputed feature pickle and report shapes
2 | import cPickle as pickle
3 | with open("features.pickle") as f:
4 |     [trainXC,valXC,testXC,y_train,y_val,y_test]=pickle.load(f)
5 | 
6 | print "train",trainXC.shape
7 | print "val",valXC.shape
8 | print "test",testXC.shape
9 | # In[125]:
10 | 
11 | from neural_net import *
12 | import matplotlib.pyplot as plt
13 | input_size = trainXC.shape[1]
14 | num_classes = 10
15 | 
16 | 
17 | # In[126]:
18 | import os.path
19 | if not os.path.isfile("feats.csv"):
20 |     with open("feats.csv","w") as f:
21 |         f.write("hidden_size,momentum,dropout,learning_rate,learning_rate_decay,train_acc,val_acc"+'\n')  # header now matches the row written in tryArgs
22 | 
23 | def tryArgs(hidden_size,momentum,dropout,learning_rate,learning_rate_decay):
24 | 
25 |     net = TwoLayerNet(input_size, hidden_size, num_classes,1e-4)
26 |     # Train the network
27 |     stats = net.train(trainXC, y_train, valXC, y_val,
28 |             num_iters=15000, batch_size=128,
29 |             learning_rate=learning_rate, learning_rate_decay=learning_rate_decay,
30 |             reg=0, verbose=False,update="momentum",arg=momentum,dropout=dropout)
31 | 
32 |     # Predict on the validation set
33 |     val_acc = (net.predict(valXC) == y_val).mean()
34 |     train_acc = (net.predict(trainXC) == y_train).mean()
35 |     f=open("feats.csv","a")
36 |     tune=[hidden_size,momentum,dropout,learning_rate,learning_rate_decay]
37 |     f.write(str(tune+[train_acc,val_acc]).strip("[]")+'\n')
38 |     f.close()
39 |     with open("feats/"+ str([val_acc]+tune).strip("[]")+'.pickle','w') as f:
40 |         pickle.dump(stats,f)
41 | 
42 |     print progress, str(tune+[train_acc,val_acc]).strip("[]")
43 | hidden_size =[500,300,150]
44 | momentum=[.9,.95]
45 | dropout=[.1,.3,.5]
46 | learning_rate=[5e-4*i for i in range(1,4,20)]  # NOTE: range(1,4,20) yields only [1], so a single rate (5e-4) is swept; the stop/step arguments look transposed
47 | learning_rate_decay=[.95,.99,.999]
48 | progress=0
49 | for i in hidden_size:
50 |     for j in momentum:
51 |         for k in dropout:
52 |             for m in learning_rate:
53 |                 for n in learning_rate_decay:
54 |                     if progress <=1:
55 |                         progress+=1
56 |                         continue
57 |                     progress+=1
58 |                     tryArgs(i,j,k,m,n)
59 | 
60 | # In[121]:
61 | if False:
62 |     #Plot the loss function and train / validation accuracies
63 |     plt.plot(stats['loss_history'])
64 |     plt.title('Loss history')
65 |     plt.xlabel('Iteration')
66 |     plt.ylabel('Loss')
67 |     plt.show()
68 |     #plt.savefig("dropout loss_history.eps")
69 | 
70 |     plt.plot(stats['train_acc_history'], label='train')
71 |     plt.plot(stats['val_acc_history'], label='val')
72 |     plt.title('Classification accuracy history')
73 |     plt.xlabel('Epoch')
74 |     plt.show()
75 |     plt.ylabel('Classification accuracy')
76 |     #plt.savefig('dropout accuracy.eps')
77 | 
78 | 
--------------------------------------------------------------------------------
/fine_grained_nn.csv:
--------------------------------------------------------------------------------
1 | 300,0.001,0.0125,0.504
2 | 300,0.001,0.025,0.501
3 | 300,0.001,0.05,0.491
4 | 300,0.001,0.1,0.474
5 | 300,0.001,0.2,0.482
6 | 300,0.001,0.4,0.492
7 | 300,0.001,0.8,0.49
8 | 300,0.001,1.6,0.474
9 | 300,0.001,3.2,0.461
10 | 300,0.001,6.4,0.421
11 | 310,0.001,0.0125,0.463
12 | 310,0.001,0.025,0.504
13 | 310,0.001,0.05,0.502
14 | 310,0.001,0.1,0.5
15 | 310,0.001,0.2,0.496
16 | 310,0.001,0.4,0.501
17 | 310,0.001,0.8,0.477
18 | 310,0.001,1.6,0.472
19 | 310,0.001,3.2,0.466
20 | 
310,0.001,6.4,0.443 21 | 320,0.001,0.0125,0.49 22 | 320,0.001,0.025,0.486 23 | 320,0.001,0.05,0.495 24 | 320,0.001,0.1,0.484 25 | 320,0.001,0.2,0.48 26 | 320,0.001,0.4,0.486 27 | 320,0.001,0.8,0.488 28 | 320,0.001,1.6,0.479 29 | 320,0.001,3.2,0.455 30 | 320,0.001,6.4,0.44 31 | 330,0.001,0.0125,0.48 32 | 330,0.001,0.025,0.475 33 | 330,0.001,0.05,0.493 34 | 330,0.001,0.1,0.481 35 | 330,0.001,0.2,0.469 36 | 330,0.001,0.4,0.485 37 | 330,0.001,0.8,0.473 38 | 330,0.001,1.6,0.474 39 | 330,0.001,3.2,0.461 40 | 330,0.001,6.4,0.432 41 | 340,0.001,0.0125,0.508 42 | 340,0.001,0.025,0.493 43 | 340,0.001,0.05,0.498 44 | 340,0.001,0.1,0.493 45 | 340,0.001,0.2,0.484 46 | 340,0.001,0.4,0.486 47 | 340,0.001,0.8,0.49 48 | 340,0.001,1.6,0.467 49 | 340,0.001,3.2,0.455 50 | 340,0.001,6.4,0.425 51 | 350,0.001,0.0125,0.498 52 | 350,0.001,0.025,0.484 53 | 350,0.001,0.05,0.478 54 | 350,0.001,0.1,0.49 55 | 350,0.001,0.2,0.456 56 | 350,0.001,0.4,0.492 57 | 350,0.001,0.8,0.478 58 | 350,0.001,1.6,0.465 59 | 350,0.001,3.2,0.453 60 | 350,0.001,6.4,0.428 61 | 360,0.001,0.0125,0.507 62 | 360,0.001,0.025,0.506 63 | 360,0.001,0.05,0.486 64 | 360,0.001,0.1,0.494 65 | 360,0.001,0.2,0.494 66 | 360,0.001,0.4,0.48 67 | 360,0.001,0.8,0.474 68 | 360,0.001,1.6,0.481 69 | 360,0.001,3.2,0.465 70 | 360,0.001,6.4,0.42 71 | 370,0.001,0.0125,0.491 72 | 370,0.001,0.025,0.489 73 | 370,0.001,0.05,0.492 74 | 370,0.001,0.1,0.485 75 | 370,0.001,0.2,0.496 76 | 370,0.001,0.4,0.498 77 | 370,0.001,0.8,0.471 78 | 370,0.001,1.6,0.481 79 | 370,0.001,3.2,0.468 80 | 370,0.001,6.4,0.443 81 | 380,0.001,0.0125,0.491 82 | 380,0.001,0.025,0.491 83 | 380,0.001,0.05,0.506 84 | 380,0.001,0.1,0.48 85 | 380,0.001,0.2,0.485 86 | 380,0.001,0.4,0.481 87 | 380,0.001,0.8,0.492 88 | 380,0.001,1.6,0.475 89 | 380,0.001,3.2,0.451 90 | 380,0.001,6.4,0.432 91 | 390,0.001,0.0125,0.507 92 | 390,0.001,0.025,0.474 93 | 390,0.001,0.05,0.484 94 | 390,0.001,0.1,0.511 95 | 390,0.001,0.2,0.499 96 | 390,0.001,0.4,0.486 97 | 390,0.001,0.8,0.483 98 | 390,0.001,1.6,0.463 99 | 390,0.001,3.2,0.462 100 | 390,0.001,6.4,0.434 101 | 400,0.001,0.0125,0.483 102 | 400,0.001,0.025,0.489 103 | 400,0.001,0.05,0.486 104 | 400,0.001,0.1,0.504 105 | 400,0.001,0.2,0.489 106 | 400,0.001,0.4,0.487 107 | 400,0.001,0.8,0.483 108 | 400,0.001,1.6,0.492 109 | 400,0.001,3.2,0.452 110 | 400,0.001,6.4,0.42 111 | 410,0.001,0.0125,0.479 112 | 410,0.001,0.025,0.489 113 | 410,0.001,0.05,0.482 114 | 410,0.001,0.1,0.494 115 | 410,0.001,0.2,0.483 116 | 410,0.001,0.4,0.493 117 | 410,0.001,0.8,0.466 118 | 410,0.001,1.6,0.467 119 | 410,0.001,3.2,0.452 120 | 410,0.001,6.4,0.418 121 | 420,0.001,0.0125,0.504 122 | 420,0.001,0.025,0.497 123 | 420,0.001,0.05,0.489 124 | 420,0.001,0.1,0.489 125 | 420,0.001,0.2,0.505 126 | 420,0.001,0.4,0.483 127 | 420,0.001,0.8,0.48 128 | 420,0.001,1.6,0.472 129 | 420,0.001,3.2,0.446 130 | 420,0.001,6.4,0.426 131 | 430,0.001,0.0125,0.502 132 | 430,0.001,0.025,0.5 133 | 430,0.001,0.05,0.495 134 | 430,0.001,0.1,0.503 135 | 430,0.001,0.2,0.484 136 | 430,0.001,0.4,0.501 137 | 430,0.001,0.8,0.48 138 | 430,0.001,1.6,0.468 139 | 430,0.001,3.2,0.447 140 | 430,0.001,6.4,0.426 141 | 440,0.001,0.0125,0.467 142 | 440,0.001,0.025,0.487 143 | 440,0.001,0.05,0.481 144 | 440,0.001,0.1,0.497 145 | 440,0.001,0.2,0.485 146 | 440,0.001,0.4,0.488 147 | 440,0.001,0.8,0.485 148 | 440,0.001,1.6,0.465 149 | 440,0.001,3.2,0.466 150 | 440,0.001,6.4,0.434 151 | -------------------------------------------------------------------------------- /fine_grained_tune.py: 
--------------------------------------------------------------------------------
1 | from neural_net import *
2 | from threading import *
3 | from data_utils import *
4 | 
5 | 
6 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000):
7 |     """
8 |     Load the CIFAR-10 dataset from disk and perform preprocessing to prepare
9 |     it for the two-layer neural net classifier. These are the same steps as
10 |     we used for the SVM, but condensed to a single function.
11 |     """
12 |     # Load the raw CIFAR-10 data
13 |     cifar10_dir = r"C:\Users\Pomodori\workspace\cifar-10-batches-py"  # raw string so the backslashes are not treated as escapes
14 |     X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)
15 | 
16 |     # Subsample the data
17 |     mask = range(num_training, num_training + num_validation)
18 |     X_val = X_train[mask]
19 |     y_val = y_train[mask]
20 |     mask = range(num_training)
21 |     X_train = X_train[mask]
22 |     y_train = y_train[mask]
23 |     mask = range(num_test)
24 |     X_test = X_test[mask]
25 |     y_test = y_test[mask]
26 | 
27 |     # Normalize the data: subtract the mean image
28 |     mean_image = np.mean(X_train, axis=0)
29 |     X_train -= mean_image
30 |     X_val -= mean_image
31 |     X_test -= mean_image
32 | 
33 |     # Reshape data to rows
34 |     X_train = X_train.reshape(num_training, -1)
35 |     X_val = X_val.reshape(num_validation, -1)
36 |     X_test = X_test.reshape(num_test, -1)
37 | 
38 |     return X_train, y_train, X_val, y_val, X_test, y_test
39 | 
40 | 
41 | # Invoke the above function to get our data.
42 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data()
43 | print 'Train data shape: ', X_train.shape
44 | print 'Train labels shape: ', y_train.shape
45 | print 'Validation data shape: ', X_val.shape
46 | print 'Validation labels shape: ', y_val.shape
47 | print 'Test data shape: ', X_test.shape
48 | print 'Test labels shape: ', y_test.shape
49 | 
50 | 
51 | 
52 | def tryArgs(hidden_size,learning_rate,reg):
53 |     net = TwoLayerNet(3072, hidden_size, 10)  # fixed: was TwoLayerNet(3072, i, 10), which silently used the loop global instead of the argument
54 | 
55 |     # Train the network
56 |     stats = net.train(X_train, y_train, X_val, y_val,
57 |             num_iters=1000, batch_size=200,
58 |             learning_rate=learning_rate, learning_rate_decay=0.95,
59 |             reg=reg, verbose=False)
60 | 
61 |     # Predict on the validation set
62 |     val_acc = (net.predict(X_val) == y_val).mean()
63 |     f=open("fine_grained_nn.csv","a")
64 |     f.write(str(hidden_size)+','+str(learning_rate)+','+str(reg)+','+str(val_acc)+'\n')
65 |     f.close()
66 |     print hidden_size,learning_rate,reg, val_acc
67 | 
68 | 
69 | hidden_size = range(300,450,10)
70 | reg=[0.05*2**i for i in range(-2,8)]
71 | open("fine_grained_nn.csv","w").close()  # fixed: start this run's results file empty (the original opened naive_nn.csv here, truncating the wrong file)
72 | for i in hidden_size:
73 |     for k in reg:
74 |         # t=Thread(target=tryArgs,args=(i,j,k))
75 |         # t.daemon=True
76 |         # t.start()
77 |         tryArgs(i,0.001,k)
--------------------------------------------------------------------------------
/initialization.py:
--------------------------------------------------------------------------------
1 | from data_utils import load_CIFAR10
2 | from neural_net import *
3 | import matplotlib.pyplot as plt
4 | 
5 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000):
6 |     """
7 |     Load the CIFAR-10 dataset from disk and perform preprocessing to prepare
8 |     it for the two-layer neural net classifier. These are the same steps as
9 |     we used for the SVM, but condensed to a single function.
10 | """ 11 | # Load the raw CIFAR-10 data 12 | cifar10_dir = './datasets/cifar-10-batches-py' 13 | X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir) 14 | 15 | # Subsample the data 16 | mask = range(num_training, num_training + num_validation) 17 | X_val = X_train[mask] 18 | y_val = y_train[mask] 19 | mask = range(num_training) 20 | X_train = X_train[mask] 21 | y_train = y_train[mask] 22 | mask = range(num_test) 23 | X_test = X_test[mask] 24 | y_test = y_test[mask] 25 | 26 | # Normalize the data: subtract the mean image 27 | mean_image = np.mean(X_train, axis=0) 28 | X_train -= mean_image 29 | X_val -= mean_image 30 | X_test -= mean_image 31 | 32 | # Reshape data to rows 33 | X_train = X_train.reshape(num_training, -1) 34 | X_val = X_val.reshape(num_validation, -1) 35 | X_test = X_test.reshape(num_test, -1) 36 | 37 | return X_train, y_train, X_val, y_val, X_test, y_test 38 | 39 | 40 | # Invoke the above function to get our data. 41 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data() 42 | print 'Train data shape: ', X_train.shape 43 | print 'Train labels shape: ', y_train.shape 44 | print 'Validation data shape: ', X_val.shape 45 | print 'Validation labels shape: ', y_val.shape 46 | print 'Test data shape: ', X_test.shape 47 | print 'Test labels shape: ', y_test.shape 48 | 49 | input_size = 32 * 32 * 3 50 | hidden_size = 50 51 | num_classes = 10 52 | 53 | # for method in ["Nesterov momentum","rmsprop"]: 54 | # net = TwoLayerNet(input_size, 500, num_classes,1e-5) 55 | # stats = net.train(X_train, y_train, X_val, y_val, 56 | # num_iters=1000, batch_size=100, 57 | # learning_rate=1e-4, learning_rate_decay=0.95, 58 | # reg=0.8, verbose=True,update="momentum",arg=0.9) 59 | # val_acc = (net.predict(X_train) == y_train).mean() 60 | # print 'Train accuracy: ', val_acc 61 | # val_acc = (net.predict(X_val) == y_val).mean() 62 | # print 'Validation accuracy: ', val_acc 63 | # val_acc = (net.predict(X_test) == y_test).mean() 64 | # print 'Test accuracy: ', val_acc 65 | methods=['normal','o'] 66 | for i in methods: 67 | net = TwoLayerNet(input_size, 500, num_classes,1e-5,init_method=i) 68 | stats = net.train(X_train, y_train, X_val, y_val, 69 | num_iters=1000, batch_size=100, 70 | learning_rate=1e-4, learning_rate_decay=0.95, 71 | reg=0, verbose=True,update="momentum",arg=0.9,dropout=0.5) 72 | val_acc = (net.predict(X_train) == y_train).mean() 73 | print 'Train accuracy: ', val_acc 74 | val_acc = (net.predict(X_val) == y_val).mean() 75 | print 'Validation accuracy: ', val_acc 76 | val_acc = (net.predict(X_test) == y_test).mean() 77 | print 'Test accuracy: ', val_acc 78 | 79 | #Plot the loss function and train / validation accuracies 80 | plt.plot(stats['loss_history']) 81 | plt.legend(methods) 82 | plt.title('Loss history') 83 | plt.xlabel('Iteration') 84 | plt.ylabel('Loss') 85 | plt.savefig("dropout loss_history.eps") 86 | 87 | # plt.subplot(2, 1, 2) 88 | # plt.plot(stats['train_acc_history'], label='train') 89 | # plt.plot(stats['val_acc_history'], label='val') 90 | # plt.title('Classification accuracy history') 91 | # plt.xlabel('Epoch') 92 | # plt.ylabel('Clasification accuracy') 93 | # plt.savefig('dropout accuracy.eps') 94 | -------------------------------------------------------------------------------- /naive_nn.csv: -------------------------------------------------------------------------------- 1 | 350,0.001,0.05,0.516 2 | 400,0.001,0.005,0.509 3 | 250,0.001,0.0005,0.505 4 | 250,0.001,0.05,0.501 5 | 150,0.001,0.005,0.5 6 | 500,0.001,0.05,0.5 7 | 
450,0.001,0.05,0.499 8 | 400,0.001,0.05,0.498 9 | 450,0.001,0.0005,0.498 10 | 450,0.001,0.005,0.497 11 | 200,0.001,0.5,0.496 12 | 450,0.001,0.5,0.494 13 | 500,0.001,0.5,0.494 14 | 550,0.001,0.05,0.49 15 | 200,0.001,0.0005,0.488 16 | 550,0.001,0.5,0.488 17 | 350,0.001,0.005,0.487 18 | 500,0.001,0.0005,0.487 19 | 550,0.001,0.005,0.486 20 | 150,0.001,0.05,0.484 21 | 400,0.001,0.0005,0.483 22 | 150,0.001,0.0005,0.482 23 | 500,0.001,0.005,0.482 24 | 250,0.001,0.5,0.481 25 | 350,0.001,0.0005,0.481 26 | 300,0.001,0.005,0.48 27 | 200,0.001,0.005,0.479 28 | 250,0.001,0.005,0.479 29 | 200,0.001,0.05,0.478 30 | 400,0.001,0.5,0.476 31 | 550,0.001,0.0005,0.476 32 | 150,0.001,0.5,0.474 33 | 300,0.001,0.05,0.474 34 | 300,0.001,0.5,0.469 35 | 350,0.001,0.5,0.468 36 | 300,0.001,0.0005,0.464 37 | 200,0.001,5,0.459 38 | 400,0.001,5,0.453 39 | 550,0.001,5,0.452 40 | 300,0.001,5,0.444 41 | 450,0.001,5,0.443 42 | 500,0.001,5,0.441 43 | 150,0.001,5,0.439 44 | 250,0.001,5,0.433 45 | 350,0.001,5,0.432 46 | 550,0.0001,0.5,0.322 47 | 500,0.0001,0.005,0.321 48 | 500,0.0001,0.05,0.32 49 | 500,0.0001,0.0005,0.319 50 | 500,0.0001,0.5,0.319 51 | 550,0.0001,0.005,0.318 52 | 450,0.0001,0.0005,0.317 53 | 450,0.0001,0.05,0.316 54 | 200,0.0001,0.005,0.315 55 | 400,0.0001,0.05,0.315 56 | 300,0.0001,0.0005,0.314 57 | 300,0.0001,0.005,0.313 58 | 350,0.0001,0.005,0.313 59 | 400,0.0001,0.0005,0.313 60 | 550,0.0001,0.0005,0.313 61 | 350,0.0001,0.0005,0.312 62 | 400,0.0001,0.5,0.312 63 | 450,0.0001,0.5,0.312 64 | 550,0.0001,0.05,0.312 65 | 400,0.0001,5,0.31 66 | 450,0.0001,0.005,0.31 67 | 550,0.0001,5,0.31 68 | 300,0.0001,0.05,0.309 69 | 500,0.0001,5,0.309 70 | 250,0.0001,0.5,0.307 71 | 350,0.0001,0.5,0.307 72 | 250,0.0001,0.0005,0.306 73 | 250,0.0001,0.05,0.306 74 | 350,0.0001,0.05,0.306 75 | 450,0.0001,5,0.306 76 | 150,0.0001,0.005,0.305 77 | 200,0.0001,0.05,0.305 78 | 400,0.0001,0.005,0.305 79 | 300,0.0001,0.5,0.304 80 | 200,0.0001,0.5,0.303 81 | 150,0.0001,0.0005,0.302 82 | 250,0.0001,0.005,0.302 83 | 200,0.0001,0.0005,0.301 84 | 150,0.0001,0.5,0.299 85 | 300,0.0001,5,0.299 86 | 150,0.0001,0.05,0.297 87 | 250,0.0001,5,0.295 88 | 350,0.0001,5,0.295 89 | 200,0.0001,5,0.287 90 | 150,0.0001,5,0.286 91 | 500,1.00E-05,50,0.263 92 | 250,0.001,50,0.261 93 | 450,1.00E-05,0.5,0.261 94 | 500,1.00E-05,5,0.261 95 | 300,0.001,50,0.26 96 | 400,1.00E-05,0.0005,0.26 97 | 450,1.00E-05,0.0005,0.26 98 | 450,1.00E-05,5,0.259 99 | 400,0.001,50,0.258 100 | 450,0.001,50,0.258 101 | 400,1.00E-05,0.05,0.257 102 | 500,0.001,50,0.257 103 | 250,1.00E-05,0.05,0.256 104 | 350,1.00E-05,5,0.256 105 | 550,0.001,50,0.256 106 | 300,1.00E-05,0.5,0.255 107 | 500,1.00E-05,0.005,0.255 108 | 150,0.001,50,0.254 109 | 350,0.001,50,0.253 110 | 550,1.00E-05,0.005,0.253 111 | 550,1.00E-05,0.5,0.253 112 | 200,1.00E-05,50,0.251 113 | 300,1.00E-05,0.0005,0.251 114 | 550,1.00E-05,50,0.251 115 | 300,1.00E-05,0.005,0.25 116 | 350,1.00E-05,0.005,0.25 117 | 500,1.00E-05,0.5,0.25 118 | 550,1.00E-05,0.0005,0.249 119 | 250,1.00E-05,5,0.247 120 | 400,1.00E-05,0.005,0.246 121 | 450,1.00E-05,0.005,0.245 122 | 250,1.00E-05,50,0.244 123 | 450,1.00E-05,50,0.243 124 | 150,1.00E-05,0.005,0.242 125 | 450,1.00E-05,0.05,0.242 126 | 200,1.00E-05,0.05,0.24 127 | 200,1.00E-05,5,0.24 128 | 150,1.00E-05,0.5,0.239 129 | 200,0.001,50,0.239 130 | 350,1.00E-05,0.05,0.239 131 | 500,1.00E-05,0.05,0.239 132 | 200,1.00E-05,0.0005,0.238 133 | 550,1.00E-05,0.05,0.238 134 | 400,1.00E-05,50,0.237 135 | 250,1.00E-05,0.005,0.236 136 | 150,1.00E-05,0.05,0.235 137 | 300,1.00E-05,0.05,0.233 138 | 550,1.00E-05,5,0.232 
139 | 150,1.00E-05,0.0005,0.231 140 | 300,1.00E-05,5,0.231 141 | 500,1.00E-05,0.0005,0.231 142 | 350,1.00E-05,0.5,0.229 143 | 400,1.00E-05,0.5,0.229 144 | 550,0.0001,50,0.229 145 | 250,1.00E-05,0.5,0.228 146 | 250,0.0001,50,0.228 147 | 300,0.0001,50,0.228 148 | 350,0.0001,50,0.227 149 | 400,0.0001,50,0.227 150 | 450,0.0001,50,0.227 151 | 150,1.00E-05,5,0.226 152 | 350,1.00E-05,50,0.226 153 | 250,1.00E-05,0.0005,0.225 154 | 350,1.00E-05,0.0005,0.225 155 | 500,0.0001,50,0.225 156 | 150,1.00E-05,50,0.223 157 | 150,0.0001,50,0.222 158 | 200,1.00E-05,0.005,0.222 159 | 200,0.0001,50,0.219 160 | 300,1.00E-05,50,0.217 161 | 400,1.00E-05,5,0.217 162 | 200,1.00E-05,0.5,0.215 163 | 150,0.01,5,0.134 164 | 350,0.01,0.05,0.131 165 | 450,0.01,0.005,0.122 166 | 500,0.01,0.5,0.122 167 | 300,0.01,0.005,0.12 168 | 400,0.01,50,0.117 169 | 550,0.01,0.005,0.117 170 | 200,0.01,0.0005,0.116 171 | 300,0.1,0.5,0.115 172 | 150,0.1,5,0.114 173 | 400,0.1,0.05,0.114 174 | 450,0.01,50,0.114 175 | 200,0.01,0.5,0.113 176 | 450,0.01,0.0005,0.112 177 | 550,0.01,0.05,0.11 178 | 150,0.1,0.0005,0.109 179 | 250,0.01,50,0.109 180 | 500,0.01,0.005,0.108 181 | 300,0.01,5,0.107 182 | 450,0.01,0.05,0.107 183 | 350,0.01,0.005,0.106 184 | 500,0.1,0.005,0.106 185 | 400,0.1,0.005,0.105 186 | 550,0.01,5,0.103 187 | 250,0.01,0.005,0.102 188 | 150,0.01,0.5,0.1 189 | 550,0.1,50,0.098 190 | 450,0.1,0.005,0.097 191 | 500,0.01,0.05,0.097 192 | 350,0.01,5,0.096 193 | 350,0.1,0.005,0.096 194 | 250,0.1,0.05,0.095 195 | 300,0.1,5,0.094 196 | 300,0.01,50,0.092 197 | 350,0.01,0.0005,0.092 198 | 400,0.01,0.005,0.092 199 | 200,0.1,5,0.091 200 | 150,0.01,0.05,0.09 201 | 200,0.01,0.05,0.09 202 | 250,0.1,5,0.09 203 | 300,0.01,0.0005,0.09 204 | 400,0.01,0.5,0.089 205 | 350,0.01,0.5,0.087 206 | 400,0.1,0.5,0.087 207 | 150,0.01,0.0005,0.085 208 | 150,0.01,0.005,0.083 209 | 350,0.1,50,0.083 210 | 200,0.01,5,0.082 211 | 500,0.1,0.05,0.082 212 | 150,0.1,0.005,0.081 213 | 450,0.1,0.05,0.081 214 | 500,0.01,5,0.081 215 | 150,0.01,50,0.08 216 | 300,0.1,0.05,0.08 217 | 500,0.01,50,0.08 218 | 400,0.01,0.05,0.079 219 | 500,0.1,0.5,0.079 220 | 250,0.1,0.5,0.078 221 | 550,0.01,0.0005,0.078 222 | 550,0.01,50,0.078 223 | 450,0.1,50,0.076 224 | 350,0.01,50,0.075 225 | 300,0.1,0.005,0.074 226 | 150,0.1,0.5,0.073 227 | 550,0.01,0.5,0.072 228 | 450,0.1,5,0.071 229 | 450,0.1,0.5,0.068 230 | 500,0.01,0.0005,0.068 231 | 450,0.01,5,0.067 232 | 350,0.1,0.0005,0.066 233 | 400,0.1,5,0.066 234 | 250,0.01,0.05,0.065 235 | 350,0.1,5,0.065 236 | 500,0.1,5,0.065 237 | 450,0.1,0.0005,0.063 238 | 200,0.1,0.05,0.062 239 | 200,0.1,0.005,0.061 240 | 250,0.01,0.5,0.061 241 | 250,0.1,50,0.061 242 | 400,0.01,5,0.061 243 | 500,0.1,50,0.061 244 | 250,0.01,5,0.06 245 | 250,0.01,0.0005,0.059 246 | 450,0.01,0.5,0.059 247 | 550,0.1,5,0.057 248 | 400,0.1,0.0005,0.054 249 | 200,0.01,0.005,0.053 250 | 500,0.1,0.0005,0.053 251 | 550,0.1,0.0005,0.053 252 | 250,0.1,0.005,0.052 253 | 350,0.1,0.05,0.052 254 | 550,0.1,0.05,0.052 255 | 350,0.1,0.5,0.05 256 | 550,0.1,0.005,0.05 257 | 150,0.1,0.05,0.049 258 | 400,0.01,0.0005,0.049 259 | 300,0.01,0.5,0.045 260 | 400,0.1,50,0.045 261 | 200,0.01,50,0.044 262 | 200,0.1,0.5,0.044 263 | 200,0.1,0.0005,0.043 264 | 300,0.1,0.0005,0.043 265 | 150,0.1,50,0.042 266 | 300,0.1,50,0.042 267 | 200,0.1,50,0.038 268 | 300,0.01,0.05,0.038 269 | 250,0.1,0.0005,0.037 270 | 550,0.1,0.5,0.035 271 | -------------------------------------------------------------------------------- /neural_net.py: -------------------------------------------------------------------------------- 1 | import 
numpy as np 2 | import matplotlib.pyplot as plt 3 | import time 4 | 5 | class TwoLayerNet(object): 6 | """ 7 | A two-layer fully-connected neural network. The net has an input dimension of 8 | N, a hidden layer dimension of H, and performs classification over C classes. 9 | We train the network with a softmax loss function and L2 regularization on the 10 | weight matrices. The network uses a ReLU nonlinearity after the first fully 11 | connected layer. 12 | 13 | In other words, the network has the following architecture: 14 | 15 | input - fully connected layer - ReLU - fully connected layer - softmax 16 | 17 | The outputs of the second fully-connected layer are the scores for each class. 18 | """ 19 | 20 | def __init__(self, input_size, hidden_size, output_size, std=1e-4, 21 | init_method="Normal"): 22 | """ 23 | Initialize the model. Weights are initialized to small random values and 24 | biases are initialized to zero. Weights and biases are stored in the 25 | variable self.params, which is a dictionary with the following keys: 26 | 27 | W1: First layer weights; has shape (D, H) 28 | b1: First layer biases; has shape (H,) 29 | W2: Second layer weights; has shape (H, C) 30 | b2: Second layer biases; has shape (C,) 31 | 32 | Inputs: 33 | - input_size: The dimension D of the input data. 34 | - hidden_size: The number of neurons H in the hidden layer. 35 | - output_size: The number of classes C. 36 | """ 37 | self.params = {} 38 | self.params['W1'] = std * np.random.randn(input_size, hidden_size) 39 | self.params['b1'] = np.zeros(hidden_size) 40 | self.params['W2'] = std * np.random.randn(hidden_size, output_size) 41 | self.params['b2'] = np.zeros(output_size) 42 | 43 | #special initialization 44 | if init_method=="i": 45 | self.params['W1']=np.random.randn(input_size,hidden_size)/np.sqrt(input_size) 46 | self.params['W2']=np.random.randn(hidden_size,output_size)/np.sqrt(hidden_size) 47 | elif init_method=="io": 48 | self.params['W1']=np.random.randn(input_size,hidden_size)*np.sqrt(2.0/(input_size+hidden_size)) 49 | self.params['W2']=np.random.randn(hidden_size,output_size)*np.sqrt(2.0/(hidden_size+output_size)) 50 | elif init_method=="ReLU": 51 | self.params['W1']=np.random.randn(input_size,hidden_size)*np.sqrt(2.0/input_size) 52 | self.params['W2']=np.random.randn(hidden_size,output_size)*np.sqrt(2.0/(hidden_size+output_size)) 53 | 54 | def loss(self, X, y=None, reg=0.0, dropout=0, dropMask=None,activation='Relu'): 55 | """ 56 | Compute the loss and gradients for a two layer fully connected neural 57 | network. 58 | 59 | Inputs: 60 | - X: Input data of shape (N, D). Each X[i] is a training sample. 61 | - y: Vector of training labels. y[i] is the label for X[i], and each y[i] is 62 | an integer in the range 0 <= y[i] < C. This parameter is optional; if it 63 | is not passed then we only return scores, and if it is passed then we 64 | instead return the loss and gradients. 65 | - reg: Regularization strength. 66 | 67 | Returns: 68 | If y is None, return a matrix scores of shape (N, C) where scores[i, c] is 69 | the score for class c on input X[i]. 70 | 71 | If y is not None, instead return a tuple of: 72 | - loss: Loss (data loss and regularization loss) for this batch of training 73 | samples. 74 | - grads: Dictionary mapping parameter names to gradients of those parameters 75 | with respect to the loss function; has the same keys as self.params. 
76 | """ 77 | # Unpack variables from the params dictionary 78 | W1, b1 = self.params['W1'], self.params['b1'] 79 | W2, b2 = self.params['W2'], self.params['b2'] 80 | N, D = X.shape 81 | 82 | # Compute the forward pass 83 | scores = None 84 | ############################################################################# 85 | # TODO: Perform the forward pass, computing the class scores for the input. # 86 | # Store the result in the scores variable, which should be an array of # 87 | # shape (N, C). # 88 | ############################################################################# 89 | 90 | 91 | if activation=='leaky': 92 | inp=X.dot(W1)+b1 93 | a2=np.maximum(inp,.01*inp) 94 | else: 95 | a2=np.maximum(X.dot(W1)+b1,0) 96 | 97 | if dropout != 0 and dropout<1: 98 | a2*=(np.random.randn(*a2.shape)1: 100 | W2*=dropMask['W2']/(dropout-1) 101 | b2*=dropMask['b2']/(dropout-1) 102 | # for convinient this is drop connect , drop rate= dropout-1 103 | 104 | scores=a2.dot(W2)+b2 # z3 105 | ############################################################################# 106 | # END OF YOUR CODE # 107 | ############################################################################# 108 | 109 | # If the targets are not given then jump out, we're done 110 | if y is None: 111 | return scores 112 | 113 | # Compute the loss 114 | loss = None 115 | ############################################################################# 116 | # TODO: Finish the forward pass, and compute the loss. This should include # 117 | # both the data loss and L2 regularization for W1 and W2. Store the result # 118 | # in the variable loss, which should be a scalar. Use the Softmax # 119 | # classifier loss. So that your results match ours, multiply the # 120 | # regularization loss by 0.5 # 121 | ############################################################################# 122 | #do a softmax first 123 | if dropout>1: 124 | print dropMask['W2'] 125 | exp_scores=np.exp(scores) 126 | 127 | a3=exp_scores/(np.sum(exp_scores,1))[:,None] #h(x) 128 | 129 | loss=-np.sum(np.log(a3[range(len(a3)),y]))/len(a3)+\ 130 | 0.5*reg*(np.sum(np.power(W1,2))+np.sum(np.power(W2,2))) 131 | ############################################################################# 132 | # END OF YOUR CODE # 133 | ############################################################################# 134 | 135 | # Backward pass: compute gradients 136 | grads = {} 137 | ############################################################################# 138 | # TODO: Compute the backward pass, computing the derivatives of the weights # 139 | # and biases. Store the results in the grads dictionary. 
For example, # 140 | # grads['W1'] should store the gradient on W1, and be a matrix of same size # 141 | ############################################################################# 142 | delta_3=a3 143 | delta_3[range(len(a3)),y]=a3[range(len(a3)),y]-1 144 | delta_3/=len(a3) 145 | grads['W2']=a2.T.dot(delta_3)+reg*W2 146 | grads['b2']=np.sum(delta_3,0) 147 | 148 | 149 | dF=np.ones(np.shape(a2)) 150 | if activation=='leaky': 151 | dF[a2<0.0]=0.01 152 | else: 153 | dF[a2==0.0]=0 #activation res a2 has been ReLUed 154 | 155 | delta_2=delta_3.dot(W2.T)*dF 156 | grads['W1']=X.T.dot(delta_2)+reg*W1 157 | grads['b1']=np.sum(delta_2,0) 158 | ############################################################################# 159 | # END OF YOUR CODE # 160 | ############################################################################# 161 | 162 | return loss, grads 163 | 164 | def train(self, X, y, X_val, y_val, 165 | learning_rate=1e-3, learning_rate_decay=0.95, 166 | reg=1e-5, num_iters=100, 167 | batch_size=200, verbose=False, 168 | 169 | update="SGD",arg=.99, 170 | dropout=0, 171 | activation='ReLU'): 172 | """ 173 | Train this neural network using stochastic gradient descent. 174 | 175 | Inputs: 176 | - X: A numpy array of shape (N, D) giving training data. 177 | - y: A numpy array f shape (N,) giving training labels; y[i] = c means that 178 | X[i] has label c, where 0 <= c < C. 179 | - X_val: A numpy array of shape (N_val, D) giving validation data. 180 | - y_val: A numpy array of shape (N_val,) giving validation labels. 181 | - learning_rate: Scalar giving learning rate for optimization. 182 | - learning_rate_decay: Scalar giving factor used to decay the learning rate 183 | after each epoch. 184 | - reg: Scalar giving regularization strength. 185 | - num_iters: Number of steps to take when optimizing. 186 | - batch_size: Number of training examples to use per step. 187 | - verbose: boolean; if true print progress during optimization. 188 | """ 189 | num_train = X.shape[0] 190 | iterations_per_epoch = max(num_train / batch_size, 1) 191 | 192 | # Use SGD to optimize the parameters in self.model 193 | loss_history = [] 194 | train_acc_history = [] 195 | val_acc_history = [] 196 | #### for tracking top model 197 | top_params=dict() 198 | cache_params=dict() 199 | top_acc=0 200 | cache=dict() 201 | dropMask=dict() 202 | start_time=time.time() 203 | #### 204 | 205 | for it in xrange(num_iters): 206 | X_batch = None 207 | y_batch = None 208 | 209 | ######################################################################### 210 | # TODO: Create a random minibatch of training data and labels, storing # 211 | # them in X_batch and y_batch respectively. 
# 212 | ######################################################################### 213 | if num_train >= batch_size: 214 | rand_idx=np.random.choice(num_train,batch_size) 215 | else: 216 | rand_idx=np.random.choice(num_train,batch_size,replace=True) 217 | X_batch=X[rand_idx] 218 | y_batch=y[rand_idx] 219 | 220 | if dropout>1: 221 | for param in ['W2','b2']: 222 | dropMask[param]=np.random.randn(*self.params[param].shape)<(dropout-1) 223 | ######################################################################### 224 | # END OF YOUR CODE # 225 | ######################################################################### 226 | 227 | # Compute loss and gradients using the current minibatch 228 | loss, grads = self.loss(X_batch, y=y_batch, reg=reg, dropout=dropout,dropMask=dropMask,activation=activation) 229 | loss_history.append(loss) 230 | 231 | ######################################################################### 232 | # TODO: Use the gradients in the grads dictionary to update the # 233 | # parameters of the network (stored in the dictionary self.params) # 234 | # using stochastic gradient descent. You'll need to use the gradients # 235 | # stored in the grads dictionary defined above. # 236 | ######################################################################### 237 | if np.isnan(grads['W1']).any() or np.isnan(grads['W2']).any() or \ 238 | np.isnan(grads['b1']).any() or np.isnan(grads['b2']).any(): 239 | continue 240 | #cache_params=self.params.copy() 241 | dx=None 242 | for param in self.params: 243 | if update=="SGD": 244 | dx=learning_rate*grads[param] 245 | #self.params[param]-=learning_rate*grads[param] 246 | 247 | elif update=="momentum": 248 | if not param in cache: 249 | cache[param]=np.zeros(grads[param].shape) 250 | cache[param]=arg*cache[param]-learning_rate*grads[param] 251 | dx=-cache[param] 252 | #self.params[param]+=cache[param] 253 | 254 | elif update=="Nesterov momentum": 255 | if not param in cache: 256 | cache[param]=np.zeros(grads[param].shape) 257 | v_prev = cache[param] # back this up 258 | cache[param] = arg * cache[param] - learning_rate * grads[param] # velocity update stays the same 259 | dx=arg * v_prev - (1 + arg) * cache[param] 260 | #self.params[param] += -arg * v_prev + (1 + arg) * cache[param] # position update changes form 261 | 262 | elif update=="rmsprop": 263 | if not param in cache: 264 | cache[param]=np.zeros(grads[param].shape) 265 | cache[param]=arg*cache[param]+(1-arg)*np.power(grads[param],2) 266 | dx=learning_rate*grads[param]/np.sqrt(cache[param]+1e-8) 267 | #self.params[param]-=learning_rate*grads[param]/np.sqrt(cache[param]+1e-8) 268 | 269 | 270 | elif update=="Adam": 271 | print "update error" 272 | 273 | elif update=="Adagrad": 274 | print "update error" 275 | 276 | else: 277 | # if have time try more update methods 278 | print "choose update method!" 279 | if dropout>1: 280 | if param == 'W2' or param == 'b2': 281 | dx*=dropMask[param] 282 | self.params[param]-=dx 283 | #Bug: learning rate should not decay at first epoch 284 | it+=1 285 | ######################################################################### 286 | # END OF YOUR CODE # 287 | ######################################################################### 288 | 289 | if verbose and it % 100 == 0: 290 | print 'iteration %d / %d: loss %f' % (it, num_iters, loss) 291 | 292 | # Every epoch, check train and val accuracy and decay learning rate. 
293 |             if it % iterations_per_epoch == 0:
294 |                 # Check accuracy
295 |                 train_acc = (self.predict(X_batch) == y_batch).mean()
296 |                 val_acc = (self.predict(X_val) == y_val).mean()
297 |                 train_acc_history.append(train_acc)
298 |                 val_acc_history.append(val_acc)
299 | 
300 |                 # Decay learning rate
301 |                 learning_rate *= learning_rate_decay
302 | 
303 |                 ### update top model
304 |                 if val_acc > top_acc:
305 |                     top_acc = val_acc
306 |                     top_params={k: v.copy() for k, v in self.params.items()}  # snapshot the arrays: a plain dict.copy() is shallow, and the in-place updates above would overwrite it
307 | 
308 |                 if verbose:
309 |                     print ('train_acc %f, val_acc %f, time %.1f' % (train_acc, val_acc,(time.time()-start_time)/60.0))
310 | 
311 |         if top_params: self.params=top_params  # guard: very short runs may never reach an epoch boundary
312 |         ### finally, restore the parameters with the best validation accuracy
313 | 
314 |         return {
315 |             'loss_history': loss_history,
316 |             'train_acc_history': train_acc_history,
317 |             'val_acc_history': val_acc_history,
318 |         }
319 | 
320 |     def predict(self, X):
321 |         """
322 |         Use the trained weights of this two-layer network to predict labels for
323 |         data points. For each data point we predict scores for each of the C
324 |         classes, and assign each data point to the class with the highest score.
325 | 
326 |         Inputs:
327 |         - X: A numpy array of shape (N, D) giving N D-dimensional data points to
328 |           classify.
329 | 
330 |         Returns:
331 |         - y_pred: A numpy array of shape (N,) giving predicted labels for each of
332 |           the elements of X. For all i, y_pred[i] = c means that X[i] is predicted
333 |           to have class c, where 0 <= c < C.
334 |         """
335 |         y_pred = None
336 | 
337 |         ###########################################################################
338 |         # TODO: Implement this function; it should be VERY simple!                #
339 |         ###########################################################################
340 |         y_pred=np.argmax(np.maximum(0,(X.dot(self.params["W1"])+self.params['b1']))\
341 |             .dot(self.params['W2'])+self.params['b2'],1) # z3 (note: prediction always takes the ReLU path, even for nets trained with activation='leaky')
342 |         ###########################################################################
343 |         #                             END OF YOUR CODE                            #
344 |         ###########################################################################
345 | 
346 |         return y_pred
347 | 
348 |     def accuracy(self,X,y):
349 |         """
350 |         Use the trained model to predict labels for X, and compute the accuracy.
351 | 
352 |         Inputs:
353 |         - X: A numpy array of shape (N, D) giving N D-dimensional data points to
354 |           classify.
355 |         - y: A numpy array of shape (N,) giving the correct labels.
356 | 357 | Returns: 358 | - acc: Accuracy 359 | 360 | """ 361 | acc = (self.predict(X) == y).mean() 362 | 363 | return acc 364 | 365 | def gradient_check(self,X,y): 366 | realGrads=dict() 367 | _,grads=self.loss(X,y) 368 | keys=['W1','b1', 369 | 'W2','b2'] 370 | for key in keys: 371 | W1=self.params[key] 372 | W1_grad=[] 373 | delta=1e-4 374 | if len(np.shape(W1))==2: 375 | for i in range(np.shape(W1)[0]): 376 | grad=[] 377 | for j in range(np.shape(W1)[1]): 378 | W1[i,j]+=delta 379 | self.params[key]=W1 380 | l_plus,_=self.loss(X,y) 381 | W1[i,j]-=2*delta 382 | self.params[key]=W1 383 | l_minus,_=self.loss(X,y) 384 | grad.append((l_plus-l_minus)/2.0/delta) 385 | W1[i,j]+=delta 386 | W1_grad.append(grad) 387 | else: 388 | for i in range(len(W1)): 389 | W1[i]+=delta 390 | self.params[key]=W1 391 | l_plus,_=self.loss(X,y) 392 | W1[i]-=2*delta 393 | self.params[key]=W1 394 | l_minus,_=self.loss(X,y) 395 | W1_grad.append((l_plus-l_minus)/2.0/delta) 396 | W1[i]+=delta 397 | 398 | print(W1_grad) 399 | print(grads[key]) 400 | print key,"error",np.mean(np.sum(np.power((W1_grad-grads[key]),2),len(np.shape(W1))-1)\ 401 | /np.sum(np.power((W1_grad+grads[key]),2),len(np.shape(W1))-1)) 402 | -------------------------------------------------------------------------------- /paper/1212.0901v2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/1212.0901v2.pdf -------------------------------------------------------------------------------- /paper/AnalysisSingleLayerUnsupervisedFeatureLearning.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/AnalysisSingleLayerUnsupervisedFeatureLearning.pdf -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/convergenceCost.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/convergenceCost.jpeg -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/dropRate400_400.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/dropRate400_400.jpeg -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/modelSize.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/modelSize.jpeg -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/nn.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/nn.jpg -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/nn_dc.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/nn_dc.jpg -------------------------------------------------------------------------------- /paper/DropConnect Neural Network_files/nn_do.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/DropConnect Neural Network_files/nn_do.jpg -------------------------------------------------------------------------------- /paper/PReLU.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/PReLU.pdf -------------------------------------------------------------------------------- /paper/Unsupervised Feature Learning with C-SVDDNet.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/Unsupervised Feature Learning with C-SVDDNet.pdf -------------------------------------------------------------------------------- /paper/adam.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/adam.pdf -------------------------------------------------------------------------------- /paper/co-adaptation of feature detectors.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/co-adaptation of feature detectors.pdf -------------------------------------------------------------------------------- /paper/dropout.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/dropout.pdf -------------------------------------------------------------------------------- /paper/effcient backprop.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/paper/effcient backprop.pdf -------------------------------------------------------------------------------- /redo.py: -------------------------------------------------------------------------------- 1 | 2 | # coding: utf-8 3 | 4 | # In[59]: 5 | import sys 6 | from data_utils 
import load_CIFAR10
7 | from neural_net import *
8 | import matplotlib.pyplot as plt
9 | import time
10 | if len(sys.argv)!=2:
11 |     print "usage: python redo.py <dataset_dir>"
12 |     quit()
13 | dataset_dir=sys.argv[1]
14 | start_time=time.time()
15 | 
16 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000):
17 |     cifar10_dir = './'+dataset_dir+'/cifar-10-batches-py'
18 |     print cifar10_dir
19 |     X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)
20 |     mask = range(num_training, num_training + num_validation)
21 |     X_val = X_train[mask]
22 |     y_val = y_train[mask]
23 |     mask = range(num_training)
24 |     X_train = X_train[mask]
25 |     y_train = y_train[mask]
26 |     mask = range(num_test)
27 |     X_test = X_test[mask]
28 |     y_test = y_test[mask]
29 |     mean_image = np.mean(X_train, axis=0)
30 |     X_train -= mean_image
31 |     X_val -= mean_image
32 |     X_test -= mean_image
33 |     X_train=X_train.swapaxes(1,3)  # channel-first layout: the patch loop below iterates over colour planes
34 |     X_val=X_val.swapaxes(1,3)
35 |     X_test=X_test.swapaxes(1,3)
36 |     return X_train, y_train, X_val, y_val, X_test, y_test
37 | 
38 | 
39 | # Invoke the above function to get our data.
40 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data()
41 | print "finish loading"
42 | print 'Train data : ', X_train.shape
43 | print 'Validation data : ', X_val.shape
44 | print 'Test data: ', X_test.shape
45 | print "Time", (time.time()-start_time)/60.0
46 | rfSize = 6
47 | numCentroids=1600
48 | whitening=True
49 | numPatches = 400000
50 | CIFAR_DIM=[32,32,3]
51 | 
52 | #create the unsupervised training set: random rfSize x rfSize patches
53 | patches=[]
54 | for i in range(numPatches):
55 |     if(np.mod(i,10000) == 0):
56 |         print "sampling for Kmeans",i,"/",numPatches
57 |     start_r=np.random.randint(CIFAR_DIM[0]-rfSize)
58 |     start_c=np.random.randint(CIFAR_DIM[1]-rfSize)
59 |     patch=np.array([])
60 |     img=X_train[np.mod(i,X_train.shape[0])]
61 |     for layer in img:
62 |         patch=np.append(patch,layer[start_r:start_r+rfSize].T[start_c:start_c+rfSize].T.ravel())
63 |     patches.append(patch)
64 | patches=np.array(patches)
65 | #normalize patches
66 | patches=(patches-patches.mean(1)[:,None])/np.sqrt(patches.var(1)+10)[:,None]
67 | print "time",(time.time()-start_time)/60.0
68 | 
69 | # In[66]:
70 | #for csil
71 | del X_train, y_train, X_val, y_val, X_test, y_test
72 | #whitening
73 | print "whitening"
74 | [D,V]=np.linalg.eigh(np.cov(patches,rowvar=0))  # eigh: the covariance is symmetric, so this avoids the complex output np.linalg.eig can return
75 | 
76 | P = V.dot(np.diag(np.sqrt(1/(D + 0.1)))).dot(V.T)
77 | patches = patches.dot(P)
78 | 
79 | print "time",(time.time()-start_time)/60.0
80 | del D,V
81 | # In[ ]:
82 | 
83 | centroids=np.random.randn(numCentroids,patches.shape[1])*.1
84 | num_iters=50
85 | batch_size=1000  # process assignments in batches: the CSIL machines run out of memory otherwise
86 | for ite in range(num_iters):
87 |     print "kmeans iters",ite+1,"/",num_iters
88 |     # nearest-centroid trick: argmin_c |x-c|^2 = argmax_c (x.c - 0.5*|c|^2),
89 |     # because the |x|^2 term is the same for every centroid and can be omitted
90 |     hf_c2_sum=.5*np.power(centroids,2).sum(1)
91 |     counts=np.zeros(numCentroids)
92 |     summation=np.zeros_like(centroids)
93 |     for i in range(0,len(patches),batch_size):
94 |         last_i=min(i+batch_size,len(patches))
95 |         idx=np.argmax(patches[i:last_i].dot(centroids.T)-hf_c2_sum, axis=1)
96 |         S=np.zeros([last_i-i,numCentroids])
97 |         S[range(last_i-i), idx]=1  # one-hot cluster-assignment matrix
98 |         # (reuses idx rather than recomputing the argmax; the .T on the
99 |         #  1-D hf_c2_sum in the original was a no-op and is dropped)
100 |         summation+=S.T.dot(patches[i:last_i])
101 |         counts+=S.sum(0)
102 |     centroids=summation/counts[:,None]
103 |     centroids[counts==0]=0 # empty clusters give a divide-by-zero above; zero them out
104 |     # (note: once zeroed, a centroid stays zero for the remaining iterations)
105 | 
106 | 
107 | 
108 | 
109 | # In[82]:
110 | 
111 | def sliding(img,window=[6,6]): # im2col: gather every window x window patch of each color plane
112 | out=np.array([])
113 | for i in range(3):
114 | s=img.shape
115 | row=s[1]
116 | col=s[2]
117 | col_extent = col - window[1] + 1
118 | row_extent = row - window[0] + 1
119 | start_idx = np.arange(window[0])[:,None]*col + np.arange(window[1]) # flat indices of one patch's pixels
120 | offset_idx = np.arange(row_extent)[:,None]*col + np.arange(col_extent) # flat index of each patch's top-left corner
121 | if len(out)==0:
122 | out=np.take(img[i],start_idx.ravel()[:,None] + offset_idx.ravel())
123 | else:
124 | out=np.append(out,np.take(img[i],start_idx.ravel()[:,None] + offset_idx.ravel()),axis=0)
125 | return out
126 | 
127 | 
128 | # In[111]:
129 | 
130 | def extract_features(X_train):
131 | trainXC=[]
132 | idx=0
133 | for img in X_train:
134 | idx+=1
135 | if not np.mod(idx,1000):
136 | print "extract features",idx,'/',len(X_train)
137 | print "time",(time.time()-start_time)/60.0
138 | patches=sliding(img,[rfSize,rfSize]).T
139 | # normalize each patch, as at training time
140 | patches=(patches-patches.mean(1)[:,None])/(np.sqrt(patches.var(1)+10)[:,None])
141 | # map to the whitened space learned above
142 | patches=patches.dot(P)
143 | # squared distances via ||x-c||^2 = x2 - 2*x.c + c2
144 | x2=np.power(patches,2).sum(1)
145 | c2=np.power(centroids,2).sum(1)
146 | xc=patches.dot(centroids.T)
147 | 
148 | dist=np.sqrt(-2*xc+x2[:,None]+c2)
149 | u=dist.mean(1)
150 | patches=np.maximum(-dist+u[:,None],0) # "triangle" activation: max(mean distance - distance, 0)
151 | rs=CIFAR_DIM[0]-rfSize+1
152 | cs=CIFAR_DIM[1]-rfSize+1
153 | patches=np.reshape(patches,[rs,cs,-1])
154 | q=[] # sum-pool the activations over the four image quadrants
155 | q.append(patches[0:rs/2,0:cs/2].sum(0).sum(0))
156 | q.append(patches[0:rs/2,cs/2:cs-1].sum(0).sum(0))
157 | q.append(patches[rs/2:rs-1,0:cs/2].sum(0).sum(0))
158 | q.append(patches[rs/2:rs-1,cs/2:cs-1].sum(0).sum(0))
159 | q=np.array(q).ravel()
160 | trainXC.append(q)
161 | trainXC=np.array(trainXC)
162 | trainXC=(trainXC-trainXC.mean(1)[:,None])/(np.sqrt(trainXC.var(1)+.01)[:,None])
163 | return trainXC
164 | 
165 | 
166 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data()
167 | # In[112]:
168 | trainXC=extract_features(X_train)
169 | print "time",(time.time()-start_time)/60.0
170 | valXC=extract_features(X_val)
171 | 
172 | testXC=extract_features(X_test)
173 | 
174 | 
175 | # # save features
176 | 
177 | # In[131]:
178 | 
179 | #import cPickle as pickle
180 | #with open("features.pickle","w") as f:
181 | # pickle.dump([trainXC,valXC,testXC,y_train,y_val,y_test],f)
182 | 
183 | 
184 | # In[125]:
185 | 
186 | from neural_net import *
187 | 
188 | input_size = trainXC.shape[1]
189 | hidden_size = 200
190 | num_classes = 10
191 | 
192 | net = TwoLayerNet(input_size, hidden_size, num_classes,1e-4)
193 | stats = net.train(trainXC, y_train, valXC, y_val,
194 | num_iters=70000, batch_size=128,
195 | learning_rate=5e-4, learning_rate_decay=0.99,
196 | reg=0, verbose=True,update="momentum",arg=0.95,dropout=0.3)
197 | 
198 | 
199 | # In[126]:
200 | 
201 | val_acc = (net.predict(trainXC) == y_train).mean()
202 | print 'Train accuracy: ', val_acc
203 | val_acc = (net.predict(valXC) == y_val).mean()
204 | print 'Validation accuracy: ', val_acc
205 | 
206 | val_acc = (net.predict(testXC) == y_test).mean()
207 | print 'Test accuracy: ', val_acc
208 | 
209 | print "time",(time.time()-start_time)/60.0
210 | 
211 | # In[121]:
212 | 
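# The features fed to the net above come from the "triangle" encoding: each
# whitened patch gets K soft activations f_k = max(u - z_k, 0), where z_k is
# its distance to centroid k and u is the mean of the z's; the activations are
# then sum-pooled over the four image quadrants. A minimal sketch with toy
# shapes (a hypothetical 4x4 grid of patches and 3 centroids, not the real
# 27x27 grid and 1600 centroids):
_z = np.abs(np.random.randn(16, 3))           # toy patch-to-centroid distances
_f = np.maximum(_z.mean(1)[:, None] - _z, 0)  # only closer-than-average centroids fire
_f = _f.reshape(4, 4, 3)
_quads = [_f[:2, :2], _f[:2, 2:], _f[2:, :2], _f[2:, 2:]]
_pooled = np.array([q.sum(0).sum(0) for q in _quads]).ravel()
assert _pooled.shape == (12,)                 # 4 quadrants x 3 centroids
del _z, _f, _quads, _pooled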
213 | ##Plot the loss function and train / validation accuracies
214 | #plt.plot(stats['loss_history'])
215 | #plt.title('Loss history')
216 | #plt.xlabel('Iteration')
217 | #plt.ylabel('Loss')
218 | #plt.show()
219 | ##plt.savefig("dropout loss_history.eps")
220 | #
221 | #plt.plot(stats['train_acc_history'], label='train')
222 | #plt.plot(stats['val_acc_history'], label='val')
223 | #plt.title('Classification accuracy history')
224 | #plt.xlabel('Epoch')
225 | #plt.ylabel('Classification accuracy')
226 | #plt.show()
227 | ##plt.savefig('dropout accuracy.eps')
228 | 
229 | 
230 | # In[ ]:
231 | 
232 | 
233 | 
234 | 
-------------------------------------------------------------------------------- /report/dropout accuracy.eps: --------------------------------------------------------------------------------
[EPS figure, matplotlib 1.4.3 output; PostScript source omitted. Plot: "Classification accuracy history" — train (blue) and validation (green) accuracy vs. epoch; x-axis "Epoch" (0-20), y-axis "Classification accuracy" (0.40-0.80).]
-------------------------------------------------------------------------------- /report/dropout-eps-converted-to.pdf: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/dropout-eps-converted-to.pdf
-------------------------------------------------------------------------------- /report/dropout.eps: --------------------------------------------------------------------------------
[EPS figure, apparently identical to "dropout accuracy.eps" above (its %%Title still reads "dropout accuracy.eps"); duplicate PostScript source omitted.]
| 1 setlinejoin 1210 | 0 setlinecap 1211 | 0 0 m 1212 | -4 0 l 1213 | gsave 1214 | 0.000 setgray 1215 | fill 1216 | grestore 1217 | stroke 1218 | grestore 1219 | } bind def 1220 | 518.4 161.018 o 1221 | grestore 1222 | gsave 1223 | 42.859375 157.705682 translate 1224 | 0.000000 rotate 1225 | 0.000000 0.000000 m /zero glyphshow 1226 | 7.634766 0.000000 m /period glyphshow 1227 | 11.449219 0.000000 m /seven glyphshow 1228 | 19.083984 0.000000 m /zero glyphshow 1229 | grestore 1230 | gsave 1231 | /o { 1232 | gsave 1233 | newpath 1234 | translate 1235 | 0.5 setlinewidth 1236 | 1 setlinejoin 1237 | 0 setlinecap 1238 | 0 0 m 1239 | 4 0 l 1240 | gsave 1241 | 0.000 setgray 1242 | fill 1243 | grestore 1244 | stroke 1245 | grestore 1246 | } bind def 1247 | 72 180.655 o 1248 | grestore 1249 | gsave 1250 | /o { 1251 | gsave 1252 | newpath 1253 | translate 1254 | 0.5 setlinewidth 1255 | 1 setlinejoin 1256 | 0 setlinecap 1257 | 0 0 m 1258 | -4 0 l 1259 | gsave 1260 | 0.000 setgray 1261 | fill 1262 | grestore 1263 | stroke 1264 | grestore 1265 | } bind def 1266 | 518.4 180.655 o 1267 | grestore 1268 | gsave 1269 | 43.109375 177.342045 translate 1270 | 0.000000 rotate 1271 | 0.000000 0.000000 m /zero glyphshow 1272 | 7.634766 0.000000 m /period glyphshow 1273 | 11.449219 0.000000 m /seven glyphshow 1274 | 19.083984 0.000000 m /five glyphshow 1275 | grestore 1276 | gsave 1277 | /o { 1278 | gsave 1279 | newpath 1280 | translate 1281 | 0.5 setlinewidth 1282 | 1 setlinejoin 1283 | 0 setlinecap 1284 | 0 0 m 1285 | 4 0 l 1286 | gsave 1287 | 0.000 setgray 1288 | fill 1289 | grestore 1290 | stroke 1291 | grestore 1292 | } bind def 1293 | 72 200.291 o 1294 | grestore 1295 | gsave 1296 | /o { 1297 | gsave 1298 | newpath 1299 | translate 1300 | 0.5 setlinewidth 1301 | 1 setlinejoin 1302 | 0 setlinecap 1303 | 0 0 m 1304 | -4 0 l 1305 | gsave 1306 | 0.000 setgray 1307 | fill 1308 | grestore 1309 | stroke 1310 | grestore 1311 | } bind def 1312 | 518.4 200.291 o 1313 | grestore 1314 | gsave 1315 | 42.859375 196.978409 translate 1316 | 0.000000 rotate 1317 | 0.000000 0.000000 m /zero glyphshow 1318 | 7.634766 0.000000 m /period glyphshow 1319 | 11.449219 0.000000 m /eight glyphshow 1320 | 19.083984 0.000000 m /zero glyphshow 1321 | grestore 1322 | gsave 1323 | 35.359375 56.378267 translate 1324 | 90.000000 rotate 1325 | 0.000000 0.000000 m /C glyphshow 1326 | 8.378906 0.000000 m /l glyphshow 1327 | 11.712891 0.000000 m /a glyphshow 1328 | 19.066406 0.000000 m /s glyphshow 1329 | 25.318359 0.000000 m /i glyphshow 1330 | 28.652344 0.000000 m /f glyphshow 1331 | 32.876953 0.000000 m /i glyphshow 1332 | 36.210938 0.000000 m /c glyphshow 1333 | 42.808594 0.000000 m /a glyphshow 1334 | 50.162109 0.000000 m /t glyphshow 1335 | 54.867188 0.000000 m /i glyphshow 1336 | 58.201172 0.000000 m /o glyphshow 1337 | 65.542969 0.000000 m /n glyphshow 1338 | 73.148438 0.000000 m /space glyphshow 1339 | 76.962891 0.000000 m /a glyphshow 1340 | 84.316406 0.000000 m /c glyphshow 1341 | 90.914062 0.000000 m /c glyphshow 1342 | 97.511719 0.000000 m /u glyphshow 1343 | 105.117188 0.000000 m /r glyphshow 1344 | 110.050781 0.000000 m /a glyphshow 1345 | 117.404297 0.000000 m /c glyphshow 1346 | 124.001953 0.000000 m /y glyphshow 1347 | grestore 1348 | /BitstreamVeraSans-Roman findfont 1349 | 14.400 scalefont 1350 | setfont 1351 | gsave 1352 | 186.106250 205.290909 translate 1353 | 0.000000 rotate 1354 | 0.000000 0.000000 m /C glyphshow 1355 | 10.037231 0.000000 m /l glyphshow 1356 | 14.031067 0.000000 m /a glyphshow 1357 | 22.839966 0.000000 m /s 
glyphshow 1358 | 30.329285 0.000000 m /s glyphshow 1359 | 37.818604 0.000000 m /i glyphshow 1360 | 41.812439 0.000000 m /f glyphshow 1361 | 46.873169 0.000000 m /i glyphshow 1362 | 50.867004 0.000000 m /c glyphshow 1363 | 58.770447 0.000000 m /a glyphshow 1364 | 67.579346 0.000000 m /t glyphshow 1365 | 73.215637 0.000000 m /i glyphshow 1366 | 77.209473 0.000000 m /o glyphshow 1367 | 86.004333 0.000000 m /n glyphshow 1368 | 95.115051 0.000000 m /space glyphshow 1369 | 99.684448 0.000000 m /a glyphshow 1370 | 108.493347 0.000000 m /c glyphshow 1371 | 116.396790 0.000000 m /c glyphshow 1372 | 124.300232 0.000000 m /u glyphshow 1373 | 133.410950 0.000000 m /r glyphshow 1374 | 139.320984 0.000000 m /a glyphshow 1375 | 148.129883 0.000000 m /c glyphshow 1376 | 156.033325 0.000000 m /y glyphshow 1377 | 164.540405 0.000000 m /space glyphshow 1378 | 169.109802 0.000000 m /h glyphshow 1379 | 178.220520 0.000000 m /i glyphshow 1380 | 182.214355 0.000000 m /s glyphshow 1381 | 189.703674 0.000000 m /t glyphshow 1382 | 195.339966 0.000000 m /o glyphshow 1383 | 204.134827 0.000000 m /r glyphshow 1384 | 210.044861 0.000000 m /y glyphshow 1385 | grestore 1386 | 1387 | end 1388 | showpage 1389 | -------------------------------------------------------------------------------- /report/kmeans_acc-eps-converted-to.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/kmeans_acc-eps-converted-to.pdf -------------------------------------------------------------------------------- /report/kmeans_acc.eps: -------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%Title: ./report/kmeans_acc.eps 3 | %%Creator: matplotlib version 1.5.1, http://matplotlib.org/ 4 | %%CreationDate: Tue Apr 19 23:40:27 2016 5 | %%Orientation: portrait 6 | %%BoundingBox: 18 180 594 612 7 | %%EndComments 8 | %%BeginProlog 9 | /mpldict 8 dict def 10 | mpldict begin 11 | /m { moveto } bind def 12 | /l { lineto } bind def 13 | /r { rlineto } bind def 14 | /c { curveto } bind def 15 | /cl { closepath } bind def 16 | /box { 17 | m 18 | 1 index 0 r 19 | 0 exch r 20 | neg 0 r 21 | cl 22 | } bind def 23 | /clipbox { 24 | box 25 | clip 26 | newpath 27 | } bind def 28 | %!PS-Adobe-3.0 Resource-Font 29 | %%Title: Bitstream Vera Sans 30 | %%Copyright: Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. 31 | %%Creator: Converted from TrueType to type 3 by PPR 32 | 25 dict begin 33 | /_d{bind def}bind def 34 | /_m{moveto}_d 35 | /_l{lineto}_d 36 | /_cl{closepath eofill}_d 37 | /_c{curveto}_d 38 | /_sc{7 -1 roll{setcachedevice}{pop pop pop pop pop pop}ifelse}_d 39 | /_e{exec}_d 40 | /FontName /BitstreamVeraSans-Roman def 41 | /PaintType 0 def 42 | /FontMatrix[.001 0 0 .001 0 0]def 43 | /FontBBox[-183 -236 1287 928]def 44 | /FontType 3 def 45 | /Encoding [ /space /period /zero /one /two /three /four /five /six /seven /eight /nine /C /E /a /c /f /h /i /l /n /o /p /r /s /t /u /y ] def 46 | /FontInfo 10 dict dup begin 47 | /FamilyName (Bitstream Vera Sans) def 48 | /FullName (Bitstream Vera Sans) def 49 | /Notice (Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is a trademark of Bitstream, Inc.) 
[EPS prolog and plot body omitted: embedded Bitstream Vera Sans glyph program, followed by two line plots (blue and green) of classification accuracy against epoch. X-axis "Epoch" (ticks 0 to 250); y-axis ticks 0.2 to 0.9; figure title "Classification accuracy history".]
--------------------------------------------------------------------------------
/report/kmeans_his-eps-converted-to.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/kmeans_his-eps-converted-to.pdf
--------------------------------------------------------------------------------
/report/loss-eps-converted-to.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/loss-eps-converted-to.pdf
--------------------------------------------------------------------------------
/report/mp1_Yihui He.aux:
--------------------------------------------------------------------------------
\relax
\@writefile{toc}{\contentsline {section}{\numberline {I}Implementation}{1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {I-A}}Forward pass and Loss}{1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {I-B}}Backward pass and Gradient check}{1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {I-C}}Train and Predict}{1}}
\@writefile{toc}{\contentsline {section}{\numberline {II}Model Building}{1}}
\@writefile{lot}{\contentsline {table}{\numberline {I}{\ignorespaces top accuracy}}{2}}
\newlabel{top-accuracy}{{I}{2}}
\@writefile{toc}{\contentsline {section}{\numberline {III}Extra Credits}{2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {III-A}}momentum and other update methods}{2}}
\@writefile{lot}{\contentsline {table}{\numberline {II}{\ignorespaces Differences between update methods}}{2}}
\newlabel{Differences between update methods}{{II}{2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {III-B}}Dropout}{2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {III-C}}Initialization method}{2}}
\newlabel{fig:tf_plot}{{\unhbox \voidb@x \hbox {III-B}}{3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {III-D}}Activation functions}{3}}
\newlabel{fig:tf_plot}{{\unhbox \voidb@x \hbox {III-C}}{3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {\unhbox \voidb@x \hbox {III-E}}Preprocessing}{3}}
\@writefile{toc}{\contentsline {section}{\numberline {IV}Results}{4}}
\@writefile{lot}{\contentsline {table}{\numberline {III}{\ignorespaces Differences between update methods}}{4}}
\newlabel{Differences between update methods}{{III}{4}}
\@writefile{toc}{\contentsline {section}{\numberline {V}Challenges}{4}}
\@writefile{toc}{\contentsline {section}{\numberline {VI}Possible Improvements}{4}}
-------------------------------------------------------------------------------- /report/mp1_Yihui He.dvi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/mp1_Yihui He.dvi -------------------------------------------------------------------------------- /report/mp1_Yihui He.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/mp1_Yihui He.pdf -------------------------------------------------------------------------------- /report/mp1_Yihui He.tex: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/mp1_Yihui He.tex -------------------------------------------------------------------------------- /report/mp1_Yihui He.tex.backup: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/mp1_Yihui He.tex.backup -------------------------------------------------------------------------------- /report/parameters: -------------------------------------------------------------------------------- 1 | naive:150 2 | num_iters=12250, batch_size=100, 3 | learning_rate=1e-3, learning_rate_decay=0.95, 4 | reg=1.3 5 | .57 6 | .51 7 | .55 8 | 9 | momentum: 500 10 | num_iters=12250, batch_size=100, 11 | learning_rate=1e-4, learning_rate_decay=0.95, 12 | reg=.8, verbose=True,update="momentum",arg=0.9) 13 | 14 | Train accuracy: 0.655 15 | Validation accuracy: 0.553 16 | Test accuracy: 0.532 17 | 18 | both Training 1000 19 | momentum 20 | Train accuracy: 0.485755102041 21 | Validation accuracy: 0.472 22 | Test accuracy: 0.458 23 | naive 24 | Train accuracy: 0.271591836735 25 | Validation accuracy: 0.275 26 | Test accuracy: 0.282 27 | 28 | Drop 29 | retest this one 30 | 31 | 50 0.0001 0.5 Validation accuracy: 0.285 32 | 50 0.0001 5.0 Validation accuracy: 0.282 33 | 50 0.0001 50.0 Validation accuracy: 0.203 34 | 50 0.001 0.5 Validation accuracy: 0.483 35 | 50 0.001 5.0 Validation accuracy: 0.413 36 | 50 0.001 50.0 Validation accuracy: 0.258 37 | 50 0.01 0.5 Validation accuracy: 0.078 38 | 50 0.01 5.0 Validation accuracy: 0.133 39 | 50 0.01 50.0 Validation accuracy: 0.087 40 | 100 0.0001 0.5 Validation accuracy: 0.294 41 | 100 0.0001 5.0 Validation accuracy: 0.282 42 | 100 0.0001 50.0 Validation accuracy: 0.206 43 | 100 0.001 0.5 Validation accuracy: 0.47 44 | 100 0.001 5.0 Validation accuracy: 0.435 45 | 100 0.001 50.0 Validation accuracy: 0.253 46 | 100 0.01 0.5 Validation accuracy: 0.11 47 | 100 0.01 5.0 Validation accuracy: 0.086 48 | 100 0.01 50.0 Validation accuracy: 0.069 49 | 150 0.0001 0.5 Validation accuracy: 0.298 50 | 150 0.0001 5.0 Validation accuracy: 0.297 51 | 150 0.0001 50.0 Validation accuracy: 0.225 52 | 150 0.001 0.5 Validation accuracy: 0.496 53 | 150 0.001 5.0 Validation accuracy: 0.439 54 | 150 0.001 50.0 Validation accuracy: 0.257 55 | 150 0.01 0.5 Validation accuracy: 0.086 56 | 150 0.01 5.0 Validation accuracy: 0.069 57 | 150 0.01 50.0 
58 | 200 0.0005 0.005 Validation accuracy: 0.449
59 | 200 0.0005 0.05 Validation accuracy: 0.46
60 | 200 0.0005 0.5 Validation accuracy: 0.471
61 | 200 0.001 0.005 Validation accuracy: 0.476
62 | 200 0.001 0.05 Validation accuracy: 0.479
63 | 200 0.001 0.5 Validation accuracy: 0.476
64 | 200 0.002 0.005 Validation accuracy: 0.464
65 | 200 0.002 0.05 Validation accuracy: 0.444
66 | 200 0.002 0.5 Validation accuracy: 0.493
67 | 250 0.0005 0.005 Validation accuracy: 0.444
68 | 250 0.0005 0.05 Validation accuracy: 0.455
69 | 250 0.0005 0.5 Validation accuracy: 0.456
70 | 250 0.001 0.005 Validation accuracy: 0.489
71 | 250 0.001 0.05 Validation accuracy: 0.489
72 | 250 0.001 0.5 Validation accuracy: 0.481
73 | 250 0.002 0.005 Validation accuracy: 0.487
74 | 250 0.002 0.05 Validation accuracy: 0.429
75 | 250 0.002 0.5 Validation accuracy: 0.48
76 | 300 0.0005 0.005 Validation accuracy: 0.44
77 | 300 0.0005 0.05 Validation accuracy: 0.46
78 | 300 0.0005 0.5 Validation accuracy: 0.456
79 | 300 0.001 0.005 Validation accuracy: 0.495
80 | 300 0.001 0.05 Validation accuracy: 0.491
81 | 300 0.001 0.5 Validation accuracy: 0.489
82 | 300 0.002 0.005 Validation accuracy: 0.497
83 | 300 0.002 0.05 Validation accuracy: 0.471
84 | 300 0.002 0.5 Validation accuracy: 0.487
85 | 350 0.0005 0.005 Validation accuracy: 0.464
86 | 350 0.0005 0.05 Validation accuracy: 0.475
87 | 350 0.0005 0.5 Validation accuracy: 0.472
88 | 350 0.001 0.005 Validation accuracy: 0.493
89 | 350 0.001 0.05 Validation accuracy: 0.504
90 | 350 0.001 0.5 Validation accuracy: 0.477
91 | 350 0.002 0.005 Validation accuracy: 0.475
92 | 350 0.002 0.05 Validation accuracy: 0.472
93 | 350 0.002 0.5 Validation accuracy: 0.491
94 |
--------------------------------------------------------------------------------
/report/plot_tf.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ethanhe42/An-Analysis-of-Single-Layer-Networks-in-Unsupervised-Feature-Learning/df286113a2db5ffd287a2cb3b00aa45d487792f1/report/plot_tf.pdf
--------------------------------------------------------------------------------
/tune_naive.py:
--------------------------------------------------------------------------------
1 | from neural_net import *
2 | from data_utils import *
3 |
4 |
5 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000):
6 |     """
7 |     Load the CIFAR-10 dataset from disk and perform preprocessing to prepare
8 |     it for the two-layer neural net classifier. These are the same steps as
9 |     we used for the SVM, but condensed to a single function.
10 | """ 11 | # Load the raw CIFAR-10 data 12 | cifar10_dir = "C:\Users\Pomodori\workspace\cifar-10-batches-py" 13 | X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir) 14 | 15 | # Subsample the data 16 | mask = range(num_training, num_training + num_validation) 17 | X_val = X_train[mask] 18 | y_val = y_train[mask] 19 | mask = range(num_training) 20 | X_train = X_train[mask] 21 | y_train = y_train[mask] 22 | mask = range(num_test) 23 | X_test = X_test[mask] 24 | y_test = y_test[mask] 25 | 26 | # Normalize the data: subtract the mean image 27 | mean_image = np.mean(X_train, axis=0) 28 | X_train -= mean_image 29 | X_val -= mean_image 30 | X_test -= mean_image 31 | 32 | # Reshape data to rows 33 | X_train = X_train.reshape(num_training, -1) 34 | X_val = X_val.reshape(num_validation, -1) 35 | X_test = X_test.reshape(num_test, -1) 36 | 37 | return X_train, y_train, X_val, y_val, X_test, y_test 38 | 39 | 40 | # Invoke the above function to get our data. 41 | X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data() 42 | print 'Train data shape: ', X_train.shape 43 | print 'Train labels shape: ', y_train.shape 44 | print 'Validation data shape: ', X_val.shape 45 | print 'Validation labels shape: ', y_val.shape 46 | print 'Test data shape: ', X_test.shape 47 | print 'Test labels shape: ', y_test.shape 48 | 49 | 50 | 51 | def tryArgs(hidden_size,learning_rate,reg): 52 | net = TwoLayerNet(3072, i, 10) 53 | 54 | # Train the network 55 | stats = net.train(X_train, y_train, X_val, y_val, 56 | num_iters=1000, batch_size=200, 57 | learning_rate=learning_rate, learning_rate_decay=0.95, 58 | reg=reg, verbose=False) 59 | 60 | # Predict on the validation set 61 | val_acc = (net.predict(X_val) == y_val).mean() 62 | f=open("naive_nn.csv","a") 63 | f.write(str(hidden_size)+','+str(learning_rate)+','+str(reg)+','+str(val_acc)+'\n') 64 | f.close() 65 | print hidden_size,learning_rate,reg, val_acc 66 | 67 | 68 | hidden_size = range(150,600,50) 69 | lr=[1e-3*10**i for i in range(-2,3)] 70 | reg=[0.5*10**i for i in range(-3,3)] 71 | f=open("naive_nn.csv","w") 72 | for i in hidden_size: 73 | for j in lr: 74 | for k in reg: 75 | # t=Thread(target=tryArgs,args=(i,j,k)) 76 | # t.daemon=True 77 | # t.start() 78 | tryArgs(i,j,k) 79 | -------------------------------------------------------------------------------- /zca.py: -------------------------------------------------------------------------------- 1 | def whitening(X): 2 | print "whitening" 3 | [D,V]=np.linalg.eig(np.cov(X,rowvar=False)) 4 | P = V.dot(np.diag(np.sqrt(1/(D + 1e-9)))).dot(V.T) 5 | X = X.dot(P) 6 | return X, P 7 | --------------------------------------------------------------------------------