├── gelus.pdf ├── README.md ├── LICENSE ├── load_cifar10.py ├── nn.py ├── mnist_ae.py ├── all_convnet.py ├── mnist_fcn.py ├── timit_fcn.py ├── twitter_pos.py ├── SGDR_WRNs_gelu.py └── data └── Tweets └── tweets-dev.txt /gelus.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hendrycks/GELUs/HEAD/gelus.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Gaussian Error Linear Units (GELUs) 2 | This software allows users to reproduce the results in Gaussian Error Linear Units (GELUs), Dan Hendrycks and Kevin Gimpel 2016. 3 | 4 | # GELU Approximations 5 | The `sigmoid(1.702 * x) * x` approximation is fast but is somewhat inaccurate. Meanwhile `0.5 * x * (1 + tanh(x * 0.7978845608 * (1 + 0.044715 * x * x)))` is slower but more accurate. 6 | 7 | However, exact versions are now available in pytorch, so approximations are no longer necessary for suitable speed. 8 | 9 | # Execution 10 | Please install Tensorflow, Lasagne, and Python 3+. 11 | 12 | ## Citation 13 | 14 | If you find this useful in your research, please consider citing: 15 | 16 | @article{hendrycks2016gelu, 17 | title={Gaussian Error Linear Units (GELUs)}, 18 | author={Hendrycks, Dan and Gimpel, Kevin}, 19 | journal={arXiv preprint arXiv:1606.08415}, 20 | year={2016} 21 | } 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 hendrycks 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /load_cifar10.py: -------------------------------------------------------------------------------- 1 | # code repurposed from the tf-learn library 2 | import sys 3 | import os 4 | import pickle 5 | import numpy as np 6 | from six.moves import urllib 7 | import tarfile 8 | 9 | def to_categorical(y, nb_classes): 10 | y = np.asarray(y, dtype='int32') 11 | if not nb_classes: 12 | nb_classes = np.max(y)+1 13 | Y = np.zeros((len(y), nb_classes)) 14 | for i in range(len(y)): 15 | Y[i, y[i]] = 1. 
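    # (note) the loop above writes a 1. into each row at that example's label column;
    # a vectorized equivalent, shown only for reference, is: Y[np.arange(len(y)), y] = 1.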
16 | return Y 17 | 18 | # load training and testing data 19 | def load_data10(randomize=True, return_val=False, one_hot=False, dirname="cifar-10-batches-py"): 20 | tarpath = maybe_download("cifar-10-python.tar.gz", 21 | "http://www.cs.toronto.edu/~kriz/", dirname) 22 | X_train = [] 23 | Y_train = [] 24 | 25 | for i in range(1, 6): 26 | fpath = os.path.join(dirname, 'data_batch_' + str(i)) 27 | data, labels = load_batch(fpath) 28 | if i == 1: 29 | X_train = data 30 | Y_train = labels 31 | else: 32 | X_train = np.concatenate([X_train, data], axis=0) 33 | Y_train = np.concatenate([Y_train, labels], axis=0) 34 | 35 | X_test, Y_test = load_batch(os.path.join(dirname, 'test_batch')) 36 | 37 | X_train = np.dstack((X_train[:, :1024], X_train[:, 1024:2048], 38 | X_train[:, 2048:])) / 255. 39 | X_train = np.reshape(X_train, [-1, 32, 32, 3]) 40 | X_test = np.dstack((X_test[:, :1024], X_test[:, 1024:2048], 41 | X_test[:, 2048:])) / 255. 42 | X_test = np.reshape(X_test, [-1, 32, 32, 3]) 43 | 44 | if randomize is True: 45 | test_perm = np.array(np.random.permutation(X_test.shape[0])) 46 | X_test = X_test[test_perm] 47 | Y_test = np.asarray(Y_test) 48 | Y_test = Y_test[test_perm] 49 | 50 | perm = np.array(np.random.permutation(X_train.shape[0])) 51 | X_train = X_train[perm] 52 | Y_train = np.asarray(Y_train) 53 | Y_train = Y_train[perm] 54 | if return_val: 55 | X_train, X_val = np.split(X_train, [45000]) # 45000 for training, 5000 for validation 56 | Y_train, Y_val = np.split(Y_train, [45000]) 57 | 58 | if one_hot: 59 | Y_train, Y_val, Y_test = to_categorical(Y_train, 10), to_categorical(Y_val, 10), to_categorical(Y_test, 10) 60 | return X_train, Y_train, X_val, Y_val, X_test, Y_test 61 | else: 62 | return X_train, Y_train, X_val, Y_val, X_test, Y_test 63 | else: 64 | if one_hot: 65 | Y_train, Y_test = to_categorical(Y_train, 10), to_categorical(Y_test, 10) 66 | return X_train, Y_train, X_test, Y_test 67 | else: 68 | return X_train, Y_train, X_test, Y_test 69 | 70 | 71 | def load_batch(fpath): 72 | with open(fpath, 'rb') as f: 73 | d = pickle.load(f, encoding='latin1') 74 | data = d["data"] 75 | labels = d["labels"] 76 | return data, labels 77 | 78 | 79 | def maybe_download(filename, source_url, work_directory): 80 | if not os.path.exists(work_directory): 81 | os.mkdir(work_directory) 82 | filepath = os.path.join(work_directory, filename) 83 | if not os.path.exists(filepath): 84 | print("Downloading CIFAR 10...") 85 | filepath, _ = urllib.request.urlretrieve(source_url + filename, 86 | filepath) 87 | statinfo = os.stat(filepath) 88 | print(('CIFAR 10 downloaded', filename, statinfo.st_size, 'bytes.')) 89 | untar(filepath) 90 | return filepath 91 | 92 | 93 | def untar(fname): 94 | if (fname.endswith("tar.gz")): 95 | tar = tarfile.open(fname) 96 | tar.extractall() 97 | tar.close() 98 | print("File Extracted in Current Directory") 99 | else: 100 | print("Not a tar.gz file: '%s '" % sys.argv[0]) 101 | 102 | if __name__ == '__main__': 103 | load_data10() 104 | -------------------------------------------------------------------------------- /nn.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import theano as th 3 | import theano.tensor as T 4 | from scipy import linalg 5 | import lasagne 6 | 7 | class ZCA(object): 8 | def __init__(self, regularization=1e-5, x=None): 9 | self.regularization = regularization 10 | if x is not None: 11 | self.fit(x) 12 | 13 | def fit(self, x): 14 | s = x.shape 15 | x = x.copy().reshape((s[0],np.prod(s[1:]))) 16 | m = 
np.mean(x, axis=0) 17 | x -= m 18 | sigma = np.dot(x.T,x) / x.shape[0] 19 | U, S, V = linalg.svd(sigma) 20 | tmp = np.dot(U, np.diag(1./np.sqrt(S+self.regularization))) 21 | tmp2 = np.dot(U, np.diag(np.sqrt(S+self.regularization))) 22 | self.ZCA_mat = th.shared(np.dot(tmp, U.T).astype(th.config.floatX)) 23 | self.inv_ZCA_mat = th.shared(np.dot(tmp2, U.T).astype(th.config.floatX)) 24 | self.mean = th.shared(m.astype(th.config.floatX)) 25 | 26 | def apply(self, x): 27 | s = x.shape 28 | if isinstance(x, np.ndarray): 29 | return np.dot(x.reshape((s[0],np.prod(s[1:]))) - self.mean.get_value(), self.ZCA_mat.get_value()).reshape(s) 30 | elif isinstance(x, T.TensorVariable): 31 | return T.dot(x.flatten(2) - self.mean.dimshuffle('x',0), self.ZCA_mat).reshape(s) 32 | else: 33 | raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables") 34 | 35 | def invert(self, x): 36 | s = x.shape 37 | if isinstance(x, np.ndarray): 38 | return (np.dot(x.reshape((s[0],np.prod(s[1:]))), self.inv_ZCA_mat.get_value()) + self.mean.get_value()).reshape(s) 39 | elif isinstance(x, T.TensorVariable): 40 | return (T.dot(x.flatten(2), self.inv_ZCA_mat) + self.mean.dimshuffle('x',0)).reshape(s) 41 | else: 42 | raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables") 43 | 44 | # T.nnet.relu has some issues with very large inputs, this is more stable 45 | def relu(x): 46 | return T.maximum(x, 0) 47 | 48 | def lrelu(x, a=0.1): 49 | return T.maximum(x, a*x) 50 | 51 | def gelu(x): 52 | return 0.5 * x * (1 + T.tanh(T.sqrt(2 / np.pi) * (x + 0.044715 * T.pow(x, 3)))) 53 | 54 | def log_sum_exp(x, axis=1): 55 | m = T.max(x, axis=axis) 56 | return m+T.log(T.sum(T.exp(x-m.dimshuffle(0,'x')), axis=axis)) 57 | 58 | def adamax_updates(params, cost, lr=0.001, mom1=0.9, mom2=0.999): 59 | updates = [] 60 | grads = T.grad(cost, params) 61 | for p, g in zip(params, grads): 62 | mg = th.shared(np.cast[th.config.floatX](p.get_value() * 0.)) 63 | v = th.shared(np.cast[th.config.floatX](p.get_value() * 0.)) 64 | if mom1>0: 65 | v_t = mom1*v + (1. - mom1)*g 66 | updates.append((v,v_t)) 67 | else: 68 | v_t = g 69 | mg_t = T.maximum(mom2*mg, abs(g)) 70 | g_t = v_t / (mg_t + 1e-6) 71 | p_t = p - lr * g_t 72 | updates.append((mg, mg_t)) 73 | updates.append((p, p_t)) 74 | return updates 75 | 76 | def adam_updates(params, cost, lr=0.001, mom1=0.9, mom2=0.999): 77 | updates = [] 78 | grads = T.grad(cost, params) 79 | t = th.shared(np.cast[th.config.floatX](1.)) 80 | for p, g in zip(params, grads): 81 | v = th.shared(np.cast[th.config.floatX](p.get_value() * 0.)) 82 | mg = th.shared(np.cast[th.config.floatX](p.get_value() * 0.)) 83 | v_t = mom1*v + (1. - mom1)*g 84 | mg_t = mom2*mg + (1. - mom2)*T.square(g) 85 | v_hat = v_t / (1. - mom1 ** t) 86 | mg_hat = mg_t / (1. 
- mom2 ** t) 87 | g_t = v_hat / T.sqrt(mg_hat + 1e-8) 88 | p_t = p - lr * g_t 89 | updates.append((v, v_t)) 90 | updates.append((mg, mg_t)) 91 | updates.append((p, p_t)) 92 | updates.append((t, t+1)) 93 | return updates 94 | 95 | def softmax_loss(p_true, output_before_softmax): 96 | output_before_softmax -= T.max(output_before_softmax, axis=1, keepdims=True) 97 | if p_true.ndim==2: 98 | return T.mean(T.log(T.sum(T.exp(output_before_softmax),axis=1)) - T.sum(p_true*output_before_softmax, axis=1)) 99 | else: 100 | return T.mean(T.log(T.sum(T.exp(output_before_softmax),axis=1)) - output_before_softmax[T.arange(p_true.shape[0]),p_true]) 101 | 102 | class GlobalAvgLayer(lasagne.layers.Layer): 103 | def __init__(self, incoming, **kwargs): 104 | super(GlobalAvgLayer, self).__init__(incoming, **kwargs) 105 | def get_output_for(self, input, **kwargs): 106 | return T.mean(input, axis=(2,3)) 107 | def get_output_shape_for(self, input_shape): 108 | return input_shape[:2] 109 | -------------------------------------------------------------------------------- /mnist_ae.py: -------------------------------------------------------------------------------- 1 | # import MNIST data, Tensorflow, and other helpers 2 | from tensorflow.examples.tutorials.mnist import input_data 3 | mnist = input_data.read_data_sets("/tmp/data/") 4 | import tensorflow as tf 5 | import numpy as np 6 | import sys 7 | import os 8 | import pickle 9 | 10 | # training parameters 11 | training_epochs = 500 12 | batch_size = 64 13 | 14 | # architecture parameters 15 | image_pixels = 28 * 28 16 | 17 | try: 18 | nonlinearity_name = sys.argv[1] # 'relu', 'elu', 'gelu', 'silu' 19 | except: 20 | print('Defaulted to gelu since no nonlinearity specified through command line') 21 | nonlinearity_name = 'gelu' 22 | 23 | try: 24 | learning_rate = float(sys.argv[2]) # 0.001, 0.0001, 0.00001 25 | except: 26 | print('Defaulted to a learning rate of 0.001') 27 | learning_rate = 1e-3 28 | 29 | x = tf.placeholder(dtype=tf.float32, shape=[None, image_pixels]) 30 | 31 | W = { 32 | '1': tf.Variable(tf.nn.l2_normalize(tf.random_normal([image_pixels, 1000]), 0)), 33 | '2': tf.Variable(tf.nn.l2_normalize(tf.random_normal([1000, 500]), 0)), 34 | '3': tf.Variable(tf.nn.l2_normalize(tf.random_normal([500, 250]), 0)), 35 | '4': tf.Variable(tf.nn.l2_normalize(tf.random_normal([250, 30]), 0)), 36 | '5': tf.Variable(tf.nn.l2_normalize(tf.random_normal([30, 250]), 0)), 37 | '6': tf.Variable(tf.nn.l2_normalize(tf.random_normal([250, 500]), 0)), 38 | '7': tf.Variable(tf.nn.l2_normalize(tf.random_normal([500, 1000]), 0)), 39 | '8': tf.Variable(tf.nn.l2_normalize(tf.random_normal([1000, image_pixels]), 0)) 40 | } 41 | 42 | b = { 43 | '1': tf.Variable(tf.zeros([1000])), 44 | '2': tf.Variable(tf.zeros([500])), 45 | '3': tf.Variable(tf.zeros([250])), 46 | '4': tf.Variable(tf.zeros([30])), 47 | '5': tf.Variable(tf.zeros([250])), 48 | '6': tf.Variable(tf.zeros([500])), 49 | '7': tf.Variable(tf.zeros([1000])), 50 | '8': tf.Variable(tf.zeros([image_pixels])) 51 | } 52 | 53 | def ae(x): 54 | if nonlinearity_name == 'relu': 55 | f = tf.nn.relu 56 | elif nonlinearity_name == 'elu': 57 | f = tf.nn.elu 58 | elif nonlinearity_name == 'gelu': 59 | # def gelu(x): 60 | # return tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.) 
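        # The commented-out gelu above is the exact GELU, x * Phi(x): since
        # erfc(z) = 1 - erf(z), the term erfc(-x / sqrt(2)) / 2 equals
        # (1 + erf(x / sqrt(2))) / 2, i.e. the standard normal CDF Phi(x).
        # In newer TensorFlow releases the same exact form could be written
        # (untested sketch) as 0.5 * _x * (1.0 + tf.math.erf(_x / tf.sqrt(2.0))).
        # gelu_fast below is the tanh-based approximation used in the paper.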
61 | # f = gelu 62 | def gelu_fast(_x): 63 | return 0.5 * _x * (1 + tf.tanh(tf.sqrt(2 / np.pi) * (_x + 0.044715 * tf.pow(_x, 3)))) 64 | f = gelu_fast 65 | elif nonlinearity_name == 'silu': 66 | def silu(_x): 67 | return _x * tf.sigmoid(_x) 68 | f = silu 69 | # elif nonlinearity_name == 'soi': 70 | # def soi_map(x): 71 | # u = tf.random_uniform(tf.shape(x)) 72 | # mask = tf.to_float(tf.less(u, (1 + tf.erf(x / tf.sqrt(2.))) / 2.)) 73 | # return tf.cond(is_training, lambda: tf.mul(mask, x), 74 | # lambda: tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.)) 75 | # f = soi_map 76 | 77 | else: 78 | raise NameError("Need 'relu', 'elu', 'gelu', or 'silu' for nonlinearity_name") 79 | 80 | h1 = f(tf.matmul(x, W['1']) + b['1']) 81 | h2 = f(tf.matmul(h1, W['2']) + b['2']) 82 | h3 = f(tf.matmul(h2, W['3']) + b['3']) 83 | h4 = f(tf.matmul(h3, W['4']) + b['4']) 84 | h5 = f(tf.matmul(h4, W['5']) + b['5']) 85 | h6 = f(tf.matmul(h5, W['6']) + b['6']) 86 | h7 = f(tf.matmul(h6, W['7']) + b['7']) 87 | return tf.matmul(h7, W['8']) + b['8'] 88 | 89 | reconstruction = ae(x) 90 | loss = tf.reduce_mean(tf.square(reconstruction - x)) 91 | optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss) 92 | 93 | # store future results with previous results 94 | if not os.path.exists("./data/"): 95 | os.makedirs("./data/") 96 | 97 | if os.path.exists("./data/mnist_ae_" + nonlinearity_name + ".p"): 98 | history = pickle.load(open("./data/mnist_ae_" + nonlinearity_name + ".p", "rb")) 99 | key_str = str(len(history)//3 + 1) 100 | history["lr" + key_str] = learning_rate 101 | history["train_loss" + key_str] = [] 102 | history["test_loss" + key_str] = [] 103 | else: 104 | history = {'lr1': learning_rate, 'train_loss1': [], 'test_loss1': []} 105 | key_str = '1' 106 | 107 | 108 | with tf.Session() as sess: 109 | print('Loading Data') 110 | X_train = np.concatenate((mnist.train.images, mnist.validation.images), axis=0) 111 | 112 | print('Beginning training') 113 | sess.run(tf.initialize_all_variables()) 114 | 115 | num_batches = 60000 // batch_size 116 | save_every = num_batches//5 # save training information 3 times per epoch 117 | 118 | for epoch in range(training_epochs): 119 | # shuffle 120 | indices = np.arange(X_train.shape[0]) 121 | np.random.shuffle(indices) 122 | X_train = X_train[indices] 123 | 124 | for i in range(num_batches): 125 | offset = i * batch_size 126 | _, l = sess.run([optimizer, loss], feed_dict={x: X_train[offset:offset+batch_size]}) 127 | 128 | history["train_loss" + key_str].append(l) 129 | if i % save_every == 0: 130 | l = sess.run(loss, feed_dict={x: mnist.test.images}) 131 | history["test_loss" + key_str].append(l) 132 | 133 | # print('Epoch', epoch + 1, 'Complete') 134 | 135 | # save history 136 | pickle.dump(history, open("./data/mnist_ae_" + nonlinearity_name + ".p", "wb")) 137 | -------------------------------------------------------------------------------- /all_convnet.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import pickle 3 | import time 4 | import os 5 | import logging 6 | import numpy as np 7 | import theano as th 8 | import theano.tensor as T 9 | from theano.sandbox.rng_mrg import MRG_RandomStreams 10 | import lasagne 11 | import lasagne.layers as ll 12 | from lasagne.layers import dnn, batch_norm 13 | import nn 14 | logging.basicConfig(level=logging.INFO) 15 | 16 | # settings 17 | parser = argparse.ArgumentParser() 18 | parser.add_argument('--seed', default=1, type=int) 19 | parser.add_argument('--batch_size', 
default=100, type=int) 20 | parser.add_argument('--activation', default='relu', type=str) 21 | parser.add_argument('--learning_rate', default=0.001, type=float) 22 | args = parser.parse_args() 23 | logging.info(args) 24 | 25 | # fixed random seeds 26 | rng = np.random.RandomState(args.seed) 27 | theano_rng = MRG_RandomStreams(rng.randint(2 ** 15)) 28 | lasagne.random.set_rng(np.random.RandomState(rng.randint(2 ** 15))) 29 | 30 | # setup output 31 | time_str = time.strftime("%m-%d-%H-%M", time.gmtime()) 32 | exp_dir = "./data/" + args.activation + "_" + time_str + "_" + "{}".format(args.learning_rate).replace(".", "p") 33 | try: 34 | os.stat(exp_dir) 35 | except: 36 | os.makedirs(exp_dir) 37 | logging.info("OPENING " + exp_dir + '/results.csv') 38 | results_file = open(exp_dir + '/results.csv', 'w') 39 | results_file.write('epoch, time, train_error, test_error\n') 40 | results_file.flush() 41 | 42 | # load CIFAR-10 data 43 | def unpickle(file): 44 | fo = open(file, 'rb') 45 | d = pickle.load(fo, encoding='latin1') 46 | fo.close() 47 | return {'x': np.cast[th.config.floatX]((-127.5 + d['data'].reshape((10000,3,32,32)))/128.), 'y': np.array(d['labels']).astype(np.uint8)} 48 | 49 | print('Loading data') 50 | train_data = [unpickle('/home-nfs/dan/cifar_data/cifar-10-batches-py/data_batch_' + str(i)) for i in range(1,6)] 51 | trainx = np.concatenate([d['x'] for d in train_data],axis=0) 52 | trainy = np.concatenate([d['y'] for d in train_data]) 53 | test_data = unpickle('/home-nfs/dan/cifar_data/cifar-10-batches-py/test_batch') 54 | testx = test_data['x'] 55 | testy = test_data['y'] 56 | nr_batches_train = int(trainx.shape[0]/args.batch_size) 57 | nr_batches_test = int(testx.shape[0]/args.batch_size) 58 | 59 | print('Whitening') 60 | # whitening 61 | whitener = nn.ZCA(x=trainx) 62 | trainx_white = whitener.apply(trainx) 63 | testx_white = whitener.apply(testx) 64 | print('Done whitening') 65 | 66 | if args.activation == 'relu': 67 | f = nn.relu 68 | elif args.activation == 'elu': 69 | f = lasagne.nonlinearities.elu 70 | elif args.activation == 'gelu': 71 | f = nn.gelu 72 | else: 73 | assert False, 'Need "relu" "elu" or "gelu" nonlinearity as input name' 74 | 75 | x = T.tensor4() 76 | 77 | layers = [ll.InputLayer(shape=(None, 3, 32, 32), input_var=x)] 78 | layers.append(ll.GaussianNoiseLayer(layers[-1], sigma=0.15)) 79 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f))) 80 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f))) 81 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f))) 82 | layers.append(ll.MaxPool2DLayer(layers[-1], 2)) 83 | layers.append(ll.DropoutLayer(layers[-1], p=0.5)) 84 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f))) 85 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f))) 86 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f))) 87 | layers.append(ll.MaxPool2DLayer(layers[-1], 2)) 88 | layers.append(ll.DropoutLayer(layers[-1], p=0.5)) 89 | layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=0, nonlinearity=f))) 90 | layers.append(batch_norm(ll.NINLayer(layers[-1], num_units=192, nonlinearity=f))) 91 | layers.append(batch_norm(ll.NINLayer(layers[-1], num_units=192, nonlinearity=f))) 92 | layers.append(nn.GlobalAvgLayer(layers[-1])) 93 | layers.append(batch_norm(ll.DenseLayer(layers[-1], num_units=10, 
nonlinearity=None))) 94 | 95 | 96 | # discriminative cost & updates 97 | output_before_softmax = ll.get_output(layers[-1], x) 98 | y = T.ivector() 99 | cost = nn.softmax_loss(y, output_before_softmax) 100 | train_err = T.mean(T.neq(T.argmax(output_before_softmax,axis=1),y)) 101 | params = ll.get_all_params(layers, trainable=True) 102 | lr = T.scalar() 103 | mom1 = T.scalar() 104 | param_updates = nn.adam_updates(params, cost, lr=lr, mom1=mom1) 105 | 106 | test_output_before_softmax = ll.get_output(layers[-1], x, deterministic=True) 107 | test_err = T.mean(T.neq(T.argmax(test_output_before_softmax,axis=1),y)) 108 | 109 | print('Compiling') 110 | # compile Theano functions 111 | train_batch = th.function(inputs=[x,y,lr,mom1], outputs=train_err, updates=param_updates) 112 | test_batch = th.function(inputs=[x,y], outputs=test_err) 113 | 114 | print('Beginning training') 115 | # //////////// perform training ////////////// 116 | begin_all = time.time() 117 | for epoch in range(200): 118 | begin_epoch = time.time() 119 | lr = np.cast[th.config.floatX](args.learning_rate * np.minimum(2. - epoch/100., 1.)) 120 | if epoch < 100: 121 | mom1 = 0.9 122 | else: 123 | mom1 = 0.5 124 | 125 | # permute the training data 126 | inds = rng.permutation(trainx_white.shape[0]) 127 | trainx_white = trainx_white[inds] 128 | trainy = trainy[inds] 129 | 130 | # train 131 | train_err = 0. 132 | for t in range(nr_batches_train): 133 | train_err += train_batch(trainx_white[t*args.batch_size:(t+1)*args.batch_size], 134 | trainy[t*args.batch_size:(t+1)*args.batch_size],lr,mom1) 135 | train_err /= nr_batches_train 136 | 137 | # test 138 | test_err = 0. 139 | for t in range(nr_batches_test): 140 | test_err += test_batch(testx_white[t*args.batch_size:(t+1)*args.batch_size], 141 | testy[t*args.batch_size:(t+1)*args.batch_size]) 142 | test_err /= nr_batches_test 143 | 144 | logging.info('Iteration %d, time = %ds, train_err = %.6f, test_err = %.6f' % (epoch, time.time()-begin_epoch, train_err, test_err)) 145 | results_file.write('%d, %d, %.6f, %.6f\n' % (epoch, time.time()-begin_all, train_err, test_err)) 146 | results_file.flush() 147 | 148 | if epoch % 5 == 0: 149 | np.savez(exp_dir + "/network.npz", *lasagne.layers.get_all_param_values(layers)) 150 | print('Saved') 151 | -------------------------------------------------------------------------------- /mnist_fcn.py: -------------------------------------------------------------------------------- 1 | # import MNIST data, Tensorflow, and other helpers 2 | from tensorflow.examples.tutorials.mnist import input_data 3 | mnist = input_data.read_data_sets("/tmp/data/") 4 | import tensorflow as tf 5 | import numpy as np 6 | import sys 7 | import os 8 | import pickle 9 | 10 | # training parameters 11 | training_epochs = 50 12 | batch_size = 128 13 | 14 | # architecture parameters 15 | n_hidden = 128 16 | n_labels = 10 17 | image_pixels = 28 * 28 18 | 19 | try: 20 | nonlinearity_name = sys.argv[1] # 'relu', 'elu', 'gelu', 'silu' 21 | except: 22 | print('Defaulted to gelu since no nonlinearity specified through command line') 23 | nonlinearity_name = 'gelu' 24 | 25 | try: 26 | learning_rate = float(sys.argv[2]) # 0.001, 0.0001, 0.00001 27 | except: 28 | print('Defaulted to a learning rate of 0.001') 29 | learning_rate = 1e-3 30 | 31 | try: 32 | p = float(sys.argv[3]) # 1 or 0.5 33 | except: 34 | print('Defaulted to to a dropout keep probability of 1.0') 35 | p = 1. 
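# note: p is the *keep* probability passed to tf.nn.dropout further down, so
# p = 1.0 disables dropout entirely and p = 0.5 keeps roughly half of the units.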
36 | 37 | x = tf.placeholder(dtype=tf.float32, shape=[None, image_pixels]) 38 | y = tf.placeholder(dtype=tf.int64, shape=[None]) 39 | is_training = tf.placeholder(tf.bool) 40 | 41 | 42 | W = { 43 | '1': tf.Variable(tf.nn.l2_normalize(tf.random_normal([image_pixels, n_hidden]), 0)), 44 | '2': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 45 | '3': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 46 | '4': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 47 | '5': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 48 | '6': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 49 | '7': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 50 | '8': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 51 | '9': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_labels]), 0)) 52 | } 53 | 54 | b = { 55 | '1': tf.Variable(tf.zeros([n_hidden])), 56 | '2': tf.Variable(tf.zeros([n_hidden])), 57 | '3': tf.Variable(tf.zeros([n_hidden])), 58 | '4': tf.Variable(tf.zeros([n_hidden])), 59 | '5': tf.Variable(tf.zeros([n_hidden])), 60 | '6': tf.Variable(tf.zeros([n_hidden])), 61 | '7': tf.Variable(tf.zeros([n_hidden])), 62 | '8': tf.Variable(tf.zeros([n_hidden])), 63 | '9': tf.Variable(tf.zeros([n_labels])) 64 | } 65 | 66 | def feedforward(x): 67 | if nonlinearity_name == 'relu': 68 | f = tf.nn.relu 69 | elif nonlinearity_name == 'elu': 70 | f = tf.nn.elu 71 | elif nonlinearity_name == 'gelu': 72 | # def gelu(x): 73 | # return tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.) 74 | # f = gelu 75 | def gelu_fast(_x): 76 | return 0.5 * _x * (1 + tf.tanh(tf.sqrt(2 / np.pi) * (_x + 0.044715 * tf.pow(_x, 3)))) 77 | f = gelu_fast 78 | elif nonlinearity_name == 'silu': 79 | def silu(_x): 80 | return _x * tf.sigmoid(_x) 81 | f = silu 82 | # elif nonlinearity_name == 'soi': 83 | # def soi_map(x): 84 | # u = tf.random_uniform(tf.shape(x)) 85 | # mask = tf.to_float(tf.less(u, (1 + tf.erf(x / tf.sqrt(2.))) / 2.)) 86 | # return tf.cond(is_training, lambda: tf.mul(mask, x), 87 | # lambda: tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.)) 88 | # f = soi_map 89 | 90 | else: 91 | raise NameError("Need 'relu', 'elu', 'gelu', or 'silu' for nonlinearity_name") 92 | 93 | h1 = f(tf.matmul(x, W['1']) + b['1']) 94 | h1 = tf.cond(is_training, lambda: tf.nn.dropout(h1, p), lambda: h1) 95 | h2 = f(tf.matmul(h1, W['2']) + b['2']) 96 | h2 = tf.cond(is_training, lambda: tf.nn.dropout(h2, p), lambda: h2) 97 | h3 = f(tf.matmul(h2, W['3']) + b['3']) 98 | h3 = tf.cond(is_training, lambda: tf.nn.dropout(h3, p), lambda: h3) 99 | h4 = f(tf.matmul(h3, W['4']) + b['4']) 100 | h4 = tf.cond(is_training, lambda: tf.nn.dropout(h4, p), lambda: h4) 101 | h5 = f(tf.matmul(h4, W['5']) + b['5']) 102 | h5 = tf.cond(is_training, lambda: tf.nn.dropout(h5, p), lambda: h5) 103 | h6 = f(tf.matmul(h5, W['6']) + b['6']) 104 | h6 = tf.cond(is_training, lambda: tf.nn.dropout(h6, p), lambda: h6) 105 | h7 = f(tf.matmul(h6, W['7']) + b['7']) 106 | h7 = tf.cond(is_training, lambda: tf.nn.dropout(h7, p), lambda: h7) 107 | h8 = f(tf.matmul(h7, W['8']) + b['8']) 108 | h8 = tf.cond(is_training, lambda: tf.nn.dropout(h8, p), lambda: h8) 109 | return tf.matmul(h8, W['9']) + b['9'] 110 | 111 | logits = feedforward(x) 112 | loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits, y)) 113 | optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss) 114 | 115 | 
compute_error = tf.reduce_mean(tf.to_float(tf.not_equal(tf.argmax(logits, 1), y))) 116 | 117 | # store future results with previous results 118 | if not os.path.exists("./data/"): 119 | os.makedirs("./data/") 120 | 121 | if os.path.exists("./data/mnist_fcn_" + nonlinearity_name + ".p"): 122 | history = pickle.load(open("./data/mnist_fcn_" + nonlinearity_name + ".p", "rb")) 123 | key_str = str(len(history)//8 + 1) 124 | history["lr" + key_str] = learning_rate 125 | history["dropout" + key_str] = p 126 | history["train_loss" + key_str] = [] 127 | history["val_loss" + key_str] = [] 128 | history["test_loss" + key_str] = [] 129 | history["train_err" + key_str] = [] 130 | history["val_err" + key_str] = [] 131 | history["test_err" + key_str] = [] 132 | else: 133 | history = { 134 | "lr1": learning_rate, "dropout1": p, 135 | 'train_loss1': [], 'val_loss1': [], 'test_loss1': [], 136 | 'train_err1': [], 'val_err1': [], 'test_err1': [] 137 | } 138 | key_str = '1' 139 | 140 | 141 | with tf.Session() as sess: 142 | print('Beginning training') 143 | sess.run(tf.initialize_all_variables()) 144 | 145 | num_batches = mnist.train.num_examples // batch_size 146 | save_every = num_batches//3 # save training information 3 times per epoch 147 | 148 | for epoch in range(training_epochs): 149 | for i in range(num_batches): 150 | bx, by = mnist.train.next_batch(batch_size) 151 | 152 | if p < 1-1e-5: # we want to know how the full network is being optimized instead of the reduced version 153 | l, err = sess.run([loss, compute_error], feed_dict={x: bx, y: by, is_training: False}) 154 | 155 | _, l_drop, err_drop = sess.run([optimizer, loss, compute_error], feed_dict={x: bx, y: by, 156 | is_training: True}) 157 | 158 | if p < 1-1e-5: # we want to know how the full network is being optimized instead of the reduced version 159 | history["train_loss" + key_str].append(l) 160 | history["train_err" + key_str].append(err) 161 | else: 162 | history["train_loss" + key_str].append(l_drop) 163 | history["train_err" + key_str].append(err_drop) 164 | 165 | # save 166 | if i % save_every == 0: 167 | l, err = sess.run([loss, compute_error], 168 | feed_dict={x: mnist.validation.images, 169 | y: mnist.validation.labels, 170 | is_training: False}) 171 | history["val_loss" + key_str].append(l) 172 | history["val_err" + key_str].append(err) 173 | 174 | l, err = sess.run([loss, compute_error], 175 | feed_dict={x: mnist.test.images, 176 | y: mnist.test.labels, 177 | is_training: False}) 178 | history["test_loss" + key_str].append(l) 179 | history["test_err" + key_str].append(err) 180 | 181 | # print('Epoch', epoch + 1, 'Complete') 182 | 183 | # save history 184 | pickle.dump(history, open("./data/mnist_fcn_" + nonlinearity_name + ".p", "wb")) 185 | -------------------------------------------------------------------------------- /timit_fcn.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | import h5py 4 | import pickle 5 | import sys 6 | import io 7 | import os 8 | 9 | # training parameters 10 | training_epochs = 30 11 | batch_size = 64 12 | 13 | # architecture parameters 14 | n_hidden = 2048 15 | n_labels = 39 # 39 phones 16 | n_coeffs = 26 17 | n_context_frames = 11 # 5 + 1 + 5 18 | p = 0.5 # keep rate 19 | 20 | try: 21 | nonlinearity_name = sys.argv[1] # 'relu', 'elu', or 'gelu' 22 | except: 23 | print('Defaulted to gelu since no nonlinearity specified through command line') 24 | nonlinearity_name = 'gelu' 25 | 26 | try: 27 | learning_rate = 
float(sys.argv[2]) # 0.001, 0.0001, 0.00001 28 | except: 29 | print('Defaulted to a learning rate of 0.001') 30 | learning_rate = 1e-3 31 | 32 | 33 | def enumerate_context(i, sentence, num_frames): 34 | r = range(i-num_frames, i+num_frames+1) 35 | r = [x if x>=0 else 0 for x in r] 36 | r = [x if x [sentence_length, num_frames, coefficients] 41 | 42 | assert num_frames % 2 == 1, "Number of frames must be odd (since left + 1 + right, left = right)" 43 | 44 | if num_frames == 1: 45 | return sentence 46 | 47 | context_sent = [] 48 | 49 | for i in range(0, len(sentence)): 50 | context_sent.append([context for context in enumerate_context(i, sentence, (num_frames-1)//2)]) 51 | 52 | return np.array(context_sent).reshape((-1, num_frames*n_coeffs)) 53 | 54 | print('Making graph') 55 | graph = tf.Graph() 56 | with graph.as_default(): 57 | x = tf.placeholder(dtype=tf.float32, shape=[None, n_coeffs*n_context_frames]) 58 | y = tf.placeholder(dtype=tf.int64, shape=[None]) 59 | is_training = tf.placeholder(tf.bool) 60 | 61 | if nonlinearity_name == 'relu': 62 | f = tf.nn.relu 63 | elif nonlinearity_name == 'elu': 64 | f = tf.nn.elu 65 | elif nonlinearity_name == 'gelu': 66 | # def gelu(x): 67 | # return tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.) 68 | # f = gelu 69 | def gelu_fast(_x): 70 | return 0.5 * _x * (1 + tf.tanh(tf.sqrt(2 / np.pi) * (_x + 0.044715 * tf.pow(_x, 3)))) 71 | f = gelu_fast 72 | else: 73 | raise NameError("Need 'relu', 'elu', 'gelu', for nonlinearity_name") 74 | 75 | W = { 76 | '1': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_context_frames*n_coeffs, n_hidden]), 0)), 77 | '2': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 78 | '3': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 79 | '4': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 80 | '5': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 81 | '6': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_hidden]), 0)), 82 | '7': tf.Variable(tf.nn.l2_normalize(tf.random_normal([n_hidden, n_labels]), 0)), 83 | } 84 | b = { 85 | '1': tf.Variable(tf.zeros([n_hidden])), 86 | '2': tf.Variable(tf.zeros([n_hidden])), 87 | '3': tf.Variable(tf.zeros([n_hidden])), 88 | '4': tf.Variable(tf.zeros([n_hidden])), 89 | '5': tf.Variable(tf.zeros([n_hidden])), 90 | '6': tf.Variable(tf.zeros([n_hidden])), 91 | '7': tf.Variable(tf.zeros([n_labels])) 92 | } 93 | 94 | def feedforward(x): 95 | h1 = f(tf.matmul(x, W['1']) + b['1']) 96 | h1 = tf.cond(is_training, lambda: tf.nn.dropout(h1, p), lambda: h1) 97 | h2 = f(tf.matmul(h1, W['2']) + b['2']) 98 | h2 = tf.cond(is_training, lambda: tf.nn.dropout(h2, p), lambda: h2) 99 | h3 = f(tf.matmul(h2, W['3']) + b['3']) 100 | h3 = tf.cond(is_training, lambda: tf.nn.dropout(h3, p), lambda: h3) 101 | h4 = f(tf.matmul(h3, W['4']) + b['4']) 102 | h4 = tf.cond(is_training, lambda: tf.nn.dropout(h4, p), lambda: h4) 103 | h5 = f(tf.matmul(h4, W['5']) + b['5']) 104 | h5 = tf.cond(is_training, lambda: tf.nn.dropout(h5, p), lambda: h5) 105 | 106 | return tf.matmul(h5, W['6']) + b['6'] 107 | 108 | logits = feedforward(x) 109 | loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits, y)) 110 | 111 | optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss) 112 | 113 | compute_error = tf.reduce_mean(tf.to_float(tf.not_equal(tf.argmax(logits, 1), y))) 114 | 115 | 116 | # store future results with previous results 117 | if not os.path.exists("./data/"): 118 | 
os.makedirs("./data/") 119 | 120 | if os.path.exists("./data/timit_fcn_" + nonlinearity_name + ".p"): 121 | history = pickle.load(open("./data/timit_fcn_" + nonlinearity_name + ".p", "rb")) 122 | key_str = str(len(history)//7 + 1) 123 | history["lr" + key_str] = learning_rate 124 | history["train_loss" + key_str] = [] 125 | history["val_loss" + key_str] = [] 126 | history["test_loss" + key_str] = [] 127 | history["train_err" + key_str] = [] 128 | history["val_err" + key_str] = [] 129 | history["test_err" + key_str] = [] 130 | else: 131 | history = { 132 | "lr1": learning_rate, 133 | 'train_loss1': [], 'val_loss1': [], 'test_loss1': [], 134 | 'train_err1': [], 'val_err1': [], 'test_err1': [] 135 | } 136 | key_str = '1' 137 | 138 | print('Loading Data') 139 | data = h5py.File("./data/train.h5") 140 | X_train = data['X'][()] 141 | Y_train = data['y'][()] 142 | train_idxs = data['start_idx'][()] 143 | 144 | train_mean = np.mean(X_train, axis=(0,1)) 145 | train_std = np.std(X_train, axis=(0,1)) 146 | X_train -= train_mean 147 | X_train /= (train_std + 1e-11) 148 | 149 | data = h5py.File("./data/dev.h5") 150 | X_val = data['X'][()] - train_mean 151 | Y_val = data['y'][()] 152 | val_idxs = data['start_idx'][()] 153 | X_val -= train_mean 154 | X_val /= (train_std + 1e-11) 155 | 156 | data = h5py.File("./data/core.h5") 157 | X_test = data['X'][()] - train_mean 158 | Y_test = data['y'][()] 159 | test_idxs = data['start_idx'][()] 160 | X_test -= train_mean 161 | X_test /= (train_std + 1e-11) 162 | del data 163 | print('Number of training examples', X_train.shape[0]) 164 | print('Number of validation examples', X_val.shape[0]) 165 | print('Number of testing examples', X_test.shape[0]) 166 | 167 | 168 | with tf.Session(graph=graph) as sess: 169 | sess.run(tf.initialize_all_variables()) 170 | 171 | num_batches = X_train.shape[0] // batch_size 172 | save_every = num_batches//5 # save training information 5 times per epoch 173 | 174 | for epoch in range(training_epochs): 175 | # shuffle data 176 | indices = np.arange(X_train.shape[0]) 177 | np.random.shuffle(indices) 178 | X_train = X_train[indices] 179 | Y_train = Y_train[indices] 180 | train_idxs = train_idxs[indices] 181 | 182 | for i in range(num_batches): 183 | offset = i * batch_size 184 | _bx, mask_x, _by = X_train[offset:offset+batch_size], train_idxs[offset:offset+batch_size], Y_train[offset:offset+batch_size] 185 | 186 | bx, by = [], [] 187 | for j in range(_bx.shape[0]): 188 | sentence_frames = add_context(_bx[j][mask_x[j]:]) 189 | bx.append(sentence_frames) 190 | by.append(_by[j][mask_x[j]:]) 191 | 192 | bx, by = np.concatenate(bx), np.concatenate(by) 193 | 194 | if p < 1-1e-5: # we want to know how the full network is being optimized instead of the reduced version 195 | l, err = sess.run([loss, compute_error], feed_dict={x: bx, y: by, is_training: False}) 196 | 197 | _, l_drop, err_drop = sess.run([optimizer, loss, compute_error], feed_dict={x: bx, y: by, 198 | is_training: True}) 199 | 200 | if p < 1-1e-5: 201 | history["train_loss" + key_str].append(l) 202 | history["train_err" + key_str].append(err) 203 | else: 204 | history["train_loss" + key_str].append(l_drop) 205 | history["train_err" + key_str].append(err_drop) 206 | 207 | if i % save_every == 0: 208 | err_total = 0 209 | loss_total = 0 210 | for j in range(X_test.shape[0]//batch_size): 211 | offset = j * batch_size 212 | _bx, mask_x, _by = X_test[offset:offset+batch_size], test_idxs[offset:offset+batch_size], Y_test[offset:offset+batch_size] 213 | 214 | bx, by = [], [] 215 | for k in 
range(_bx.shape[0]): 216 | sentence_frames = add_context(_bx[k][mask_x[k]:]) 217 | bx.append(sentence_frames) 218 | by.append(_by[k][mask_x[k]:]) 219 | 220 | bx, by = np.concatenate(bx), np.concatenate(by) 221 | 222 | l, err = sess.run([loss, compute_error], feed_dict={x: bx, y: by, is_training: False}) 223 | loss_total += l 224 | err_total += err 225 | history["test_loss" + key_str].append(loss_total/(X_test.shape[0]//batch_size)) 226 | history["test_err" + key_str].append(err_total/(X_test.shape[0]//batch_size)) 227 | 228 | err_total = 0 229 | loss_total = 0 230 | for j in range(X_val.shape[0]//batch_size): 231 | offset = j * batch_size 232 | _bx, mask_x, _by = X_val[offset:offset+batch_size], val_idxs[offset:offset+batch_size], Y_val[offset:offset+batch_size] 233 | 234 | bx, by = [], [] 235 | for k in range(_bx.shape[0]): 236 | sentence_frames = add_context(_bx[k][mask_x[k]:]) 237 | bx.append(sentence_frames) 238 | by.append(_by[k][mask_x[k]:]) 239 | 240 | bx, by = np.concatenate(bx), np.concatenate(by) 241 | 242 | l, err = sess.run([loss, compute_error], feed_dict={x: bx, y: by, is_training: False}) 243 | loss_total += l 244 | err_total += err 245 | history["val_loss" + key_str].append(loss_total/(X_val.shape[0]//batch_size)) 246 | history["val_err" + key_str].append(err_total/(X_val.shape[0]//batch_size)) 247 | 248 | print('Epoch', epoch+1, 'Complete.', 'Val loss', history["val_loss" + key_str][-1], 'Val error', history["val_err" + key_str][-1]) 249 | 250 | # save history 251 | pickle.dump(history, open("./data/timit_fcn_" + nonlinearity_name + ".p", "wb")) 252 | -------------------------------------------------------------------------------- /twitter_pos.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import tensorflow as tf 3 | import pickle 4 | import sys 5 | import io 6 | import os 7 | 8 | try: 9 | nonlinearity_name = sys.argv[1] # 'relu', 'elu', 'gelu', or 'silu' 10 | except: 11 | print('Defaulted to gelu since no nonlinearity specified through command line') 12 | nonlinearity_name = 'gelu' 13 | 14 | try: 15 | learning_rate = float(sys.argv[2]) # 0.001, 0.0001, 0.00001 16 | except: 17 | print('Defaulted to a learning rate of 0.001') 18 | learning_rate = 1e-3 19 | 20 | p = 0.8 21 | 22 | # 23 | # Begin Twitter Helper Functions 24 | # 25 | 26 | def embeddings_to_dict(filename): 27 | ''' 28 | :param filename: the file name of the word embeddings | file is assumed 29 | to follow this format: "word[tab]dimension 1[space]dimension 2[space]...[space]dimension 50" 30 | :return: a dictionary with keys that are words and values that are the embedding of a word 31 | ''' 32 | with io.open(filename, 'r', encoding='utf-8') as f: 33 | word_vecs = {} 34 | for line in f: 35 | line = line.strip('\n').split() 36 | word_vecs[line[0]] = np.array([float(s) for s in line[1:]]) 37 | 38 | return word_vecs 39 | 40 | 41 | def data_to_mat(filename, vocab, tag_to_number, window_size=1, start_symbol=u'UUUNKKK', 42 | one_hot=False, return_labels=True): 43 | ''' 44 | :param filename: the filename of a training, development, devtest, or test set 45 | :param vocab: a list of strings, one for each embedding (the keys of a dictionary) 46 | :param tag_to_number: a dictionary of tags to predict and a numerical encoding of those tags; 47 | with this, we will predict numbers instead of strings 48 | :param window_size: the context window size for the left and right; thus we have 2*window_size + 1 49 | words considered at a time 50 | :param start_symbol: 
since the symbol has no embedding given, chose a symbol in the vocab 51 | to replace . Common choices are u'UUUNKKK' or u'' 52 | :return: a n x (window_size*2 + 1) matrix containing context windows and the center word 53 | represented as strings; n is the number of examples. ALSO return a n x |tag_to_number| 54 | matrix of labels for the n examples with a one-hot (1-of-k) encoding 55 | ''' 56 | with io.open(filename, 'r', encoding='utf-8') as f: 57 | x, tweet_words, y = [], [], [] 58 | start = True 59 | for line in f: 60 | line = line.strip('\n') 61 | 62 | if len(line) == 0: # if end of tweet 63 | tweet_words.extend([u''] * window_size) 64 | 65 | # ensure tweet words are in vocab; if not, map to "UUUNKKK" 66 | 67 | tweet_words = [w if w in vocab else u'UUUNKKK' for w in tweet_words] 68 | 69 | # from this tweet, add the training tasks to dataset 70 | # the tags were already added to y 71 | for i in range(window_size, len(tweet_words) - window_size): 72 | x.append(tweet_words[i-window_size:i+window_size+1]) 73 | 74 | tweet_words = [] 75 | start = True 76 | continue 77 | 78 | # if before end 79 | word, label = line.split('\t') 80 | 81 | if start: 82 | tweet_words.extend([start_symbol] * window_size) 83 | start = False 84 | 85 | tweet_words.append(word) 86 | 87 | if return_labels is True: 88 | if one_hot is True: 89 | label_one_hot = len(tag_to_number) * [0] 90 | label_one_hot[tag_to_number[label]] += 1 91 | 92 | y.append(label_one_hot) 93 | else: 94 | y.append(tag_to_number[label]) 95 | 96 | return np.array(x), np.array(y) 97 | 98 | 99 | def word_list_to_embedding(words, embeddings, embedding_dimension=50): 100 | ''' 101 | :param words: an n x (2*window_size + 1) matrix from data_to_mat 102 | :param embeddings: an embedding dictionary where keys are strings and values 103 | are embeddings; the output from embeddings_to_dict 104 | :param embedding_dimension: the dimension of the values in embeddings; in this 105 | assignment, embedding_dimension=50 106 | :return: an n x ((2*window_size + 1)*embedding_dimension) matrix where each entry of the 107 | words matrix is replaced with its embedding 108 | ''' 109 | m, n = words.shape 110 | words = words.reshape((-1)) 111 | 112 | return np.array([embeddings[w] for w in words], dtype=np.float32).reshape(m, n*embedding_dimension) 113 | 114 | # 115 | # End Twitter Helper Functions 116 | # 117 | 118 | window_size = 1 119 | 120 | # note that we encode the tags with numbers for later convenience 121 | tag_to_number = { 122 | u'N': 0, u'O': 1, u'S': 2, u'^': 3, u'Z': 4, u'L': 5, u'M': 6, 123 | u'V': 7, u'A': 8, u'R': 9, u'!': 10, u'D': 11, u'P': 12, u'&': 13, u'T': 14, 124 | u'X': 15, u'Y': 16, u'#': 17, u'@': 18, u'~': 19, u'U': 20, u'E': 21, u'$': 22, 125 | u',': 23, u'G': 24 126 | } 127 | 128 | embeddings = embeddings_to_dict('./data/Tweets/embeddings-twitter.txt') 129 | vocab = embeddings.keys() 130 | 131 | # we replace with since it has no embedding, and is a better embedding than UNK 132 | xt, yt = data_to_mat('./data/Tweets/tweets-train.txt', vocab, tag_to_number, window_size=window_size, 133 | start_symbol=u'') 134 | xdev, ydev = data_to_mat('./data/Tweets/tweets-dev.txt', vocab, tag_to_number, window_size=window_size, 135 | start_symbol=u'') 136 | xdtest, ydtest = data_to_mat('./data/Tweets/tweets-devtest.txt', vocab, tag_to_number, window_size=window_size, 137 | start_symbol=u'') 138 | 139 | data = { 140 | 'x_train': xt, 'y_train': yt, 141 | 'x_dev': xdev, 'y_dev': ydev, 142 | 'x_test': xdtest, 'y_test': ydtest 143 | } 144 | 145 | num_epochs = 30 146 
| num_tags = 25 147 | hidden_size = 256 148 | batch_size = 16 149 | embedding_dimension = 50 150 | example_size = (2*window_size + 1)*embedding_dimension 151 | num_examples = data['y_train'].shape[0] 152 | num_batches = num_examples//batch_size 153 | 154 | graph = tf.Graph() 155 | with graph.as_default(): 156 | x = tf.placeholder(tf.float32, [None, example_size]) 157 | y = tf.placeholder(tf.int64, [None]) 158 | is_training = tf.placeholder(tf.bool) 159 | 160 | w1 = tf.Variable(tf.nn.l2_normalize(tf.random_normal([example_size, hidden_size]), 0)) 161 | b1 = tf.Variable(tf.zeros([hidden_size])) 162 | w2 = tf.Variable(tf.nn.l2_normalize(tf.random_normal([hidden_size, hidden_size]), 0)) 163 | b2 = tf.Variable(tf.zeros([hidden_size])) 164 | w_out = tf.Variable(tf.nn.l2_normalize(tf.random_normal([hidden_size, num_tags]), 0)) 165 | b_out = tf.Variable(tf.zeros([num_tags])) 166 | 167 | if nonlinearity_name == 'relu': 168 | f = tf.nn.relu 169 | elif nonlinearity_name == 'elu': 170 | f = tf.nn.elu 171 | elif nonlinearity_name == 'gelu': 172 | # def gelu(x): 173 | # return tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.) 174 | # f = gelu 175 | def gelu_fast(_x): 176 | return 0.5 * _x * (1 + tf.tanh(tf.sqrt(2 / np.pi) * (_x + 0.044715 * tf.pow(_x, 3)))) 177 | f = gelu_fast 178 | elif nonlinearity_name == 'silu': 179 | def silu(_x): 180 | return _x * tf.sigmoid(_x) 181 | f = silu 182 | # elif nonlinearity_name == 'soi': 183 | # def soi_map(x): 184 | # u = tf.random_uniform(tf.shape(x)) 185 | # mask = tf.to_float(tf.less(u, (1 + tf.erf(x / tf.sqrt(2.))) / 2.)) 186 | # return tf.cond(is_training, lambda: tf.mul(mask, x), 187 | # lambda: tf.mul(x, tf.erfc(-x / tf.sqrt(2.)) / 2.)) 188 | # f = soi_map 189 | 190 | else: 191 | raise NameError("Need 'relu', 'elu', 'gelu', or 'silu' for nonlinearity_name") 192 | 193 | def model(data_feed): 194 | h1 = f(tf.matmul(data_feed, w1) + b1) 195 | h1 = tf.cond(is_training, lambda: tf.nn.dropout(h1, p), lambda: h1) 196 | h2 = f(tf.matmul(h1, w2) + b2) 197 | h2 = tf.cond(is_training, lambda: tf.nn.dropout(h2, p), lambda: h2) 198 | return tf.matmul(h2, w_out) + b_out 199 | 200 | logits = model(x) 201 | loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits, y)) 202 | 203 | # pick optimizer 204 | optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss) 205 | 206 | compute_error = tf.reduce_mean(tf.to_float(tf.not_equal(tf.argmax(logits, 1), y))) 207 | 208 | # store future results with previous results 209 | if not os.path.exists("./data/"): 210 | os.makedirs("./data/") 211 | 212 | if os.path.exists("./data/twitter_pos_" + nonlinearity_name + ".p"): 213 | history = pickle.load(open("./data/twitter_pos_" + nonlinearity_name + ".p", "rb")) 214 | key_str = str(len(history)//7 + 1) 215 | history["lr" + key_str] = learning_rate 216 | history["train_loss" + key_str] = [] 217 | history["val_loss" + key_str] = [] 218 | history["test_loss" + key_str] = [] 219 | history["train_err" + key_str] = [] 220 | history["val_err" + key_str] = [] 221 | history["test_err" + key_str] = [] 222 | else: 223 | history = { 224 | "lr1": learning_rate, 225 | 'train_loss1': [], 'val_loss1': [], 'test_loss1': [], 226 | 'train_err1': [], 'val_err1': [], 'test_err1': [] 227 | } 228 | key_str = '1' 229 | 230 | with tf.Session(graph=graph) as sess: 231 | print('Beginning training') 232 | sess.run(tf.initialize_all_variables()) 233 | 234 | save_every = num_batches//5 # save training information 5 times per epoch 235 | 236 | # train 237 | for epoch in range(num_epochs): 238 | # shuffle 
data every epoch 239 | indices = np.arange(num_examples) 240 | np.random.shuffle(indices) 241 | data['x_train'] = data['x_train'][indices] 242 | data['y_train'] = data['y_train'][indices] 243 | 244 | for i in range(num_batches): 245 | offset = i * batch_size 246 | 247 | bx = word_list_to_embedding(data['x_train'][offset:offset + batch_size, :], 248 | embeddings, embedding_dimension) 249 | by = data['y_train'][offset:offset + batch_size] 250 | 251 | if p < 1-1e-5: # we want to know how the full network is being optimized instead of the reduced version 252 | l, err = sess.run([loss, compute_error], feed_dict={x: bx, y: by, is_training: False}) 253 | 254 | _, l_drop, err_drop = sess.run([optimizer, loss, compute_error], feed_dict={x: bx, y: by, 255 | is_training: True}) 256 | 257 | if p < 1-1e-5: # we want to know how the full network is being optimized instead of the reduced version 258 | history["train_loss" + key_str].append(l) 259 | history["train_err" + key_str].append(err) 260 | else: 261 | history["train_loss" + key_str].append(l_drop) 262 | history["train_err" + key_str].append(err_drop) 263 | 264 | if i % save_every == 0: 265 | l, err = sess.run([loss, compute_error], 266 | feed_dict={x: word_list_to_embedding(data['x_dev'], embeddings, embedding_dimension), 267 | y: data['y_dev'], is_training: False}) 268 | history["val_loss" + key_str].append(l) 269 | history["val_err" + key_str].append(err) 270 | 271 | l, err = sess.run([loss, compute_error], 272 | feed_dict={x: word_list_to_embedding(data['x_test'], embeddings, embedding_dimension), 273 | y: data['y_test'], is_training: False}) 274 | history["test_loss" + key_str].append(l) 275 | history["test_err" + key_str].append(err) 276 | 277 | # print('Epoch', epoch + 1, 'Complete') 278 | 279 | # save history 280 | pickle.dump(history, open("./data/twitter_pos_" + nonlinearity_name + ".p", "wb")) 281 | -------------------------------------------------------------------------------- /SGDR_WRNs_gelu.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lasagne implementation of SGDR on WRNs from "SGDR: Stochastic Gradient Descent with Restarts" (http://arxiv.org/abs/XXXX) 3 | This code is based on Lasagne Recipes available at 4 | https://github.com/Lasagne/Recipes/blob/master/papers/deep_residual_learning/Deep_Residual_Learning_CIFAR-10.py 5 | and on WRNs implementation by Florian Muellerklein available at 6 | https://gist.github.com/FlorianMuellerklein/3d9ba175038a3f2e7de3794fa303f1ee 7 | 8 | """ 9 | 10 | import sys 11 | import os 12 | import time 13 | import string 14 | import random 15 | import pickle 16 | 17 | import numpy as np 18 | import theano 19 | import theano.tensor as T 20 | import lasagne 21 | import math 22 | 23 | from lasagne.nonlinearities import rectify, softmax 24 | from lasagne.layers import InputLayer, DenseLayer, DropoutLayer, batch_norm 25 | try: 26 | from lasagne.layers.dnn import BatchNormDNNLayer as BatchNormLayer 27 | except ImportError: 28 | from lasagne.layers import BatchNormLayer 29 | 30 | from lasagne.layers import ElemwiseSumLayer, NonlinearityLayer, GlobalPoolLayer 31 | from lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer 32 | from lasagne.init import HeNormal 33 | from lasagne.layers import Conv2DLayer as ConvLayer 34 | 35 | 36 | # for the larger networks (n>=9), we need to adjust pythons recursion limit 37 | sys.setrecursionlimit(10000) 38 | 39 | # ##################### Load data from CIFAR datasets ####################### 40 | # this code assumes the CIFAR 
dataset files have been extracted in current working directory 41 | # from 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz' for CIFAR-10 42 | # from 'https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz' for CIFAR-100 43 | 44 | def unpickle(file): 45 | import pickle 46 | fo = open(file, 'rb') 47 | dict = pickle.load(fo, encoding='latin1') 48 | fo.close() 49 | return dict 50 | 51 | def load_data(dataset): 52 | xs = [] 53 | ys = [] 54 | if dataset == 'CIFAR-10': 55 | for j in range(5): 56 | d = unpickle('cifar-10-batches-py/data_batch_'+str(j+1)) 57 | x = d['data'] 58 | y = d['labels'] 59 | xs.append(x) 60 | ys.append(y) 61 | 62 | d = unpickle('cifar-10-batches-py/test_batch') 63 | xs.append(d['data']) 64 | ys.append(d['labels']) 65 | if dataset == 'CIFAR-100': 66 | d = unpickle('/home-nfs/dan/cifar_data/cifar-100-python/train') 67 | x = d['data'] 68 | y = d['fine_labels'] 69 | xs.append(x) 70 | ys.append(y) 71 | 72 | d = unpickle('/home-nfs/dan/cifar_data/cifar-100-python/test') 73 | xs.append(d['data']) 74 | ys.append(d['fine_labels']) 75 | 76 | x = np.concatenate(xs)/np.float32(255) 77 | y = np.concatenate(ys) 78 | x = np.dstack((x[:, :1024], x[:, 1024:2048], x[:, 2048:])) 79 | x = x.reshape((x.shape[0], 32, 32, 3)).transpose(0,3,1,2) 80 | 81 | # subtract per-pixel mean 82 | pixel_mean = np.mean(x[0:50000],axis=0) 83 | x -= pixel_mean 84 | 85 | # create mirrored images 86 | X_train = x[0:50000,:,:,:] 87 | Y_train = y[0:50000] 88 | X_train_flip = X_train[:,:,:,::-1] 89 | Y_train_flip = Y_train 90 | X_train = np.concatenate((X_train,X_train_flip),axis=0) 91 | Y_train = np.concatenate((Y_train,Y_train_flip),axis=0) 92 | 93 | X_test = x[50000:,:,:,:] 94 | Y_test = y[50000:] 95 | 96 | return dict( 97 | X_train=lasagne.utils.floatX(X_train), 98 | Y_train=Y_train.astype('int32'), 99 | X_test=lasagne.utils.floatX(X_test), 100 | Y_test=Y_test.astype('int32'),) 101 | 102 | # ##################### Build the neural network model ####################### 103 | 104 | 105 | 106 | def ResNet_FullPre_Wide(input_var=None, nout=10, n=3, k=2, dropoutrate = 0): 107 | def gelu(x): 108 | return 0.5 * x * (1 + T.tanh(T.sqrt(2 / np.pi) * (x + 0.044715 * T.pow(x, 3)))) 109 | f = gelu 110 | ''' 111 | Adapted from https://gist.github.com/FlorianMuellerklein/3d9ba175038a3f2e7de3794fa303f1ee 112 | which was tweaked to be consistent with 'Identity Mappings in Deep Residual Networks', Kaiming He et al. 
2016 (https://arxiv.org/abs/1603.05027) 113 | And 'Wide Residual Networks', Sergey Zagoruyko, Nikos Komodakis 2016 (http://arxiv.org/pdf/1605.07146v1.pdf) 114 | ''' 115 | n_filters = {0:16, 1:16*k, 2:32*k, 3:64*k} 116 | 117 | # create a residual learning building block with two stacked 3x3 convlayers and dropout 118 | def residual_block(l, first=False, increase_dim=False, filters=16): 119 | if increase_dim: 120 | first_stride = (2,2) 121 | else: 122 | first_stride = (1,1) 123 | 124 | conv_1 = ConvLayer(l, num_filters=filters, filter_size=(3,3), stride=first_stride, nonlinearity=f, pad='same', W=HeNormal(gain='relu')) 125 | 126 | if dropoutrate > 0: # with dropout 127 | dropout = DropoutLayer(conv_1, p=dropoutrate) 128 | 129 | # contains the last weight portion, step 6 130 | conv_2 = ConvLayer(dropout, num_filters=filters, filter_size=(3,3), stride=(1,1), nonlinearity=f, pad='same', W=HeNormal(gain='relu')) 131 | else: # without dropout 132 | conv_2 = ConvLayer(conv_1, num_filters=filters, filter_size=(3,3), stride=(1,1), nonlinearity=f, pad='same', W=HeNormal(gain='relu')) 133 | 134 | stack_3 = BatchNormLayer(conv_2) 135 | 136 | # add shortcut connections 137 | if increase_dim: 138 | # projection shortcut, as option B in paper 139 | projection = ConvLayer(l, num_filters=filters, filter_size=(1,1), stride=(2,2), nonlinearity=None, pad='same', b=None) 140 | block = ElemwiseSumLayer([stack_3, projection]) 141 | elif first: 142 | # projection shortcut, as option B in paper 143 | projection = ConvLayer(l, num_filters=filters, filter_size=(1,1), stride=(1,1), nonlinearity=None, pad='same', b=None) 144 | block = ElemwiseSumLayer([stack_3, projection]) 145 | else: 146 | block = ElemwiseSumLayer([stack_3, l]) 147 | 148 | return block 149 | 150 | # Building the network 151 | l_in = InputLayer(shape=(None, 3, 32, 32), input_var=input_var) 152 | 153 | # we're normalizing the input as the net sees fit, and we normalize the output 154 | l = batch_norm(ConvLayer(l_in, num_filters=n_filters[0], filter_size=(3,3), stride=(1,1), nonlinearity=f, pad='same', W=HeNormal(gain='relu'))) 155 | l = BatchNormLayer(l) 156 | 157 | # first stack of residual blocks 158 | l = residual_block(l, first=True, filters=n_filters[1]) 159 | for _ in range(1,n): 160 | l = residual_block(l, filters=n_filters[1]) 161 | 162 | # second stack of residual blocks 163 | l = residual_block(l, increase_dim=True, filters=n_filters[2]) 164 | for _ in range(1,n): 165 | l = residual_block(l, filters=n_filters[2]) 166 | 167 | # third stack of residual blocks 168 | l = residual_block(l, increase_dim=True, filters=n_filters[3]) 169 | for _ in range(1,n): 170 | l = residual_block(l, filters=n_filters[3]) 171 | 172 | bn_post_conv = BatchNormLayer(l) 173 | bn_post_relu = NonlinearityLayer(bn_post_conv, f) 174 | 175 | # average pooling 176 | avg_pool = GlobalPoolLayer(bn_post_relu) 177 | 178 | # fully connected layer 179 | network = DenseLayer(avg_pool, num_units=nout, W=HeNormal(), nonlinearity=softmax) 180 | 181 | return network 182 | 183 | # ############################# Batch iterator ############################### 184 | 185 | def iterate_minibatches(inputs, targets, batchsize, shuffle=False, augment=False): 186 | assert len(inputs) == len(targets) 187 | if shuffle: 188 | indices = np.arange(len(inputs)) 189 | np.random.shuffle(indices) 190 | for start_idx in range(0, len(inputs) - batchsize + 1, batchsize): 191 | if shuffle: 192 | excerpt = indices[start_idx:start_idx + batchsize] 193 | else: 194 | excerpt = slice(start_idx, start_idx + 
batchsize)
195 |         if augment:
196 |             # as in the paper:
197 |             # pad feature arrays with 4 pixels on each side
198 |             # and do random cropping of 32x32
199 |             padded = np.pad(inputs[excerpt],((0,0),(0,0),(4,4),(4,4)),mode='constant')
200 |             random_cropped = np.zeros(inputs[excerpt].shape, dtype=np.float32)
201 |             crops = np.random.random_integers(0,high=8,size=(batchsize,2))
202 |             for r in range(batchsize):
203 |                 random_cropped[r,:,:,:] = padded[r,:,crops[r,0]:(crops[r,0]+32),crops[r,1]:(crops[r,1]+32)]
204 |             inp_exc = random_cropped
205 |         else:
206 |             inp_exc = inputs[excerpt]
207 | 
208 |         yield inp_exc, targets[excerpt]
209 | 
210 | # ############################## Main program ################################
211 | 
212 | def main(dataset = 'CIFAR-10', iscenario = 0, n=5, k = 1, num_epochs=82, model = None, irun = 0, Te = 2.0, E1 = 41, E2 = 61, E3 = 81,
213 |          lr=0.1, lr_fac=0.1, reg_fac=0.0005, t0=math.pi/2.0, Estart = 0, dropoutrate = 0, multFactor = 1):
214 |     # Check if CIFAR data exists
215 |     if dataset == 'CIFAR-10':
216 |         if not os.path.exists("./cifar-10-batches-py"):
217 |             print("CIFAR-10 dataset cannot be found. Please download the dataset from 'https://www.cs.toronto.edu/~kriz/cifar.html'.")
218 |             return
219 |         nout = 10
220 |     if dataset == 'CIFAR-100':
221 |         if not os.path.exists("./cifar-100-python"):
222 |             print("CIFAR-100 dataset cannot be found. Please download the dataset from 'https://www.cs.toronto.edu/~kriz/cifar.html'.")
223 |             return
224 |         nout = 100
225 |     # Load the dataset
226 |     print("Loading data...")
227 |     data = load_data(dataset)
228 |     X_train = data['X_train']
229 |     Y_train = data['Y_train']
230 |     X_test = data['X_test']
231 |     Y_test = data['Y_test']
232 | 
233 |     # Prepare Theano variables for inputs and targets
234 |     input_var = T.tensor4('inputs')
235 |     target_var = T.ivector('targets')
236 | 
237 |     # Create neural network model
238 |     print("Building model and compiling functions...")
239 |     network = ResNet_FullPre_Wide(input_var, nout, n, k, dropoutrate)
240 |     print("number of parameters in model: %d" % lasagne.layers.count_params(network, trainable=True))
241 | 
242 |     if model is None:
243 |         # Create a loss expression for training, i.e., a scalar objective we want
244 |         # to minimize (for our multi-class problem, it is the cross-entropy loss):
245 |         prediction = lasagne.layers.get_output(network)
246 |         loss = lasagne.objectives.categorical_crossentropy(prediction, target_var)
247 |         loss = loss.mean()
248 |         # add weight decay
249 |         all_layers = lasagne.layers.get_all_layers(network)
250 |         sh_reg_fac = theano.shared(lasagne.utils.floatX(reg_fac))
251 |         l2_penalty = lasagne.regularization.regularize_layer_params(all_layers, lasagne.regularization.l2) * sh_reg_fac
252 |         loss = loss + l2_penalty
253 | 
254 |         # Create update expressions for training
255 |         # Stochastic Gradient Descent (SGD) with Nesterov momentum
256 |         params = lasagne.layers.get_all_params(network, trainable=True)
257 |         sh_lr = theano.shared(lasagne.utils.floatX(lr))
258 |         # updates = lasagne.updates.momentum(loss, params, learning_rate=sh_lr, momentum=0.9)
259 |         updates = lasagne.updates.nesterov_momentum(loss, params, learning_rate=sh_lr, momentum=0.9)
260 | 
261 |         # Compile a function performing a training step on a mini-batch (by giving
262 |         # the updates dictionary) and returning the corresponding training loss:
263 |         train_fn = theano.function([input_var, target_var], loss, updates=updates)
264 | 
265 |         # Create a loss expression for validation/testing
266 |         test_prediction = 
lasagne.layers.get_output(network, deterministic=True)
267 |         test_loss = lasagne.objectives.categorical_crossentropy(test_prediction, target_var)
268 | 
269 |         test_loss = test_loss.mean()
270 |         test_acc = T.mean(T.eq(T.argmax(test_prediction, axis=1), target_var), dtype=theano.config.floatX)
271 | 
272 |         # Compile a second function computing the validation loss and accuracy:
273 |         val_fn = theano.function([input_var, target_var], [test_loss, test_acc])
274 | 
275 |     # statistics file
276 |     filename = "stat_{}_{}.txt".format(iscenario, irun)
277 |     myfile = open(filename, 'w+')
278 |     start_time0 = time.time()
279 | 
280 |     tt = 0
281 |     TeNext = Te
282 |     batchsize = 128
283 | 
284 |     if model is None:
285 |         # launch the training loop
286 |         print("Starting training...")
287 |         # We iterate over epochs:
288 |         for epoch in range(num_epochs):
289 |             # shuffle training data
290 |             train_indices = np.arange(X_train.shape[0])
291 |             np.random.shuffle(train_indices)
292 |             X_train = X_train[train_indices,:,:,:]
293 |             Y_train = Y_train[train_indices]
294 | 
295 |             # In each epoch, we do a full pass over the training data:
296 |             train_err = 0
297 |             train_batches = 0
298 |             start_time = time.time()
299 | 
300 |             for batch in iterate_minibatches(X_train, Y_train, batchsize, shuffle=True, augment=True):
301 |                 inputs, targets = batch
302 |                 train_err += train_fn(inputs, targets)
303 |                 train_batches += 1
304 | 
305 |                 if (epoch+1 >= Estart): # time to start adjusting the learning rate
306 |                     dt = 2.0*math.pi/float(2.0*Te)
307 |                     tt = tt + float(dt)/(len(Y_train)/float(batchsize))
308 |                     if tt >= math.pi:
309 |                         tt = tt - math.pi
310 |                     curT = t0 + tt
311 |                     new_lr = lr * (1.0 + math.sin(curT))/2.0 # lr_min = 0, lr_max = lr; with t0 = pi/2 this equals lr*(1+cos(tt))/2, the SGDR cosine annealing
312 |                     sh_lr.set_value(lasagne.utils.floatX(new_lr))
313 | 
314 |             if epoch+1 == TeNext: # time to restart the schedule (warm restart)
315 |                 tt = 0 # by setting to 0 we set lr to lr_max, see above
316 |                 Te = int(Te * multFactor) # change the period of restarts
317 |                 TeNext = TeNext + Te # note the next restart's epoch
318 | 
319 |             # And a full pass over the validation data:
320 |             val_err = 0
321 |             val_acc = 0
322 |             val_batches = 0
323 |             for batch in iterate_minibatches(X_test, Y_test, 500, shuffle=False):
324 |                 inputs, targets = batch
325 |                 err, acc = val_fn(inputs, targets)
326 |                 val_err += err
327 |                 val_acc += acc
328 |                 val_batches += 1
329 | 
330 |             # Then we print the results for this epoch:
331 |             print("Epoch {} of {} took {:.3f}s".format(epoch + 1, num_epochs, time.time() - start_time))
332 |             print("  training loss:\t\t{:.6f}".format(train_err / train_batches))
333 |             print("  validation loss:\t\t{:.6f}".format(val_err / val_batches))
334 |             print("  validation accuracy:\t\t{:.2f} %".format(val_acc / val_batches * 100))
335 | 
336 |             # write per-epoch statistics to the stats file
337 |             myfile.write("{}\t{:.15g}\t{:.15g}\t{:.15g}\t{:.15g}\t{:.15g}\t{:.15g}\n".format(epoch, float(sh_lr.get_value()),
338 |                 time.time() - start_time0, Te, train_err / train_batches, val_err / val_batches, val_acc / val_batches * 100))
339 | 
340 |             # dump the network weights to a file:
341 |             if epoch % 10 == 0 or epoch > 45:
342 |                 filesave = "./data/network_{}_{}_{}.npz".format(iscenario,irun,epoch)
343 |                 np.savez(filesave, *lasagne.layers.get_all_param_values(network))
344 |                 pickle.dump(lasagne.layers.get_all_param_values(network),
345 |                     open("./data/network_{}_{}_{}.pkl".format(iscenario,irun,epoch), 'wb'))
346 |                 print('Saved')
347 | 
348 |             # adjust learning rate as in the original multi-step approach
349 |             if (epoch+1) == E1 or (epoch+1) == E2 or (epoch+1) == E3:
350 |                 new_lr = 
sh_lr.get_value() * lr_fac 351 | print("New LR:"+str(new_lr)) 352 | sh_lr.set_value(lasagne.utils.floatX(new_lr)) 353 | 354 | # final save 355 | filesave = "./data/network_{}_{}_end.npz".format(iscenario,irun) 356 | np.savez(filesave, *lasagne.layers.get_all_param_values(network)) 357 | pickle.dump(lasagne.layers.get_all_param_values(network), 358 | open("./data/network_{}_{}_end.pkl".format(iscenario,irun), 'wb')) 359 | 360 | else: 361 | # load network weights from model file 362 | with np.load(model) as f: 363 | param_values = [f['arr_%d' % i] for i in range(len(f.files))] 364 | lasagne.layers.set_all_param_values(network, param_values) 365 | 366 | myfile.close() 367 | 368 | # Calculate validation error of model: 369 | test_err = 0 370 | test_acc = 0 371 | test_batches = 0 372 | for batch in iterate_minibatches(X_test, Y_test, 500, shuffle=False): 373 | inputs, targets = batch 374 | err, acc = val_fn(inputs, targets) 375 | test_err += err 376 | test_acc += acc 377 | test_batches += 1 378 | print("Final results:") 379 | print(" test loss:\t\t\t{:.6f}".format(test_err / test_batches)) 380 | print(" test accuracy:\t\t{:.2f} %".format( 381 | test_acc / test_batches * 100)) 382 | 383 | 384 | if __name__ == '__main__': 385 | 386 | # the only input is 'iscenario' index used to reproduce the experiments given in the paper 387 | # scenario #1 and #2 correspond to the original multi-step learning rate decay on CIFAR-10 388 | # scenarios [3-6] are 4 options for our SGDR 389 | # scenarios [7-10] are the same options but for 2 times wider WRNs, i.e., WRN-28-20 390 | # scenarios [11-20] are the same as [1-10] but for CIFAR-100 391 | iscenario = int(sys.argv[1]) 392 | model = None 393 | 394 | dataset = 'CIFAR-100' 395 | 396 | iruns = [1,2,3,4,5] 397 | lr = 0.1 398 | lr_fac = 0.2 399 | reg_fac = 0.0005 400 | t0 = math.pi/2.0 401 | Te = 50 402 | dropoutrate = 0.3 # TODO: note this is on 403 | multFactor = 1 404 | num_epochs = 50 405 | E1 = -1; E2 = -1; E3 = -1; Estart = -1 406 | 407 | # CIFAR-10 408 | if (iscenario == 1): dataset = 'CIFAR-10'; n = 4; k = 10; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 409 | if (iscenario == 2): dataset = 'CIFAR-10'; n = 4; k = 10; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 410 | if (iscenario == 3): dataset = 'CIFAR-10'; n = 6; k = 4; Te = 50; # 40-4 411 | if (iscenario == 4): dataset = 'CIFAR-10'; n = 4; k = 10; Te = 100; 412 | if (iscenario == 5): dataset = 'CIFAR-10'; n = 6; k = 2; Te = 1; multFactor = 2; 413 | if (iscenario == 6): dataset = 'CIFAR-10'; n = 4; k = 10; Te = 10; multFactor = 2; 414 | if (iscenario == 7): dataset = 'CIFAR-10'; n = 4; k = 20; Te = 50; 415 | if (iscenario == 8): dataset = 'CIFAR-10'; n = 4; k = 20; Te = 100; 416 | if (iscenario == 9): dataset = 'CIFAR-10'; n = 4; k = 20; Te = 1; multFactor = 2; 417 | if (iscenario == 10): dataset = 'CIFAR-10'; n = 4; k = 20; Te = 10; multFactor = 2; 418 | 419 | # the same for CIFAR-100 420 | if (iscenario == 11): dataset = 'CIFAR-100'; n = 4; k = 10; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 421 | if (iscenario == 12): dataset = 'CIFAR-100'; n = 4; k = 10; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 422 | if (iscenario == 13): dataset = 'CIFAR-100'; n = 6; k = 4; Te = 50; # 40-4 423 | if (iscenario == 14): dataset = 'CIFAR-100'; n = 4; k = 10; Te = 100; 424 | if (iscenario == 15): dataset = 'CIFAR-100'; n = 6; k = 2; Te = 1; multFactor = 2; 425 | if (iscenario == 16): dataset = 'CIFAR-100'; n = 4; k = 10; Te = 10; multFactor = 2; 426 | if (iscenario == 17): 
dataset = 'CIFAR-100'; n = 4; k = 20; Te = 50; 427 | if (iscenario == 18): dataset = 'CIFAR-100'; n = 4; k = 20; Te = 100; 428 | if (iscenario == 19): dataset = 'CIFAR-100'; n = 4; k = 20; Te = 1; multFactor = 2; 429 | if (iscenario == 20): dataset = 'CIFAR-100'; n = 4; k = 20; Te = 10; multFactor = 2; 430 | 431 | # very wide nets on CIFAR-10 and CIFAR-100 432 | if (iscenario == 21): dataset = 'CIFAR-10'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 433 | if (iscenario == 22): dataset = 'CIFAR-10'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 434 | if (iscenario == 23): dataset = 'CIFAR-100'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 435 | if (iscenario == 24): dataset = 'CIFAR-100'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 436 | if (iscenario == 25): dataset = 'CIFAR-10'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 437 | if (iscenario == 26): dataset = 'CIFAR-10'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 438 | if (iscenario == 27): dataset = 'CIFAR-100'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.1; 439 | if (iscenario == 28): dataset = 'CIFAR-100'; n = 4; k = 20; E1 = 60; E2 = 120; E3 = 160; Estart = 10000; lr = 0.05; 440 | 441 | irun = 1 442 | main(dataset, iscenario, n, k, num_epochs, model, irun, Te, E1, E2, E3, lr, lr_fac, reg_fac, t0, Estart, dropoutrate, multFactor) 443 | -------------------------------------------------------------------------------- /data/Tweets/tweets-dev.txt: -------------------------------------------------------------------------------- 1 | <@MENTION> @ 2 | it O 3 | was V 4 | on P 5 | football N 6 | wives N 7 | , , 8 | one $ 9 | of P 10 | the D 11 | players N 12 | and & 13 | his D 14 | wife N 15 | own V 16 | smash ^ 17 | burger ^ 18 | 19 | rt ~ 20 | <@MENTION> @ 21 | : ~ 22 | currently R 23 | laughing V 24 | at P 25 | laker ^ 26 | haters N 27 | . , 28 | 29 | rt ~ 30 | <@MENTION> @ 31 | : ~ 32 | sat ^ 33 | november ^ 34 | 6 $ 35 | ill L 36 | be V 37 | at P 38 | nashville ^ 39 | center N 40 | stage N 41 | for P 42 | the D 43 | ice N 44 | kream N 45 | party N 46 | for P 47 | << ~ 48 | bruh N 49 | it's L 50 | the D 51 | music N 52 | center N 53 | event N 54 | center N 55 | 56 | you O 57 | don't V 58 | know V 59 | my D 60 | struggle N 61 | 62 | wind N 63 | 2.0 $ 64 | mph N 65 | se N 66 | . , 67 | barometer N 68 | 29.676 $ 69 | in N 70 | , , 71 | rising V 72 | . , 73 | temperature N 74 | 56.3 $ 75 | °f ^ 76 | . , 77 | rain N 78 | today N 79 | 0.00 $ 80 | in N 81 | . , 82 | humidity N 83 | 45% $ 84 | 85 | <@MENTION> @ 86 | forgive V 87 | me O 88 | for P 89 | blowing V 90 | up T 91 | your D 92 | youtube ^ 93 | comment N 94 | section N 95 | . , 96 | 97 | new A 98 | question N 99 | : , 100 | how R 101 | can V 102 | you O 103 | mend V 104 | a D 105 | broken A 106 | heart N 107 | ? , 108 | really R 109 | ? , 110 | please V 111 | ?: , 112 | U 113 | 114 | <@MENTION> @ 115 | can V 116 | u O 117 | follow V 118 | me O 119 | please V 120 | ??? , 121 | it O 122 | will V 123 | mean V 124 | the D 125 | world N 126 | to P 127 | me O 128 | !! , 129 | :) E 130 | ♥♥♥ E 131 | 132 | <@MENTION> @ 133 | =) E 134 | lls ! 
135 | 136 | <@MENTION> @ 137 | i O 138 | knew V 139 | about P 140 | it O 141 | last A 142 | night N 143 | , , 144 | but & 145 | didn't V 146 | bother V 147 | calling V 148 | shawn ^ 149 | because P 150 | i'd L 151 | just R 152 | be V 153 | working V 154 | on P 155 | it O 156 | this D 157 | morning N 158 | w/ P 159 | same A 160 | info N 161 | . , 162 | 163 | senate ^ 164 | #artsgrades N 165 | are V 166 | in P 167 | ! , 168 | see V 169 | who O 170 | passed V 171 | and & 172 | who O 173 | made V 174 | the D 175 | dirty ^ 176 | dozen ^ 177 | . , 178 | #arts # 179 | U 180 | via P 181 | <@MENTION> @ 182 | 183 | rt ~ 184 | <@MENTION> @ 185 | : ~ 186 | i O 187 | want V 188 | to P 189 | sit V 190 | at P 191 | a D 192 | counter N 193 | in P 194 | a D 195 | diner N 196 | and & 197 | watch V 198 | the D 199 | news N 200 | and & 201 | tune V 202 | out T 203 | over P 204 | some D 205 | fresh A 206 | pancakes N 207 | , , 208 | bananas N 209 | with P 210 | rum N 211 | , , 212 | cri G 213 | ... ~ 214 | 215 | 29p N 216 | 11r N 217 | pal ^ 218 | gasol ^ 219 | went V 220 | da D 221 | fuck N 222 | off P 223 | 224 | <@MENTION> @ 225 | i O 226 | don't V 227 | use V 228 | chrome ^ 229 | due P 230 | to P 231 | the D 232 | lack N 233 | of P 234 | a D 235 | good A 236 | twitter ^ 237 | extension N 238 | ( , 239 | and & 240 | extension N 241 | behavior N 242 | in P 243 | general) N 244 | . , 245 | also R 246 | , , 247 | it O 248 | looks V 249 | weird A 250 | 251 | so P 252 | who's L 253 | going V 254 | to P 255 | the D 256 | ethernet ^ 257 | expo ^ 258 | next A 259 | week N 260 | in P 261 | nyc ^ 262 | ? , 263 | 264 | i'm L 265 | addicted A 266 | to P 267 | that D 268 | shitt N 269 | . , 270 | x E 271 | ) E 272 | rt ~ 273 | <@MENTION> @ 274 | : ~ 275 | tetris ^ 276 | ! , 277 | (: E 278 | " , 279 | 280 | <@MENTION> @ 281 | i O 282 | see V 283 | that P 284 | , , 285 | regretfully R 286 | i O 287 | was V 288 | tied V 289 | up T 290 | , , 291 | physed N 292 | at P 293 | forest ^ 294 | green ^ 295 | - , 296 | have V 297 | a D 298 | good A 299 | one $ 300 | 301 | costume N 302 | ready A 303 | ! , 304 | where R 305 | to P 306 | go V 307 | for P 308 | halloween ^ 309 | on P 310 | fri ^ 311 | and & 312 | sat ^ 313 | ... , 314 | thinking V 315 | pyramid ^ 316 | on P 317 | sat ^ 318 | ... , 319 | 320 | confirmed V 321 | no D 322 | phone N 323 | , , 324 | now R 325 | i O 326 | have V 327 | to P 328 | go V 329 | to P 330 | school N 331 | tomorrow N 332 | so P 333 | i O 334 | don't V 335 | get V 336 | to P 337 | see V 338 | <@MENTION> @ 339 | till P 340 | like R 341 | the D 342 | weekend N 343 | or & 344 | something N 345 | . , 346 | / E 347 | : E 348 | mom's L 349 | yelling V 350 | . , 351 | 352 | 10/27- $ 353 | make V 354 | sure A 355 | to P 356 | get V 357 | your D 358 | ir N 359 | projects N 360 | uploaded V 361 | ... , 362 | and & 363 | dress V 364 | up T 365 | for P 366 | halloween ^ 367 | on P 368 | friday ^ 369 | !! , 370 | 371 | " , 372 | because P 373 | if P 374 | seeing V 375 | is V 376 | believing V 377 | , , 378 | then R 379 | believe V 380 | that P 381 | we O 382 | have V 383 | lost V 384 | our D 385 | eyes N 386 | ..." , 387 | #fb # 388 | 389 | <@MENTION> @ 390 | u O 391 | from P 392 | nashville ^ 393 | ? , 394 | 395 | rt ~ 396 | <@MENTION> @ 397 | : ~ 398 | get V 399 | into P 400 | an D 401 | argument N 402 | with P 403 | your D 404 | tattooist N 405 | ? , 406 | what O 407 | could V 408 | possibly R 409 | go V 410 | wrong A 411 | ? , 412 | U 413 | wow ! 
414 | 415 | jay-z ^ 416 | responds V 417 | to P 418 | beyonce ^ 419 | pregnancy N 420 | rumors N 421 | | G 422 | the D 423 | urban ^ 424 | daily ^ 425 | U 426 | 427 | <@MENTION> @ 428 | lol ! 429 | boy N 430 | shut V 431 | up T 432 | ! , 433 | 434 | i O 435 | wonder V 436 | if P 437 | you O 438 | realize V 439 | we O 440 | were V 441 | talking V 442 | about P 443 | you O 444 | . , 445 | 446 | rt ~ 447 | <@MENTION> @ 448 | : ~ 449 | charice ^ 450 | !! , 451 | #breakoutmusicartist # 452 | #peopleschoice # 453 | <@MENTION> @ 454 | U 455 | 456 | #follow V 457 | ----> G 458 | <@MENTION> @ 459 | <@MENTION> @ 460 | <@MENTION> @ 461 | <@MENTION> @ 462 | <@MENTION> @ 463 | <@MENTION> @ 464 | <@MENTION> @ 465 | <@MENTION> @ 466 | 467 | rt ~ 468 | <@MENTION> @ 469 | : ~ 470 | holy A 471 | crap N 472 | - , 473 | oracle/hp ^ 474 | going V 475 | to P 476 | war N 477 | U 478 | 479 | ignite V 480 | to P 481 | write V 482 | : , 483 | this D 484 | photo N 485 | needs V 486 | a D 487 | title N 488 | !: , 489 | this D 490 | photograph N 491 | certainly R 492 | needs V 493 | a D 494 | title N 495 | ! , 496 | can V 497 | you O 498 | give V 499 | it O 500 | one $ 501 | ? , 502 | it O 503 | se G 504 | ... ~ 505 | U 506 | 507 | <@MENTION> @ 508 | nah ! 509 | man N 510 | your L 511 | a D 512 | #biebro N 513 | don't V 514 | think V 515 | i O 516 | didn't V 517 | save V 518 | that D 519 | phone N 520 | call N 521 | ! , 522 | ;) E 523 | 524 | what's L 525 | scarier A 526 | than P 527 | fake A 528 | blood N 529 | , , 530 | guts N 531 | and & 532 | scars N 533 | ? , 534 | how R 535 | much A 536 | they O 537 | can V 538 | cost V 539 | : E 540 | s E 541 | so P 542 | try V 543 | these D 544 | gory A 545 | homemade A 546 | solutions N 547 | ! , 548 | U 549 | 550 | rt ~ 551 | <@MENTION> @ 552 | : ~ 553 | i O 554 | know V 555 | it O 556 | gets V 557 | repetitive A 558 | but & 559 | ..... , 560 | i O 561 | love V 562 | my D 563 | life N 564 | 565 | 76 $ 566 | degrees N 567 | ?? , 568 | 569 | america ^ 570 | , , 571 | fuck ! 572 | yeah ! 573 | ! , 574 | <@MENTION> @ 575 | awesome A 576 | old A 577 | man N 578 | makes V 579 | a D 580 | jack-o-lantern N 581 | with P 582 | a D 583 | gun N 584 | ( , 585 | video N 586 | ) , 587 | U 588 | 589 | <@MENTION> @ 590 | lmao ! 591 | u O 592 | should V 593 | see V 594 | my D 595 | hand N 596 | motions N 597 | 598 | we O 599 | went V 600 | 2 R 601 | long A 602 | without P 603 | talkin V 604 | bout P 605 | some D 606 | shit N 607 | i O 608 | don't V 609 | care V 610 | 2 P 611 | discuss V 612 | . , 613 | i O 614 | guess V 615 | its L 616 | about P 617 | that D 618 | time N 619 | . , 620 | the D 621 | peace N 622 | was V 623 | good A 624 | while P 625 | it O 626 | lasted V 627 | * , 628 | sighs V 629 | 630 | am V 631 | i O 632 | ur D 633 | one A 634 | and & 635 | only A 636 | desire N 637 | ? , 638 | am V 639 | i O 640 | the D 641 | reason N 642 | you O 643 | breathe V 644 | , , 645 | or & 646 | am V 647 | i O 648 | the D 649 | reason N 650 | you O 651 | #cry V 652 | ? , 653 | 654 | #np # 655 | can't V 656 | get V 657 | enough N 658 | <@MENTION> @ 659 | ♥ E 660 | 661 | the D 662 | blog N 663 | updates N 664 | begin V 665 | ... , 666 | U 667 | 668 | etonline ^ 669 | coverage N 670 | of P 671 | jt ^ 672 | and & 673 | friends ^ 674 | concert N 675 | U 676 | 677 | ... , 678 | i O 679 | love V 680 | him O 681 | and & 682 | he O 683 | love V 684 | me O 685 | ... , 686 | aint V 687 | nuthin N 688 | u O 689 | can V 690 | tell V 691 | us O 692 | ... 
, 693 | u O 694 | can V 695 | try V 696 | all D 697 | u O 698 | want V 699 | to P 700 | this D 701 | bond N 702 | is V 703 | too R 704 | strong A 705 | ... , 706 | so R 707 | imma L 708 | n… G 709 | U 710 | 711 | <@MENTION> @ 712 | him O 713 | , , 714 | lmfao ! 715 | - , 716 | 717 | but & 718 | of P 719 | course N 720 | his D 721 | fat A 722 | ass N 723 | wants V 724 | treats N 725 | so P 726 | either D 727 | way N 728 | i O 729 | had V 730 | to P 731 | get V 732 | up T 733 | . , 734 | 735 | <@MENTION> @ 736 | my D 737 | comment N 738 | ( , 739 | currently R 740 | awaiting V 741 | moderation N 742 | ) , 743 | will V 744 | probably R 745 | chase V 746 | them O 747 | away R 748 | ;) E 749 | 750 | wait V 751 | a D 752 | second N 753 | it's L 754 | god ^ 755 | on P 756 | my D 757 | phone N 758 | hammer ^ 759 | u O 760 | are V 761 | mad R 762 | gay A 763 | and & 764 | u O 765 | will V 766 | not R 767 | do V 768 | anything N 769 | u O 770 | mad A 771 | i O 772 | am V 773 | illuminate A 774 | i O 775 | am V 776 | king N 777 | augustus ^ 778 | say V 779 | it O 780 | to P 781 | me O 782 | 783 | rt ~ 784 | <@MENTION> @ 785 | : ~ 786 | fuel ^ 787 | with P 788 | special A 789 | guests N 790 | is V 791 | playing V 792 | at P 793 | trees ^ 794 | this D 795 | thursday ^ 796 | ! , 797 | want V 798 | to P 799 | be V 800 | a D 801 | personal A 802 | guest N 803 | of P 804 | the ^ 805 | observer ^ 806 | ? , 807 | rt V 808 | to P 809 | win V 810 | t G 811 | ... ~ 812 | 813 | i O 814 | love V 815 | my D 816 | manager N 817 | she O 818 | lettin V 819 | me O 820 | out T 821 | just R 822 | in P 823 | time N 824 | for P 825 | my D 826 | #colts ^ 827 | #mnf ^ 828 | game N 829 | siiiiiced ! 830 | 831 | rt ~ 832 | <@MENTION> @ 833 | : ~ 834 | i O 835 | can V 836 | say V 837 | that P 838 | tonights S 839 | 106 $ 840 | is V 841 | gonna V 842 | be V 843 | a D 844 | zoo N 845 | ! , 846 | <@MENTION> @ 847 | & & 848 | janet ^ 849 | on P 850 | the D 851 | same A 852 | show N 853 | ? , 854 | #goingtoworkontime # 855 | ! , 856 | 857 | world ^ 858 | cup ^ 859 | soccer's S 860 | psychic N 861 | octopus N 862 | paul ^ 863 | dies V 864 | in P 865 | germany ^ 866 | U 867 | 868 | ring N 869 | ceremony N 870 | baby N 871 | ! , 872 | #lakers # 873 | 874 | slightly R 875 | stoopid A 876 | @ P 877 | acl ^ 878 | interview N 879 | with P 880 | slacker ^ 881 | radio ^ 882 | part N 883 | 2 $ 884 | : , 885 | U 886 | 887 | #warriors ^ 888 | first A 889 | game N 890 | on P 891 | the D 892 | 27th $ 893 | vs P 894 | . , 895 | houston ^ 896 | 897 | this O 898 | is V 899 | startin V 900 | to P 901 | get V 902 | borin A 903 | ... , 904 | 905 | <@MENTION> @ 906 | thanks N 907 | :d E 908 | 909 | east ^ 910 | bay ^ 911 | football N 912 | poll N 913 | – , 914 | 10/26 $ 915 | U 916 | 917 | happy A 918 | international A 919 | year N 920 | of P 921 | biodiversity N 922 | ! , 923 | what O 924 | better A 925 | way N 926 | to P 927 | celebrate V 928 | than P 929 | tuning V 930 | in T 931 | to P 932 | croplife's Z 933 | biodiversity ^ 934 | world ^ 935 | tour ^ 936 | . , 937 | #bwt2010 # 938 | 939 | stumbleupon ^ 940 | video N 941 | finds V 942 | ted ^ 943 | and & 944 | hulu ^ 945 | content N 946 | and & 947 | takes V 948 | surfing V 949 | social A 950 | : , 951 | “people N 952 | like V 953 | stumbling V 954 | videos N 955 | more R 956 | than P 957 | web N 958 | ... ~ 959 | U 960 | 961 | i'm L 962 | not R 963 | a D 964 | boy N 965 | -_- E 966 | 967 | <@MENTION> @ 968 | hahah ! 969 | well ! 
970 | that O 971 | a D 972 | diff A 973 | story N 974 | then R 975 | :p E 976 | lmao ! 977 | he O 978 | doesnt V 979 | wanna V 980 | talk V 981 | to P 982 | ya O 983 | . , 984 | hes L 985 | talkin V 986 | to P 987 | me O 988 | foo N 989 | ! , 990 | ;) E 991 | 992 | we O 993 | has V 994 | real A 995 | internets N 996 | at P 997 | laaaaaaaaast A 998 | ! , 999 | 1000 | {{ , 1001 | is V 1002 | only R 1003 | in P 1004 | <@MENTION> N 1005 | while P 1006 | watching V 1007 | get V 1008 | him O 1009 | to P 1010 | the D 1011 | greek A 1012 | ! , 1013 | }} , 1014 | 1015 | rt ~ 1016 | <@MENTION> @ 1017 | : ~ 1018 | 8 $ 1019 | days N 1020 | until P 1021 | they O 1022 | #freeweezy G 1023 | . , 1024 | shoutout V 1025 | #freeweezy # 1026 | all D 1027 | day N 1028 | everyday N 1029 | until P 1030 | he's L 1031 | home N 1032 | !! , 1033 | 1034 | rt ~ 1035 | <@MENTION> @ 1036 | : ~ 1037 | but & 1038 | i'm L 1039 | going V 1040 | to P 1041 | praise V 1042 | god ^ 1043 | at P 1044 | my D 1045 | lowest A 1046 | point N 1047 | . , 1048 | amen ! 1049 | 1050 | super R 1051 | excited A 1052 | for P 1053 | our D 1054 | halloweeen ^ 1055 | sisterhood N 1056 | tomorrowwww N 1057 | :) E 1058 | trick N 1059 | or & 1060 | treat N 1061 | ! , 1062 | 1063 | <@MENTION> @ 1064 | your L 1065 | local A 1066 | and & 1067 | that's L 1068 | cool A 1069 | , , 1070 | hit V 1071 | me O 1072 | up T 1073 | direct A 1074 | would V 1075 | like V 1076 | to P 1077 | discuss V 1078 | where R 1079 | this O 1080 | is V 1081 | going V 1082 | ... , 1083 | 1084 | now R 1085 | i O 1086 | need V 1087 | to P 1088 | go V 1089 | home R 1090 | and & 1091 | make V 1092 | nachos N 1093 | with P 1094 | this O 1095 | probably R 1096 | bad A 1097 | for P 1098 | me O 1099 | tostitos ^ 1100 | cheese N 1101 | sauce N 1102 | . , 1103 | #mmmm # 1104 | 1105 | plan ^ 1106 | express/isqft ^ 1107 | announce V 1108 | print N 1109 | partnership N 1110 | with P 1111 | usgn ^ 1112 | U 1113 | 1114 | <@MENTION> @ 1115 | #oneword # 1116 | : , 1117 | lazyasfuck A 1118 | lol ! 1119 | 1120 | rt ~ 1121 | <@MENTION> @ 1122 | ray ^ 1123 | allen ^ 1124 | look V 1125 | just R 1126 | like P 1127 | my D 1128 | dad N 1129 | ! , 1130 | lol ! 1131 | <== ~ 1132 | #flagontheplay # 1133 | 1134 | <@MENTION> @ 1135 | <@MENTION> @ 1136 | she O 1137 | cant V 1138 | cus P 1139 | she O 1140 | probably R 1141 | the D 1142 | same A 1143 | size N 1144 | as P 1145 | the D 1146 | bop N 1147 | haha ! 1148 | 1149 | well ! 1150 | that O 1151 | was V 1152 | strange A 1153 | ! , 1154 | hahahahhaa ! 1155 | . , 1156 | 1157 | american ^ 1158 | airlines ^ 1159 | discounts N 1160 | from P 1161 | sta ^ 1162 | U 1163 | 1164 | support V 1165 | pray V 1166 | for P 1167 | indonesia ^ 1168 | , , 1169 | add V 1170 | a D 1171 | #twibbon N 1172 | to P 1173 | your D 1174 | avatar N 1175 | now R 1176 | ! , 1177 | - G 1178 | U 1179 | #prayforindonesia # 1180 | 1181 | <@MENTION> @ 1182 | the D 1183 | invite N 1184 | was V 1185 | actually R 1186 | 2 P 1187 | lure V 1188 | ' , 1189 | em O 1190 | in2 P 1191 | the D 1192 | den N 1193 | of P 1194 | lions N 1195 | . , 1196 | she O 1197 | assured V 1198 | me O 1199 | they'd L 1200 | bring V 1201 | napkins N 1202 | 2 P 1203 | wipe V 1204 | away T 1205 | tears N 1206 | frm P 1207 | #roastin V 1208 | 1209 | <@MENTION> @ 1210 | self-importance N 1211 | ( , 1212 | ego N 1213 | ) , 1214 | prevents V 1215 | a D 1216 | person N 1217 | from P 1218 | seeing V 1219 | the D 1220 | the D 1221 | value N 1222 | in P 1223 | others N 1224 | . 
, 1225 | 1226 | i O 1227 | think V 1228 | i O 1229 | get V 1230 | fever N 1231 | . , 1232 | geez ! 1233 | . , 1234 | 1235 | rt ~ 1236 | <@MENTION> @ 1237 | : ~ 1238 | black ^ 1239 | party ^ 1240 | hits V 1241 | power ^ 1242 | 98 $ 1243 | todoay R 1244 | ! , 1245 | 7046614311 $ 1246 | for P 1247 | party N 1248 | info N 1249 | <@MENTION> @ 1250 | <@MENTION> @ 1251 | <@MENTION> @ 1252 | <@MENTION> @ 1253 | 1254 | <@MENTION> @ 1255 | we O 1256 | are V 1257 | making V 1258 | a D 1259 | difference N 1260 | already R 1261 | : , 1262 | U 1263 | 1264 | rt ~ 1265 | <@MENTION> @ 1266 | : ~ 1267 | <@MENTION> @ 1268 | lol ! 1269 | he O 1270 | cant V 1271 | win V 1272 | a D 1273 | game N 1274 | by P 1275 | his D 1276 | self(that's G 1277 | why R 1278 | he O 1279 | got V 1280 | the D 1281 | other A 1282 | 2 $ 1283 | !! ! 1284 | )( , 1285 | 1286 | rt ~ 1287 | <@MENTION> @ 1288 | fun A 1289 | times N 1290 | U 1291 | < ~ 1292 | love V 1293 | this O 1294 | haha ! 1295 | heelllooo ! 1296 | 1297 | just R 1298 | found V 1299 | out T 1300 | who's L 1301 | been V 1302 | callin V 1303 | me O 1304 | private R 1305 | n & 1306 | hangin V 1307 | up T 1308 | , , 1309 | and & 1310 | all D 1311 | i O 1312 | have V 1313 | to P 1314 | say V 1315 | is V 1316 | ... , 1317 | bitch N 1318 | get V 1319 | the D 1320 | fuck N 1321 | outta O 1322 | here N 1323 | ! , 1324 | lmao ! 1325 | 1326 | <@MENTION> @ 1327 | what O 1328 | about P 1329 | me O 1330 | ..... , 1331 | cry V 1332 | cry V 1333 | 1334 | what O 1335 | you O 1336 | see V 1337 | is V 1338 | what O 1339 | you O 1340 | get V 1341 | 1342 | part N 1343 | builder N 1344 | i $ 1345 | : , 1346 | define V 1347 | U 1348 | 1349 | rt ~ 1350 | <@MENTION> @ 1351 | : ~ 1352 | overhearing V 1353 | the D 1354 | way N 1355 | some D 1356 | guys N 1357 | talk V 1358 | about P 1359 | girls N 1360 | makes V 1361 | me O 1362 | cringe V 1363 | 1364 | how R 1365 | does V 1366 | adam ^ 1367 | morrison ^ 1368 | have V 1369 | a D 1370 | ring N 1371 | , , 1372 | and & 1373 | reggie ^ 1374 | miller ^ 1375 | doesn't V 1376 | have V 1377 | one $ 1378 | ? , 1379 | #mysteriesofhoops # 1380 | 1381 | techsmith ^ 1382 | | G 1383 | jing ^ 1384 | , , 1385 | instant A 1386 | screenshots N 1387 | and & 1388 | screencasts N 1389 | , , 1390 | home N 1391 | U 1392 | via P 1393 | <@MENTION> @ 1394 | 1395 | ╚✰ѕнoυтoυт'ѕ™✰╝>>>> N 1396 | <@MENTION> @ 1397 | 1398 | animation N 1399 | of P 1400 | yesterdays S 1401 | historic A 1402 | storm N 1403 | . , 1404 | U 1405 | 1406 | <@MENTION> @ 1407 | lol ! 1408 | how R 1409 | ? , 1410 | i'm L 1411 | never R 1412 | small A 1413 | minded A 1414 | 1415 | rt ~ 1416 | <@MENTION> @ 1417 | : ~ 1418 | grrrrrrrr ! 1419 | ! , 1420 | / , 1421 | guau ! 1422 | ! , 1423 | 1424 | i O 1425 | love V 1426 | dwayne ^ 1427 | wade ^ 1428 | !!!! , 1429 | i O 1430 | wanna V 1431 | marry V 1432 | him O 1433 | !!! , 1434 | ;) E 1435 | 1436 | rt ~ 1437 | <@MENTION> @ 1438 | : ~ 1439 | shannon ^ 1440 | brown ^ 1441 | is V 1442 | kobe ^ 1443 | tonight N 1444 | : , 1445 | 16 $ 1446 | points N 1447 | in P 1448 | 17 $ 1449 | minutes N 1450 | , , 1451 | 6 $ 1452 | of P 1453 | 8 $ 1454 | shooting N 1455 | , , 1456 | 4 $ 1457 | of P 1458 | 5 $ 1459 | from P 1460 | deep A 1461 | -- , 1462 | plus & 1463 | some D 1464 | key A 1465 | steals N 1466 | . , 1467 | 1468 | rt ~ 1469 | <@MENTION> @ 1470 | : ~ 1471 | “all D 1472 | that P 1473 | glitters V 1474 | is V 1475 | not R 1476 | gold” N 1477 | true A 1478 | . 
, 1479 | sometimes R 1480 | it’s L 1481 | morons N 1482 | pretending V 1483 | to P 1484 | be V 1485 | vampires N 1486 | . , 1487 | case N 1488 | in P 1489 | point N 1490 | , , 1491 | edward ^ 1492 | . , 1493 | what O 1494 | ... ~ 1495 | 1496 | i've L 1497 | had V 1498 | a D 1499 | few A 1500 | requests N 1501 | that P 1502 | i O 1503 | re-enable V 1504 | comments N 1505 | on P 1506 | poetry N 1507 | on P 1508 | my D 1509 | site N 1510 | . , 1511 | 1512 | looking V 1513 | for P 1514 | video N 1515 | rigs N 1516 | for P 1517 | my D 1518 | #7d ^ 1519 | if P 1520 | any1 O 1521 | can V 1522 | point V 1523 | me O 1524 | in P 1525 | the D 1526 | right A 1527 | direction N 1528 | thatd L 1529 | be V 1530 | great A 1531 | ! , 1532 | #video # 1533 | #ikan # 1534 | #redrockmicro # 1535 | #zacuto # 1536 | #idc # 1537 | #ebay # 1538 | 1539 | rt ~ 1540 | <@MENTION> @ 1541 | : ~ 1542 | <@MENTION> @ 1543 | usac ^ 1544 | pledged V 1545 | 10¢ $ 1546 | for P 1547 | shane ^ 1548 | hmiel ^ 1549 | recovery N 1550 | fund N 1551 | for P 1552 | every D 1553 | new A 1554 | fb ^ 1555 | fan- N 1556 | link N 1557 | : , 1558 | U 1559 | . , 1560 | lets L 1561 | get V 1562 | ... ~ 1563 | 1564 | people N 1565 | always R 1566 | fear V 1567 | what O 1568 | they O 1569 | don't V 1570 | know V 1571 | . , 1572 | 1573 | back R 1574 | to P 1575 | real A 1576 | life N 1577 | : , 1578 | picking V 1579 | up T 1580 | camp ^ 1581 | kern ^ 1582 | fundraiser N 1583 | pizzas N 1584 | tonight R 1585 | , , 1586 | hoping V 1587 | to P 1588 | start V 1589 | on P 1590 | bathroom N 1591 | tile N 1592 | tomorrow R 1593 | and & 1594 | laundry N 1595 | ... , 1596 | always R 1597 | laundry N 1598 | .. , 1599 | 1600 | <@MENTION> @ 1601 | i O 1602 | would V 1603 | of V 1604 | swore V 1605 | u O 1606 | said V 1607 | that O 1608 | yesterday N 1609 | :/ E 1610 | lol ! 1611 | 1612 | before P 1613 | i O 1614 | get V 1615 | started V 1616 | on P 1617 | this O 1618 | post N 1619 | let V 1620 | me O 1621 | make V 1622 | one $ 1623 | thing N 1624 | absolutely R 1625 | clear A 1626 | . , 1627 |   G 1628 | i O 1629 | am V 1630 | a D 1631 | data N 1632 | junkie N 1633 | . , 1634 |   G 1635 | my D 1636 | job N 1637 | is V 1638 | to P 1639 | develo V 1640 | - ~ 1641 | U 1642 | 1643 | rt ~ 1644 | <@MENTION> @ 1645 | : ~ 1646 | are V 1647 | you O 1648 | sure A 1649 | this O 1650 | was V 1651 | big A 1652 | 3 $ 1653 | vs P 1654 | . , 1655 | big A 1656 | 3 $ 1657 | ? , 1658 | bosh ^ 1659 | has V 1660 | played V 1661 | up P 1662 | in P 1663 | toronto ^ 1664 | , , 1665 | away R 1666 | from P 1667 | spotlight N 1668 | , , 1669 | pressure N 1670 | . , 1671 | this D 1672 | stage N 1673 | loo G 1674 | ... ~ 1675 | 1676 | runaway ^ 1677 | is V 1678 | already R 1679 | on P 1680 | the D 1681 | 106 $ 1682 | top A 1683 | ten $ 1684 | countdown N 1685 | , , 1686 | it O 1687 | really R 1688 | is V 1689 | mind A 1690 | blowing A 1691 | . , 1692 | 1693 | <@MENTION> @ 1694 | i O 1695 | was V 1696 | going V 1697 | to P 1698 | go V 1699 | to P 1700 | your D 1701 | concert N 1702 | in P 1703 | la ^ 1704 | and & 1705 | i O 1706 | couldnt V 1707 | because P 1708 | i O 1709 | couldnt V 1710 | get V 1711 | a D 1712 | ride N 1713 | :(( E 1714 | i O 1715 | wish V 1716 | went V 1717 | . 
, 1718 | 1719 | rt ~ 1720 | <@MENTION> @ 1721 | time ^ 1722 | warner ^ 1723 | cable ^ 1724 | what O 1725 | the D 1726 | fuck N 1727 | is V 1728 | the D 1729 | fuckin A 1730 | problem N 1731 | 1732 | it O 1733 | needs V 1734 | to P 1735 | be V 1736 | wrestling N 1737 | season N 1738 | so P 1739 | i O 1740 | can V 1741 | concentrate V 1742 | on P 1743 | something N 1744 | other A 1745 | than P 1746 | this O 1747 | . , 1748 | 1749 | real A 1750 | niqqa N 1751 | tlkin V 1752 | stfu G 1753 | hoe N 1754 | 1755 | rt ~ 1756 | <@MENTION> @ 1757 | : ~ 1758 | chocolate N 1759 | mod N 1760 | beatle N 1761 | boots N 1762 | by P 1763 | eklecticxplosion ^ 1764 | on P 1765 | etsy ^ 1766 | U 1767 | #etsy # 1768 | 1769 | <@MENTION> @ 1770 | what O 1771 | is V 1772 | ? , 1773 | 1774 | <@MENTION> @ 1775 | wat O 1776 | cha O 1777 | doin V 1778 | 1779 | flirty A 1780 | aprons N 1781 | mother's ^ 1782 | day ^ 1783 | 40% $ 1784 | off R 1785 | sale N 1786 | , , 1787 | code N 1788 | fa-4110 $ 1789 | . , 1790 | U 1791 | 1792 | teaching V 1793 | . , 1794 | 1795 | fuck V 1796 | you O 1797 | america ^ 1798 | ... , 1799 | 1800 | stfu ! 1801 | #teamceltics ^ 1802 | is V 1803 | not R 1804 | silent A 1805 | ... , 1806 | and & 1807 | we O 1808 | still R 1809 | up R 1810 | nigga ! 1811 | what ! 1812 | !!! , 1813 | 1814 | <@MENTION> @ 1815 | me O 1816 | =) E 1817 | 1818 | <@MENTION> @ 1819 | yea ! 1820 | you O 1821 | right A 1822 | ! , 1823 | :) E 1824 | i O 1825 | ain't V 1826 | got V 1827 | time N 1828 | .. , 1829 | bye ! 1830 | bye ! 1831 | . , 1832 | 1833 | a D 1834 | day N 1835 | without P 1836 | you O 1837 | is V 1838 | like P 1839 | a D 1840 | year N 1841 | without P 1842 | rain N 1843 | <3 E 1844 | 1845 | <@MENTION> @ 1846 | first A 1847 | concert N 1848 | ? , 1849 | ohmygosh ! 1850 | awww ! 1851 | <3 E 1852 | . , 1853 | how R 1854 | it O 1855 | was V 1856 | ? , 1857 | :d E 1858 | 1859 | rt ~ 1860 | <@MENTION> @ 1861 | : ~ 1862 | i O 1863 | don't V 1864 | see V 1865 | any D 1866 | rockets ^ 1867 | fans N 1868 | on P 1869 | my D 1870 | timeline N 1871 | ... , 1872 | all D 1873 | i O 1874 | see V 1875 | is V 1876 | #teamlakers ^ 1877 | ... , 1878 | question N 1879 | is V 1880 | ... , 1881 | do V 1882 | rocket ^ 1883 | fans N 1884 | exist V 1885 | ? , 1886 | 1887 | mc ^ 1888 | hammer ^ 1889 | beefing V 1890 | with P 1891 | jay-z ^ 1892 | !!?? , 1893 | lmao ! 1894 | wait V 1895 | no ! 1896 | " , 1897 | king ^ 1898 | hammer ^ 1899 | " , 1900 | 1901 | rt ~ 1902 | <@MENTION> @ 1903 | : ~ 1904 | true A 1905 | shawols N 1906 | : , 1907 | they're L 1908 | who O 1909 | accept V 1910 | any D 1911 | member's S 1912 | decision N 1913 | !!! , 1914 | rt V 1915 | if P 1916 | u O 1917 | are V 1918 | true A 1919 | shawol N 1920 | !!! , 1921 | i'm L 1922 | supporting V 1923 | them O 1924 | even R 1925 | i O 1926 | fell V 1927 | a D 1928 | bit N 1929 | ... ~ 1930 | 1931 | rt ~ 1932 | <@MENTION> @ 1933 | : ~ 1934 | grungereport ^ 1935 | : , 1936 | dave ^ 1937 | grohl ^ 1938 | & & 1939 | krist ^ 1940 | novoselic ^ 1941 | of P 1942 | nirvana ^ 1943 | confirmed V 1944 | to P 1945 | reunite V 1946 | for P 1947 | a D 1948 | song N 1949 | on P 1950 | new A 1951 | foo ^ 1952 | fighters ^ 1953 | album N 1954 | http U 1955 | :/ U 1956 | ... ~ 1957 | 1958 | love V 1959 | <@MENTION> @ 1960 | ! , 1961 | thanks N 1962 | for P 1963 | taking V 1964 | awesome A 1965 | care N 1966 | of P 1967 | our D 1968 | clients N 1969 | :) E 1970 | 1971 | <@MENTION> @ 1972 | anida ^ 1973 | !! 
, 1974 | :) E 1975 | 1976 | today's S 1977 | metrotube ^ 1978 | : , 1979 | an D 1980 | oddly R 1981 | touching A 1982 | electro N 1983 | pop N 1984 | tribute N 1985 | to P 1986 | celebrity N 1987 | mugshots N 1988 | — , 1989 | it O 1990 | almost R 1991 | makes V 1992 | you O 1993 | feel V 1994 | bad A 1995 | for P 1996 | them O 1997 | U 1998 | 1999 | rt ~ 2000 | <@MENTION> @ 2001 | : ~ 2002 | i'm L 2003 | glad A 2004 | we O 2005 | live V 2006 | in P 2007 | a D 2008 | world N 2009 | where R 2010 | a D 2011 | little A 2012 | kid N 2013 | can V 2014 | wear V 2015 | a D 2016 | superman ^ 2017 | cape N 2018 | to P 2019 | the D 2020 | airport N 2021 | . , 2022 | 2023 | <@MENTION> @ 2024 | replace V 2025 | mansfield ^ 2026 | with P 2027 | azle ^ 2028 | & & 2029 | i O 2030 | can V 2031 | totally R 2032 | relate V 2033 | ! , 2034 | 2035 | you O 2036 | smile V 2037 | i O 2038 | smile V 2039 | ! , 2040 | 2041 | and & 2042 | from P 2043 | now R 2044 | to P 2045 | my D 2046 | very R 2047 | last A 2048 | breaththis G 2049 | day N 2050 | i'll L 2051 | cherishyou G 2052 | look V 2053 | so R 2054 | beautiful A 2055 | in P 2056 | whitetonight G 2057 | 2058 | great A 2059 | evening N 2060 | touring V 2061 | van ^ 2062 | buren ^ 2063 | with P 2064 | lt ^ 2065 | . , 2066 | governor ^ 2067 | candidate N 2068 | brian ^ 2069 | calley ^ 2070 | . , 2071 | we O 2072 | visited V 2073 | with P 2074 | residents N 2075 | at P 2076 | white ^ 2077 | oaks ^ 2078 | ... ~ 2079 | U 2080 | 2081 | rotflmao ! 2082 | my D 2083 | nigga N 2084 | <@MENTION> @ 2085 | was V 2086 | gonna V 2087 | in P 2088 | on P 2089 | that O 2090 | #tt # 2091 | #myexgirlfriend # 2092 | . , 2093 | lol ! 2094 | i'm L 2095 | over P 2096 | diein V 2097 | lol ! 2098 | 2099 | rt ~ 2100 | <@MENTION> @ 2101 | : ~ 2102 | rt ~ 2103 | <@MENTION> @ 2104 | : ~ 2105 | u.s. ^ 2106 | dept ^ 2107 | of P 2108 | commerce ^ 2109 | , , 2110 | eda ^ 2111 | grants V 2112 | $500k $ 2113 | to P 2114 | develop V 2115 | metrics N 2116 | for P 2117 | #econdev N 2118 | U 2119 | 2120 | rt ~ 2121 | <@MENTION> @ 2122 | : ~ 2123 | miami ^ 2124 | put V 2125 | a D 2126 | fork N 2127 | in P 2128 | it O 2129 | ... , 2130 | 2131 | <@MENTION> @ 2132 | man ! 2133 | , , 2134 | would V 2135 | you O 2136 | look V 2137 | at P 2138 | that D 2139 | scenery N 2140 | . , 2141 | a D 2142 | man N 2143 | could V 2144 | die V 2145 | happy A 2146 | with P 2147 | a D 2148 | view N 2149 | like P 2150 | that O 2151 | . , 2152 | #potsandpans # 2153 | 2154 | my D 2155 | pen N 2156 | died V 2157 | .... , 2158 | it's L 2159 | the D 2160 | only A 2161 | pen N 2162 | i've L 2163 | ever R 2164 | used V 2165 | in P 2166 | college N 2167 | . , 2168 | i'm L 2169 | lost V 2170 | with P 2171 | out P 2172 | you O 2173 | . , 2174 | 2175 | #win V 2176 | grave ^ 2177 | witch ^ 2178 | by P 2179 | kalayna ^ 2180 | price ^ 2181 | U 2182 | 2183 | <@MENTION> @ 2184 | lol ! 2185 | @ P 2186 | elec ^ 2187 | boogaloo ^ 2188 | . , 2189 | congrats ! 2190 | family N 2191 | ! , 2192 | 2193 | rt ~ 2194 | <@MENTION> @ 2195 | rob ^ 2196 | ford's Z 2197 | interview N 2198 | on P 2199 | <@MENTION> @ 2200 | U 2201 | #voteto # 2202 | #yyccc # 2203 | | G 2204 | interesting A 2205 | juxtaposed V 2206 | to P 2207 | shaheen's Z 2208 | speech N 2209 | 2210 | it O 2211 | is V 2212 | a D 2213 | good A 2214 | day N 2215 | to P 2216 | get V 2217 | inspired V 2218 | on P 2219 | <@MENTION> @ 2220 | - , 2221 | <@MENTION> @ 2222 | , , 2223 | <@MENTION> @ 2224 | and & 2225 | <@MENTION> @ 2226 | . 
, 2227 | i'm L 2228 | totally R 2229 | buying V 2230 | the D 2231 | movie N 2232 | & & 2233 | books N 2234 | ! , 2235 | 2236 | main A 2237 | girl N 2238 | > G 2239 | phone N 2240 | full A 2241 | of P 2242 | bitches N 2243 | . , 2244 | 2245 | rt ~ 2246 | <@MENTION> @ 2247 | : ~ 2248 | united ^ 2249 | way ^ 2250 | update N 2251 | : , 2252 | so R 2253 | far R 2254 | you O 2255 | have V 2256 | helped V 2257 | raise V 2258 | 15.7 $ 2259 | m $ 2260 | dollars N 2261 | . , 2262 | the D 2263 | campaign N 2264 | goal N 2265 | is V 2266 | 33.1 $ 2267 | m $ 2268 | and & 2269 | it O 2270 | wraps V 2271 | up T 2272 | dec ^ 2273 | 2nd $ 2274 | . , 2275 | 2276 | <@MENTION> @ 2277 | thanks N 2278 | shaun ^ 2279 | ! , 2280 | 2281 | <@MENTION> @ 2282 | i O 2283 | always R 2284 | do V 2285 | . , 2286 | 2287 | bb ^ 2288 | kings Z 2289 | is V 2290 | unwalkable A 2291 | ! , 2292 | ( , 2293 | <@MENTION> @ 2294 | voice N 2295 | ) , 2296 | for P 2297 | <@MENTION> @ 2298 | #supportrealhiphop # 2299 | ! , 2300 | 2301 | <@MENTION> @ 2302 | lol ! 2303 | ? , 2304 | 2305 | rt ~ 2306 | <@MENTION> @ 2307 | <@MENTION> @ 2308 | u O 2309 | sit V 2310 | down T 2311 | lol ! 2312 | <<< ~ 2313 | lol ! 2314 | doin V 2315 | it O 2316 | already R 2317 | 2318 | ray ^ 2319 | ray ^ 2320 | all D 2321 | day N 2322 | 2323 | check V 2324 | out T 2325 | <@MENTION> @ 2326 | avatar N 2327 | . , 2328 | it O 2329 | is V 2330 | so R 2331 | perfect A 2332 | . , 2333 | vintage N 2334 | with P 2335 | a D 2336 | cause N 2337 | . , 2338 | :) E 2339 | 2340 | <@MENTION> @ 2341 | morning N 2342 | <3 E 2343 | 2344 | <@MENTION> @ 2345 | onyx G 2346 | ??? , 2347 | onyd G 2348 | = , 2349 | oh ! 2350 | no ! 2351 | you O 2352 | didn't V 2353 | . , 2354 | i O 2355 | think V 2356 | you O 2357 | made V 2358 | that D 2359 | one $ 2360 | up T 2361 | 2362 | <@MENTION> @ 2363 | no D 2364 | one N 2365 | cares V 2366 | . , 2367 | ppl N 2368 | are V 2369 | used A 2370 | to P 2371 | foolish A 2372 | grabs N 2373 | for P 2374 | attention N 2375 | . , 2376 | they'll L 2377 | just R 2378 | * , 2379 | shrug V 2380 | * , 2381 | 2382 | ' , 2383 | rent N 2384 | too R 2385 | damn R 2386 | high A 2387 | ' , 2388 | ny ^ 2389 | candidate N 2390 | jimmy ^ 2391 | mcmillan ^ 2392 | inspires V 2393 | talking N 2394 | doll N 2395 | - , 2396 | the… D 2397 | U 2398 | #palin # 2399 | #teaparty # 2400 | 2401 | <@MENTION> @ 2402 | just R 2403 | to P 2404 | clarify V 2405 | when R 2406 | you O 2407 | cleared V 2408 | your D 2409 | browser N 2410 | cookies N 2411 | you O 2412 | also R 2413 | cleared V 2414 | the D 2415 | browser N 2416 | cache N 2417 | ? , 2418 | ^sm G 2419 | 2420 | rt ~ 2421 | <@MENTION> @ 2422 | : ~ 2423 | name N 2424 | says V 2425 | it O 2426 | all D 2427 | : , 2428 | U 2429 | not R 2430 | a D 2431 | scam N 2432 | . , 2433 | it's L 2434 | for P 2435 | real A 2436 | . , 2437 | might V 2438 | be V 2439 | the D 2440 | only A 2441 | robot N 2442 | you O 2443 | can V 2444 | trust V 2445 | on P 2446 | the D 2447 | comput N 2448 | ... ~ 2449 | 2450 | i O 2451 | pick V 2452 | my D 2453 | nose N 2454 | wit P 2455 | my D 2456 | penis N 2457 | 2458 | well ! 2459 | my D 2460 | night N 2461 | away R 2462 | from P 2463 | here R 2464 | didn't V 2465 | last V 2466 | too R 2467 | long A 2468 | . , 2469 | i O 2470 | want V 2471 | the D 2472 | new A 2473 | sims ^ 2474 | 3 $ 2475 | coming V 2476 | out T 2477 | for P 2478 | xbox ^ 2479 | 360 $ 2480 | - , 2481 | so P 2482 | who O 2483 | wants V 2484 | to P 2485 | buy V 2486 | it O 2487 | for P 2488 | me O 2489 | ?! , 2490 | lol ! 
2491 | 2492 | why R 2493 | did V 2494 | they O 2495 | add V 2496 | urban ^ 2497 | league ^ 2498 | to P 2499 | cleveland ^ 2500 | ??? , 2501 | i O 2502 | miss V 2503 | ms. ^ 2504 | jenkins ^ 2505 | being V 2506 | with P 2507 | us O 2508 | everyday N 2509 | ;( E 2510 | 2511 | rt ~ 2512 | <@MENTION> @ 2513 | : ~ 2514 | looking V 2515 | 4 P 2516 | a D 2517 | new A 2518 | ride N 2519 | ? , 2520 | ( , 2521 | #purrsuit N 2522 | team N 2523 | <@MENTION> @ 2524 | , , 2525 | maybe R 2526 | ? , 2527 | ) , 2528 | aaa ^ 2529 | auto ^ 2530 | sales ^ 2531 | makes V 2532 | it O 2533 | easy A 2534 | U 2535 | 2536 | 28 $ 2537 | inspirational A 2538 | examples N 2539 | of P 2540 | well R 2541 | designed A 2542 | contact N 2543 | pages N 2544 | U 2545 | 2546 | lol ! 2547 | we O 2548 | dumb A 2549 | ... , 2550 | but & 2551 | i O 2552 | like V 2553 | that O 2554 | 2555 | xnevershoutbrianna ^ 2556 | asked V 2557 | : , 2558 | i’m L 2559 | sorry A 2560 | ! , 2561 | its L 2562 | ten $ 2563 | her R 2564 | and & 2565 | i O 2566 | has V 2567 | school N 2568 | tomorrow N 2569 | and & 2570 | i O 2571 | gotta V 2572 | take V 2573 | a D 2574 | shower N 2575 | . , 2576 | wait V 2577 | wait V 2578 | ... ~ 2579 | U 2580 | 2581 | #prayforindonesia # 2582 | , ~ 2583 | rt ~ 2584 | <@MENTION> @ 2585 | : ~ 2586 | <@MENTION> @ 2587 | <@MENTION> @ 2588 | <@MENTION> @ 2589 | <@MENTION> @ 2590 | <@MENTION> @ 2591 | <@MENTION> @ 2592 | ( , 2593 | cont ~ 2594 | ) , 2595 | U 2596 | 2597 | rt ~ 2598 | <@MENTION> @ 2599 | : ~ 2600 | #sayno2 G 2601 | flat A 2602 | asses N 2603 | 2604 | do V 2605 | i O 2606 | have V 2607 | to P 2608 | be V 2609 | a D 2610 | slut N 2611 | to P 2612 | get V 2613 | attention N 2614 | wtf ! 2615 | : , 2616 | U 2617 | 2618 | <@MENTION> @ 2619 | that's L 2620 | pretty R 2621 | hawt A 2622 | is V 2623 | that O 2624 | all D 2625 | of P 2626 | the D 2627 | outfit N 2628 | ? , 2629 | 2630 | exactly R 2631 | how R 2632 | i O 2633 | expected V 2634 | it O 2635 | 2636 | i O 2637 | wish V 2638 | i O 2639 | can V 2640 | rewind V 2641 | time N 2642 | n & 2643 | go V 2644 | way R 2645 | back R 2646 | to P 2647 | when R 2648 | i O 2649 | was V 2650 | playing V 2651 | with P 2652 | my D 2653 | doll N 2654 | house N 2655 | n & 2656 | tea N 2657 | set N 2658 | with P 2659 | my D 2660 | imaginary A 2661 | friends N 2662 | it O 2663 | was V 2664 | much R 2665 | better A 2666 | then R 2667 | . , 2668 | 2669 | photo N 2670 | : , 2671 | awwwww ! 2672 | . , 2673 | honestly R 2674 | imma L 2675 | lakers ^ 2676 | just R 2677 | cuz P 2678 | they O 2679 | are V 2680 | :p E 2681 | joking V 2682 | . , 2683 | i O 2684 | don’t V 2685 | even R 2686 | have V 2687 | a D 2688 | favorite A 2689 | team N 2690 | :/ E 2691 | U 2692 | 2693 | <@MENTION> @ 2694 | <33 E 2695 | love V 2696 | yur D 2697 | twitconn N 2698 | 2699 | <@MENTION> @ 2700 | lmaoooo ! 2701 | i O 2702 | gotta V 2703 | team N 2704 | that P 2705 | does V 2706 | that O 2707 | ?!? , 2708 | who O 2709 | ?? , 2710 | 2711 | help V 2712 | us O 2713 | reach V 2714 | 500 $ 2715 | ! , 2716 | rt ~ 2717 | " , 2718 | #glarex2mender ^ 2719 | products N 2720 | ! , 2721 | all D 2722 | this O 2723 | in P 2724 | one $ 2725 | #contest N 2726 | ! , 2727 | details N 2728 | here R 2729 | U 2730 | . , 2731 | rt/follow V 2732 | <@MENTION> @ 2733 | to P 2734 | win V 2735 | !" , 2736 | 2737 | <@MENTION> @ 2738 | <@MENTION> @ 2739 | just R 2740 | don't V 2741 | let V 2742 | nel ^ 2743 | near P 2744 | him O 2745 | ... 
, 2746 | she's L 2747 | all R 2748 | about P 2749 | the D 2750 | fine A 2751 | art N 2752 | of P 2753 | decapation N 2754 | lately R 2755 | * , 2756 | cookoo ! 2757 | cookoo ! 2758 | * , 2759 | 2760 | rt ~ 2761 | <@MENTION> @ 2762 | : ~ 2763 | coultrain ^ 2764 | – , 2765 | green A 2766 | - , 2767 | almost R 2768 | forgot V 2769 | how R 2770 | brilliant A 2771 | this D 2772 | album N 2773 | was V 2774 | ... , 2775 | <@MENTION> @ 2776 | ♫ G 2777 | U 2778 | 2779 | this D 2780 | lil A 2781 | girl N 2782 | got V 2783 | some D 2784 | unnecessary A 2785 | tattoos N 2786 | but & 2787 | the D 2788 | art N 2789 | work N 2790 | is V 2791 | superb A 2792 | . , 2793 | 2794 | rt ~ 2795 | <@MENTION> @ 2796 | : ~ 2797 | rt ~ 2798 | <@MENTION> @ 2799 | : ~ 2800 | god ^ 2801 | allows V 2802 | us O 2803 | to P 2804 | experience V 2805 | the D 2806 | low A 2807 | points N 2808 | of P 2809 | life N 2810 | in P 2811 | order N 2812 | to P 2813 | teach V 2814 | us O 2815 | ... ~ 2816 | U 2817 | 2818 | <@MENTION> @ 2819 | heyyy ! 2820 | did V 2821 | you O 2822 | receive V 2823 | the D 2824 | email N 2825 | i O 2826 | sent V 2827 | earlier R 2828 | today N 2829 | ??? , 2830 | let V 2831 | me O 2832 | know V 2833 | por G 2834 | favor G 2835 | :) E 2836 | 2837 | <@MENTION> @ 2838 | but & 2839 | you O 2840 | gotta V 2841 | think V 2842 | nigga N 2843 | they O 2844 | playin V 2845 | teams N 2846 | that P 2847 | already R 2848 | got V 2849 | the D 2850 | chemistry N 2851 | not R 2852 | lookn V 2853 | 4 P 2854 | it O 2855 | 2856 | find V 2857 | great A 2858 | domains N 2859 | for P 2860 | sale N 2861 | ! , 2862 | come V 2863 | and & 2864 | check V 2865 | our D 2866 | domains N 2867 | for P 2868 | sale N 2869 | section N 2870 | . , 2871 | list V 2872 | your D 2873 | domains N 2874 | for P 2875 | $ G 2876 | 12.99 $ 2877 | /year N 2878 | U 2879 | 2880 | carving V 2881 | a D 2882 | pumpkin N 2883 | with P 2884 | a D 2885 | gun N 2886 | [ , 2887 | video N 2888 | ]: , 2889 | this D 2890 | man N 2891 | is V 2892 | hickcok45 ^ 2893 | . , 2894 | his D 2895 | pump N 2896 | ... ~ 2897 | U 2898 | 2899 | <@MENTION> @ 2900 | excuse V 2901 | me O 2902 | ? , 2903 | uppity A 2904 | ? , 2905 | :: G 2906 | confused A 2907 | :: G 2908 | 2909 | glc ^ 2910 | - , 2911 | the D 2912 | light N 2913 | U 2914 | 2915 | <@MENTION> @ 2916 | oh ! 2917 | i O 2918 | sooo R 2919 | am V 2920 | !! , 2921 | lol ! 2922 | waiting V 2923 | to P 2924 | see V 2925 | if P 2926 | he's L 2927 | gonna V 2928 | txt V 2929 | me O 2930 | back R 2931 | ! , 2932 | and & 2933 | it O 2934 | doesn't V 2935 | seem V 2936 | like P 2937 | it O 2938 | . , 2939 | 2940 | #howcome L 2941 | there X 2942 | is V 2943 | so R 2944 | many A 2945 | bops N 2946 | at P 2947 | tsu ^ 2948 | 2949 | rt ~ 2950 | <@MENTION> @ 2951 | : ~ 2952 | ( , 2953 | yup ! 2954 | , , 2955 | here R 2956 | comes V 2957 | the D 2958 | guilt) N 2959 | . , 2960 | remember V 2961 | , , 2962 | your D 2963 | tweets N 2964 | * , 2965 | do V 2966 | * , 2967 | matter V 2968 | . , 2969 | see V 2970 | the D 2971 | result N 2972 | of P 2973 | * , 2974 | your D 2975 | * , 2976 | efforts N 2977 | here R 2978 | : , 2979 | U 2980 | 2981 | rt ~ 2982 | <@MENTION> @ 2983 | : ~ 2984 | rt ~ 2985 | <@MENTION> @ 2986 | : ~ 2987 | #nhl ^ 2988 | news N 2989 | . , 2990 | early A 2991 | indications N 2992 | are V 2993 | that P 2994 | #leafs Z 2995 | colby ^ 2996 | armstrong ^ 2997 | may V 2998 | need V 2999 | surgery N 3000 | on P 3001 | his D 3002 | hand N 3003 | and & 3004 | could V 3005 | ... 
~ 3006 | 3007 | <@MENTION> @ 3008 | say V 3009 | what O 3010 | ? , 3011 | 3012 | <@MENTION> @ 3013 | oh ! 3014 | shit ! 3015 | its L 3016 | beyonce ^ 3017 | piukin V 3018 | u O 3019 | up T 3020 | from P 3021 | da D 3022 | train N 3023 | station N 3024 | 3025 | rt ~ 3026 | <@MENTION> @ 3027 | : ~ 3028 | rt ~ 3029 | <@MENTION> @ 3030 | : ~ 3031 | rt ~ 3032 | <@MENTION> @ 3033 | & & 3034 | <@MENTION> @ 3035 | , , 3036 | <@MENTION> @ 3037 | & & 3038 | <@MENTION> @ 3039 | present V 3040 | " , 3041 | get V 3042 | money N 3043 | or & 3044 | go V 3045 | broke A 3046 | " , 3047 | on P 3048 | <@MENTION> @ 3049 | ... ~ 3050 | U 3051 | ... ~ 3052 | 3053 | just R 3054 | watched V 3055 | iron ^ 3056 | man ^ 3057 | 2 $ 3058 | , , 3059 | what X 3060 | a D 3061 | nearly-perfect A 3062 | superhero N 3063 | movie N 3064 | ! , 3065 | so R 3066 | good A 3067 | you O 3068 | don't V 3069 | even R 3070 | mind V 3071 | mickey ^ 3072 | rourke ^ 3073 | ! , 3074 | ( , 3075 | j/k ! 3076 | , , 3077 | he O 3078 | was V 3079 | great A 3080 | too R 3081 | ) , 3082 | 3083 | agree V 3084 | that P 3085 | humanizing V 3086 | brand N 3087 | is V 3088 | paramount A 3089 | but & 3090 | associating V 3091 | w P 3092 | 1 $ 3093 | personality N 3094 | has V 3095 | big A 3096 | risk N 3097 | : , 3098 | dell ^ 3099 | , , 3100 | vzn ^ 3101 | , , 3102 | tiger ^ 3103 | , , 3104 | mj ^ 3105 | , , 3106 | ... , 3107 | #brandchat # 3108 | 3109 | <@MENTION> @ 3110 | <@MENTION> @ 3111 | <@MENTION> @ 3112 | <@MENTION> @ 3113 | <@MENTION> @ 3114 | <@MENTION> @ 3115 | hatin V 3116 | baby N 3117 | that's L 3118 | crazy A 3119 | , , 3120 | you O 3121 | unprepared A 3122 | niece N 3123 | . , 3124 | 3125 | it's L 3126 | already R 3127 | proving V 3128 | to P 3129 | be V 3130 | one $ 3131 | of P 3132 | those D 3133 | days N 3134 | . , 3135 | 3136 | i O 3137 | got V 3138 | woken V 3139 | up T 3140 | by P 3141 | my D 3142 | tv N 3143 | falling V 3144 | on P 3145 | tha D 3146 | floor N 3147 | . , 3148 | nice A 3149 | , , 3150 | right R 3151 | ? , 3152 | -_- E 3153 | 3154 | <@MENTION> @ 3155 | congrats ! 3156 | on P 3157 | the D 3158 | 100k $ 3159 | 3160 | rogue ^ 3161 | . , 3162 | no ! 3163 | . , 3164 | i O 3165 | didn't V 3166 | do V 3167 | it O 3168 | yet R 3169 | . , 3170 | ghdjfsshdfjg G 3171 | . , 3172 | should V 3173 | i O 3174 | . , 3175 | like R 3176 | , , 3177 | right R 3178 | now R 3179 | ? , 3180 | 3181 | larry ^ 3182 | ellison ^ 3183 | to P 3184 | prove V 3185 | hp’s Z 3186 | new A 3187 | ceo N 3188 | stole V 3189 | his D 3190 | software N 3191 | - G 3192 | U 3193 | 3194 | its L 3195 | really R 3196 | weird A 3197 | how R 3198 | fb ^ 3199 | shows V 3200 | " , 3201 | photo N 3202 | memories N 3203 | " , 3204 | of P 3205 | the D 3206 | hottest A 3207 | girl N 3208 | to P 3209 | me O 3210 | on P 3211 | the D 3212 | side N 3213 | 3214 | <@MENTION> @ 3215 | was V 3216 | asking V 3217 | about P 3218 | walk V 3219 | on P 3220 | music N 3221 | 3222 | rt ~ 3223 | <@MENTION> @ 3224 | : ~ 3225 | cheating V 3226 | on P 3227 | the D 3228 | person N 3229 | u O 3230 | love V 3231 | . , 3232 | #whodoesthat # 3233 | r G 3234 | e G 3235 | t G 3236 | w G 3237 | e G 3238 | e G 3239 | t G 3240 | if P 3241 | u O 3242 | dont V 3243 | cheat V 3244 | & & 3245 | u're L 3246 | loyal A 3247 | & & 3248 | faithful A 3249 | . , 3250 | - G 3251 | <@MENTION> @ 3252 | 3253 | i O 3254 | can V 3255 | usually R 3256 | handle V 3257 | the D 3258 | heat N 3259 | . 
, 3260 | the D 3261 | day N 3262 | of P 3263 | the D 3264 | dallas ^ 3265 | show N 3266 | i O 3267 | was V 3268 | ok A 3269 | but & 3270 | today N 3271 | i O 3272 | can't V 3273 | seem V 3274 | to P 3275 | get V 3276 | comfortable A 3277 | . , 3278 | 3279 | indiana ^ 3280 | jones ^ 3281 | movies N 3282 | get V 3283 | 3d A 3284 | treatment N 3285 | & & 3286 | re-release N 3287 | : , 3288 | following V 3289 | up T 3290 | on P 3291 | george ^ 3292 | lucas ^ 3293 | ' G 3294 | decision N 3295 | to P 3296 | re-release V 3297 | the D 3298 | star ^ 3299 | ... ~ 3300 | U 3301 | 3302 | denise ^ 3303 | schump ^ 3304 | chosen V 3305 | for P 3306 | teacher ^ 3307 | tuesday ^ 3308 | honor N 3309 | : , 3310 | ... , 3311 | with P 3312 | a D 3313 | certificate N 3314 | and & 3315 | a D 3316 | collection N 3317 | of P 3318 | gifts N 3319 | . , 3320 | shump ^ 3321 | said V 3322 | she O 3323 | w G 3324 | ... ~ 3325 | U 3326 | 3327 | rt ~ 3328 | <@MENTION> @ 3329 | : ~ 3330 | legitmileyc ^ 3331 | 10:55 $ 3332 | pm N 3333 | ಠ◡ಠ E 3334 | 3335 | <@MENTION> @ 3336 | true A 3337 | . , 3338 | then R 3339 | again R 3340 | you O 3341 | post V 3342 | several A 3343 | times N 3344 | a D 3345 | week N 3346 | and & 3347 | i O 3348 | only R 3349 | post V 3350 | once R 3351 | . , 3352 | so P 3353 | have V 3354 | to P 3355 | get V 3356 | it O 3357 | all D 3358 | in P 3359 | haha ! 3360 | . , 3361 | this D 3362 | next A 3363 | one $ 3364 | is V 3365 | shortish A 3366 | 3367 | my D 3368 | head N 3369 | is V 3370 | killing V 3371 | me O 3372 | good A 3373 | night N 3374 | to P 3375 | all# N 3376 | teamlakers ^ 3377 | fuck V 3378 | the D 3379 | rest N 3380 | of P 3381 | yall O 3382 | lol ! 3383 | 3384 | i O 3385 | kno V 3386 | thats L 3387 | right A 3388 | husband N 3389 | :) E 3390 | ily G 3391 | win V 3392 | or & 3393 | lose V 3394 | rt ~ 3395 | <@MENTION> @ 3396 | rt ~ 3397 | <@MENTION> @ 3398 | rome ^ 3399 | wasn't V 3400 | built V 3401 | in P 3402 | a D 3403 | day N 3404 | ! , 3405 | work N 3406 | U 3407 | 3408 | <@MENTION> @ 3409 | yes ! 3410 | . , 3411 | and & 3412 | how R 3413 | about P 3414 | your D 3415 | school N 3416 | ? , 3417 | 3418 | <@MENTION> @ 3419 | i O 3420 | love V 3421 | it O 3422 | when R 3423 | you O 3424 | talk V 3425 | contractor N 3426 | . , 3427 | it O 3428 | keeps V 3429 | me O 3430 | warm A 3431 | at P 3432 | night N 3433 | . , 3434 | 3435 | rt ~ 3436 | <@MENTION> @ 3437 | : ~ 3438 | check V 3439 | me O 3440 | out T 3441 | as P 3442 | a D 3443 | devil N 3444 | , , 3445 | age N 3446 | 5 $ 3447 | . , 3448 | email V 3449 | ur D 3450 | halloween ^ 3451 | look N 3452 | to P 3453 | <@MENTION> U 3454 | , , 3455 | win V 3456 | the D 3457 | bag N 3458 | of P 3459 | the D 3460 | season N 3461 | ! , 3462 | U 3463 | ... ~ 3464 | 3465 | rt ~ 3466 | <@MENTION> @ 3467 | : ~ 3468 | out P 3469 | of P 3470 | all D 3471 | your D 3472 | lies N 3473 | , , 3474 | " , 3475 | i O 3476 | love V 3477 | you O 3478 | " , 3479 | was V 3480 | my D 3481 | favorite A 3482 | . , 3483 | #factsaboutboys # 3484 | 3485 | new A 3486 | tweetaway N 3487 | ! , 3488 | follow V 3489 | <@MENTION> @ 3490 | & & 3491 | rt V 3492 | to P 3493 | enter V 3494 | to P 3495 | win V 3496 | a D 3497 | prize N 3498 | from P 3499 | obagi ^ 3500 | ! , 3501 | 1 $ 3502 | winner N 3503 | every D 3504 | day N 3505 | this D 3506 | week N 3507 | ! , 3508 | U 3509 | good A 3510 | luck N 3511 | ! , 3512 | 3513 | lmao ! 
3514 | everybody N 3515 | tweeted V 3516 | bout P 3517 | kobe ^ 3518 | just R 3519 | now R 3520 | 3521 | <@MENTION> @ 3522 | same A 3523 | year N 3524 | ? , 3525 | 3526 | rt ~ 3527 | <@MENTION> @ 3528 | : ~ 3529 | grown'n V 3530 | up T 3531 | as P 3532 | a D 3533 | kid N 3534 | ... , 3535 | i O 3536 | thought V 3537 | i O 3538 | could V 3539 | get V 3540 | any D 3541 | girl N 3542 | ... , 3543 | who O 3544 | was V 3545 | i O 3546 | fool'n?! V 3547 | 3548 | the D 3549 | top A 3550 | priority N 3551 | when R 3552 | moving V 3553 | to P 3554 | a D 3555 | new A 3556 | city N 3557 | ? , 3558 | finding V 3559 | a D 3560 | hair N 3561 | dresser N 3562 | , , 3563 | of P 3564 | course N 3565 | . , 3566 | under P 3567 | the D 3568 | dryer N 3569 | now R 3570 | . , 3571 | we'll L 3572 | see V 3573 | if P 3574 | this D 3575 | place N 3576 | is V 3577 | a D 3578 | keeper N 3579 | ! , 3580 | 3581 | mozilla ^ 3582 | to P 3583 | drop V 3584 | os ^ 3585 | 10.4 $ 3586 | tiger ^ 3587 | support N 3588 | ? , 3589 | say V 3590 | it O 3591 | isn’t V 3592 | so R 3593 | - G 3594 | ^ 3595 | U 3596 | #education # 3597 | #teched # 3598 | #elearning # 3599 | 3600 | #rayray # 3601 | #thatisall # 3602 | !. , 3603 | 3604 | man ! 3605 | real A 3606 | talk N 3607 | i O 3608 | fuck V 3609 | wit P 3610 | a D 3611 | nikka N 3612 | from P 3613 | the D 3614 | bottoms N 3615 | imgood U 3616 | . U 3617 | com U 3618 | 3619 | U 3620 | 3621 | <@MENTION> @ 3622 | just R 3623 | sit V 3624 | down T 3625 | calmly R 3626 | with P 3627 | her O 3628 | & & 3629 | tell V 3630 | her O 3631 | every D 3632 | fucking V 3633 | thing N 3634 | that P 3635 | is V 3636 | annoying V 3637 | you O 3638 | . , 3639 | see V 3640 | if P 3641 | she O 3642 | stays V 3643 | . , 3644 | 3645 | rt ~ 3646 | <@MENTION> @ 3647 | : ~ 3648 | dealers N 3649 | : , 3650 | before P 3651 | you O 3652 | get V 3653 | sold V 3654 | " , 3655 | an D 3656 | app N 3657 | for P 3658 | that D 3659 | ", , 3660 | check V 3661 | out T 3662 | what O 3663 | smartphone N 3664 | users N 3665 | actually R 3666 | do V 3667 | U 3668 | ( , 3669 | via P 3670 | <@MENTION> @ 3671 | ... ~ 3672 | 3673 | <@MENTION> @ 3674 | yes ! 3675 | . , 3676 | 3677 | " , 3678 | actin V 3679 | ' , 3680 | like P 3681 | a D 3682 | bitch N 3683 | , , 3684 | finna P 3685 | get V 3686 | you O 3687 | hurt V 3688 | " , 3689 | - G 3690 | rihanna ^ 3691 | 3692 | <@MENTION> @ 3693 | head N 3694 | 3695 | they O 3696 | need V 3697 | to P 3698 | do V 3699 | an D 3700 | episode N 3701 | of P 3702 | cops ^ 3703 | here R 3704 | in P 3705 | cleveland ^ 3706 | 3707 | i O 3708 | missed V 3709 | house ^ 3710 | of P 3711 | glam ^ 3712 | : E 3713 | 0 E 3714 | ( E 3715 | 3716 | in P 3717 | the D 3718 | room N 3719 | before P 3720 | 1 $ 3721 | yess ! 3722 | ! , 3723 | ! , 3724 | nap N 3725 | time N 3726 | til P 3727 | practicee N 3728 | 3729 | no D 3730 | injuries N 3731 | reported V 3732 | in P 3733 | three $ 3734 | separate A 3735 | shooting N 3736 | incidents N 3737 | in P 3738 | muskegon ^ 3739 | : , 3740 | chronicle/jeffrey G 3741 | ballthe G 3742 | owner N 3743 | of P 3744 | a D 3745 | home N 3746 | i G 3747 | ... ~ 3748 | U 3749 | 3750 | <@MENTION> @ 3751 | since P 3752 | i O 3753 | didn't V 3754 | know V 3755 | what O 3756 | the D 3757 | hell N 3758 | you O 3759 | were V 3760 | talking V 3761 | about P 3762 | in P 3763 | the D 3764 | first A 3765 | place N 3766 | , , 3767 | i O 3768 | overlooked V 3769 | said V 3770 | spelling N 3771 | mistakes N 3772 | ... 
, 3773 | 3774 | <@MENTION> @ 3775 | i O 3776 | have V 3777 | to P 3778 | turn V 3779 | in T 3780 | something N 3781 | for P 3782 | someone N 3783 | else R 3784 | -_- E 3785 | 3786 | rt ~ 3787 | <@MENTION> @ 3788 | : ~ 3789 | lincecum ^ 3790 | lookin V 3791 | real R 3792 | anxious A 3793 | 3794 | heyyyy ! 3795 | justinnn ^ 3796 | hiiiiiiii ! 3797 | plzzzzzzzzz V 3798 | replyyyy V 3799 | plzzzzzzz V 3800 | +22 $ 3801 | 3802 | rt ~ 3803 | <@MENTION> @ 3804 | : ~ 3805 | five $ 3806 | tips N 3807 | to P 3808 | reduce V 3809 | your D 3810 | health N 3811 | risk N 3812 | while P 3813 | eating V 3814 | street N 3815 | food N 3816 | | G 3817 | gadling ^ 3818 | | G 3819 | U 3820 | 3821 | couldn't V 3822 | be V 3823 | more R 3824 | happy A 3825 | for P 3826 | casey ^ 3827 | & & 3828 | kali ^ 3829 | !! , 3830 | 3831 | just R 3832 | saw V 3833 | a D 3834 | commercial N 3835 | for P 3836 | the D 3837 | new A 3838 | goldeneye ^ 3839 | ahhhhh ! 3840 | !!!! , 3841 | 3842 | if P 3843 | i O 3844 | die V 3845 | dont V 3846 | cry V 3847 | just R 3848 | get V 3849 | high A 3850 | and & 3851 | fly V 3852 | with P 3853 | me O 3854 | 3855 | jesus ^ 3856 | christ ^ 3857 | .. , 3858 | 3859 | naw ! 3860 | we O 3861 | pressd V 3862 | da D 3863 | nigga N 3864 | he O 3865 | did V 3866 | some D 3867 | slimey A 3868 | shit N 3869 | it O 3870 | was V 3871 | 2 $ 3872 | of P 3873 | us O 3874 | against P 3875 | four $ 3876 | of P 3877 | dem O 3878 | and & 3879 | dey O 3880 | was V 3881 | some D 3882 | brolik A 3883 | grown A 3884 | ass A 3885 | men N 3886 | <@MENTION> @ 3887 | 3888 | <@MENTION> @ 3889 | yes ! 3890 | real R 3891 | soon A 3892 | !!! , 3893 | cant V 3894 | wait V 3895 | ! , 3896 | =) E 3897 | 3898 | <@MENTION> @ 3899 | <@MENTION> @ 3900 | thank V 3901 | u O 3902 | kitten N 3903 | ! , 3904 | watch V 3905 | it O 3906 | when R 3907 | i O 3908 | get V 3909 | home N 3910 | ! , 3911 | :p E 3912 | 3913 | club ^ 3914 | monaco ^ 3915 | asking V 3916 | customers N 3917 | to P 3918 | take V 3919 | pics N 3920 | for P 3921 | its D 3922 | new A 3923 | blog N 3924 | & & 3925 | they O 3926 | supply V 3927 | the D 3928 | camera N 3929 | U 3930 | 3931 | sugary A 3932 | drinks N 3933 | may V 3934 | raise V 3935 | diabetes N 3936 | risk N 3937 | : G 3938 | U 3939 | via P 3940 | <@MENTION> @ 3941 | 3942 | #tdfamily # 3943 | you O 3944 | must V 3945 | contact V 3946 | me O 3947 | if P 3948 | you O 3949 | will V 3950 | not R 3951 | be V 3952 | at P 3953 | either D 3954 | meeting N 3955 | 3956 | #arcticmonkeys # 3957 | -ibetyoulookgoodonthedancefloor G 3958 | 3959 | <@MENTION> @ 3960 | lol ! 3961 | ohhhhh ! 3962 | tru A 3963 | tru A 3964 | hahaha ! 3965 | gotta V 3966 | stay V 3967 | safe R 3968 | !!!! , 3969 | 3970 | i O 3971 | just R 3972 | put V 3973 | extra A 3974 | granola N 3975 | in P 3976 | my D 3977 | cereal N 3978 | . , 3979 | 3980 | you O 3981 | may V 3982 | feel V 3983 | as P 3984 | if P 3985 | you O 3986 | have V 3987 | run V 3988 | out R 3989 | of P 3990 | time N 3991 | because P 3992 | there X 3993 | are V 3994 | ... ~ 3995 | more A 3996 | for P 3997 | gemini ^ 3998 | U 3999 | 4000 | kinda R 4001 | pissed A 4002 | but & 4003 | still R 4004 | got V 4005 | til P 4006 | friday ^ 4007 | 4008 | <@MENTION> @ 4009 | why O 4010 | u O 4011 | put V 4012 | my D 4013 | heart N 4014 | in P 4015 | ur D 4016 | tweets N 4017 | ? , 4018 | hahaha ! 
4019 | 4020 | #teamlakers # 4021 | #teamkobe # 4022 | #handsdown # 4023 | #nuffsaid # 4024 | 4025 | <@MENTION> @ 4026 | good A 4027 | mawnin N 4028 | gorgeous A 4029 | 4030 | <@MENTION> @ 4031 | nooooooo ! 4032 | :( E 4033 | ! , 4034 | don't V 4035 | say V 4036 | thaaaaat D 4037 | . , 4038 | 4039 | dis D 4040 | nigga N 4041 | just R 4042 | went V 4043 | ham A 4044 | on P 4045 | first A 4046 | 48 $ 4047 | ! , 4048 | 4049 | unc ^ 4050 | lookin V 4051 | ' , 4052 | good A 4053 | right R 4054 | about P 4055 | now R 4056 | . , 4057 | might V 4058 | hafta V 4059 | start V 4060 | this D 4061 | application N 4062 | 4063 | <@MENTION> @ 4064 | ha ! 4065 | . , 4066 | get V 4067 | one $ 4068 | of P 4069 | them O 4070 | ! , 4071 | 4072 | <@MENTION> @ 4073 | U 4074 | covered V 4075 | the D 4076 | story N 4077 | 4078 | <@MENTION> @ 4079 | rt V 4080 | U 4081 | halloweennight ^ 4082 | ! , 4083 | 4084 | photo N 4085 | : , 4086 | U 4087 | 4088 | rt ~ 4089 | <@MENTION> @ 4090 | : ~ 4091 | tweet V 4092 | me O 4093 | if P 4094 | you're L 4095 | online A 4096 | and & 4097 | bored A 4098 | ! , 4099 | :) E 4100 | 4101 | homeboy ^ 4102 | sandman ^ 4103 | : G 4104 | " , 4105 | calm A 4106 | tornado N 4107 | " , 4108 | [ , 4109 | video N 4110 | ] , 4111 | U 4112 | 4113 | rt ~ 4114 | <@MENTION> @ 4115 | : ~ 4116 | <@MENTION> @ 4117 | - G 4118 | beezid ^ 4119 | is V 4120 | celebrating V 4121 | their D 4122 | 1 $ 4123 | year N 4124 | birthday N 4125 | and & 4126 | giving V 4127 | you O 4128 | the D 4129 | greatest A 4130 | gift N 4131 | of P 4132 | all D 4133 | ! , 4134 | check V 4135 | out T 4136 | beezid™ ^ 4137 | bu G 4138 | ... ~ 4139 | 4140 | rt ~ 4141 | <@MENTION> @ 4142 | i O 4143 | am V 4144 | having V 4145 | the D 4146 | most R 4147 | awesome/inappropriate A 4148 | conversation N 4149 | with P 4150 | <@MENTION> @ 4151 | <@MENTION> @ 4152 | <@MENTION> @ 4153 | right R 4154 | now R 4155 | . , 4156 | #guesswhatabout # 4157 | 4158 | thanks N 4159 | to P 4160 | everyone N 4161 | who O 4162 | likes V 4163 | us O 4164 | and & 4165 | to P 4166 | erica ^ 4167 | haspel ^ 4168 | for P 4169 | being V 4170 | the D 4171 | 1,000 $ 4172 | th G 4173 | person N 4174 | to P 4175 | " , 4176 | like V 4177 | " , 4178 | our D 4179 | page N 4180 | ! , 4181 | 4182 | lmfaooooooooooooooo ! 4183 | tanaya ^ 4184 | said V 4185 | brody ^ 4186 | look V 4187 | like P 4188 | a D 4189 | lollipop N 4190 | 4191 | rt ~ 4192 | <@MENTION> @ 4193 | : ~ 4194 | " , 4195 | bobbito ^ 4196 | garcia’s Z 4197 | playground ^ 4198 | basketball ^ 4199 | film ^ 4200 | festival… ^ 4201 | 100% $ 4202 | swagg N 4203 | approved V 4204 | " , 4205 | U 4206 | 4207 | early R 4208 | trif N 4209 | : , 4210 | specific A 4211 | #2 $ 4212 | no R 4213 | longer R 4214 | a D 4215 | problem N 4216 | , , 4217 | i O 4218 | guess V 4219 | . , 4220 | new A 4221 | #2 $ 4222 | expected V 4223 | this D 4224 | weekend N 4225 | . , 4226 | 4227 | agreed V 4228 | !!! , 4229 | rt ~ 4230 | <@MENTION> @ 4231 | : ~ 4232 | it O 4233 | doesn't V 4234 | to R 4235 | much A 4236 | to P 4237 | turn V 4238 | me O 4239 | on T 4240 | but & 4241 | it O 4242 | really R 4243 | don't V 4244 | take V 4245 | much A 4246 | to P 4247 | turn V 4248 | me O 4249 | off T 4250 | ! , 4251 | 4252 | bj's ^ 4253 | is V 4254 | fuckin R 4255 | gross A 4256 | and & 4257 | a D 4258 | waste N 4259 | of P 4260 | money N 4261 | 4262 | rt ~ 4263 | <@MENTION> @ 4264 | i'm L 4265 | gettin V 4266 | a D 4267 | lil R 4268 | sad A 4269 | about P 4270 | #hc10 N 4271 | ... , 4272 | << ~ 4273 | y R 4274 | ?? 
, 4275 | ur L 4276 | not R 4277 | coming V 4278 | ?? , 4279 | 4280 | " , 4281 | i'm L 4282 | sure A 4283 | primo ^ 4284 | is V 4285 | very R 4286 | motivational A 4287 | ." , 4288 | 4289 | <@MENTION> @ 4290 | =) E 4291 | 4292 | <@MENTION> @ 4293 | my D 4294 | bad N 4295 | i O 4296 | said V 4297 | yes ! 4298 | sir N 4299 | i O 4300 | text V 4301 | bac R 4302 | on P 4303 | my D 4304 | phone N 4305 | didnt V 4306 | go V 4307 | thru R 4308 | i O 4309 | guess V 4310 | 4311 | catching V 4312 | up T 4313 | with P 4314 | bela ^ 4315 | ! , 4316 | (@ P 4317 | 48 $ 4318 | 1221 $ 4319 | 6th ^ 4320 | ave ^ 4321 | ) , 4322 | U 4323 | 4324 | vacation N 4325 | in P 4326 | my D 4327 | mind N 4328 | the D 4329 | movie N 4330 | drops V 4331 | march ^ 4332 | 17th $ 4333 | . , 4334 | wrote V 4335 | the D 4336 | script N 4337 | in P 4338 | 09 $ 4339 | ' , 4340 | way R 4341 | before P 4342 | emo N 4343 | man N 4344 | made V 4345 | his D 4346 | movie N 4347 | . , 4348 | #frosb4hoes # 4349 | -biggity ! 4350 | 4351 | <@MENTION> @ 4352 | bone N 4353 | fragments N 4354 | taken V 4355 | out T 4356 | , , 4357 | tendon N 4358 | repaired V 4359 | . , 4360 | 4361 | i'm L 4362 | too R 4363 | lazy A 4364 | to P 4365 | get V 4366 | & & 4367 | get V 4368 | dressed V 4369 | . , 4370 | just R 4371 | too R 4372 | lazy A 4373 | , , 4374 | i O 4375 | hope V 4376 | you O 4377 | understand V 4378 | . , 4379 | 4380 | gotta V 4381 | love V 4382 | getting V 4383 | pulled V 4384 | over P 4385 | right R 4386 | outside P 4387 | your D 4388 | office N 4389 | ... , 4390 | #goodstarttotheday # 4391 | :-\ E 4392 | 4393 | <@MENTION> @ 4394 | lets L 4395 | support V 4396 | breast N 4397 | cancer N 4398 | research N 4399 | click V 4400 | on P 4401 | the D 4402 | link N 4403 | $5 $ 4404 | from P 4405 | each D 4406 | sale N 4407 | donated V 4408 | U 4409 | 4410 | my D 4411 | cousin N 4412 | is V 4413 | deff R 4414 | losing V 4415 | ha D 4416 | fckn A 4417 | mind N 4418 | ! , 4419 | smh G 4420 | 4421 | tried V 4422 | to P 4423 | read V 4424 | more A 4425 | of P 4426 | amy ^ 4427 | mckay's Z 4428 | ' , 4429 | the D 4430 | birth N 4431 | house N 4432 | ', , 4433 | but & 4434 | it O 4435 | just R 4436 | isn't V 4437 | the D 4438 | same A 4439 | without P 4440 | <@MENTION> @ 4441 | reading V 4442 | it O 4443 | aloud R 4444 | . , 4445 | 4446 | <@MENTION> @ 4447 | yess ! 4448 | i O 4449 | am V 4450 | 4451 | <@MENTION> @ 4452 | U 4453 | 4454 | new A 4455 | blog N 4456 | post N 4457 | : , 4458 | camping V 4459 | creates V 4460 | strong A 4461 | family N 4462 | bonds N 4463 | U 4464 | 4465 | <@MENTION> @ 4466 | close R 4467 | enough R 4468 | to P 4469 | smell V 4470 | ittt O 4471 | 4472 | rt ~ 4473 | <@MENTION> @ 4474 | : ~ 4475 | #prayforindonesia # 4476 | U 4477 | 4478 | halloween ^ 4479 | activities N 4480 | U 4481 | 4482 | rt ~ 4483 | <@MENTION> @ 4484 | every D 4485 | girl N 4486 | lives V 4487 | for P 4488 | the D 4489 | " , 4490 | unexpected A 4491 | hugs N 4492 | from P 4493 | behind P 4494 | " , 4495 | moments N 4496 | < ~ 4497 | i O 4498 | wouldn't V 4499 | say V 4500 | " , 4501 | live V 4502 | "... , 4503 | but & 4504 | they O 4505 | r V 4506 | nice A 4507 | 4508 | how R 4509 | to P 4510 | create V 4511 | the D 4512 | perfect A 4513 | mudroom N 4514 | ( , 4515 | 10 $ 4516 | photos N 4517 | ): , 4518 | a D 4519 | mudroom N 4520 | is V 4521 | a D 4522 | terrific A 4523 | room N 4524 | to P 4525 | have V 4526 | in P 4527 | your D 4528 | home N 4529 | especially R 4530 | a D 4531 | ... 
~ 4532 | U 4533 | #home # 4534 | 4535 | indonesians ^ 4536 | try V 4537 | to P 4538 | return V 4539 | to P 4540 | homes N 4541 | on P 4542 | mount ^ 4543 | merapi ^ 4544 | – G 4545 | bbc ^ 4546 | news ^ 4547 | : G 4548 | the D 4549 | hinduindonesians ^ 4550 | try V 4551 | to P 4552 | return V 4553 | t G 4554 | ... ~ 4555 | U 4556 | jk ^ 4557 | technologies ^ 4558 | 4559 | <@MENTION> @ 4560 | looking V 4561 | for P 4562 | the D 4563 | same A 4564 | 4565 | life N 4566 | is V 4567 | like P 4568 | photography N 4569 | , , 4570 | we O 4571 | use V 4572 | the D 4573 | negatives N 4574 | to P 4575 | develop V 4576 | . , 4577 | 4578 | rt ~ 4579 | <@MENTION> @ 4580 | : ~ 4581 | surrounded V 4582 | by P 4583 | men N 4584 | in P 4585 | uncomfortable-looking A 4586 | ties N 4587 | at P 4588 | ord ^ 4589 | . , 4590 | i'm L 4591 | in P 4592 | a D 4593 | t-shirt N 4594 | and & 4595 | jeans N 4596 | , , 4597 | thanking V 4598 | various A 4599 | entities N 4600 | that P 4601 | i O 4602 | ... ~ 4603 | 4604 | <@MENTION> @ 4605 | i'm L 4606 | just R 4607 | a D 4608 | normal A 4609 | girl N 4610 | : E 4611 | 3 E 4612 | 4613 | great A 4614 | collection N 4615 | of P 4616 | items N 4617 | from P 4618 | etsy ^ 4619 | ... , 4620 | U 4621 | 4622 | rt ~ 4623 | <@MENTION> @ 4624 | our D 4625 | time N 4626 | is V 4627 | now R 4628 | ! , 4629 | support V 4630 | women N 4631 | as P 4632 | full A 4633 | peacemaking N 4634 | partners N 4635 | . , 4636 | #make1325real G 4637 | & & 4638 | sign V 4639 | the D 4640 | petition N 4641 | : , 4642 | U 4643 | 4644 | <@MENTION> @ 4645 | oh ! 4646 | lord ^ 4647 | ! , 4648 | 4649 | today N 4650 | is V 4651 | #cliffmas ^ 4652 | day N 4653 | ! , 4654 | go V 4655 | <@MENTION> @ 4656 | 4657 | <@MENTION> @ 4658 | i O 4659 | was V 4660 | really R 4661 | hoping V 4662 | y'all O 4663 | would V 4664 | be V 4665 | driving V 4666 | through P 4667 | albuquerque ^ 4668 | on P 4669 | halloween ^ 4670 | ... , 4671 | i O 4672 | turn V 4673 | 21 $ 4674 | and & 4675 | you're L 4676 | the D 4677 | authority N 4678 | on P 4679 | cocktails N 4680 | . , 4681 | 4682 | <@MENTION> @ 4683 | im L 4684 | tryin V 4685 | to P 4686 | figure V 4687 | out T 4688 | the D 4689 | email N 4690 | but & 4691 | mckayla ^ 4692 | is V 4693 | the D 4694 | only A 4695 | one $ 4696 | with P 4697 | the D 4698 | pw N 4699 | 4700 | rt ~ 4701 | <@MENTION> @ 4702 | wikipedia ^ 4703 | is V 4704 | great A 4705 | idk L 4706 | what O 4707 | anybody N 4708 | says V 4709 | . , 4710 | anyone N 4711 | that P 4712 | disagrees V 4713 | is V 4714 | a D 4715 | fucking A 4716 | #herb N 4717 | #mark N 4718 | #trickassbuster N 4719 | 4720 | ★ⓕⓤⓒⓚⓘⓝⓕⓞⓛⓛⓞⓦⓝⓞⓦ★ G 4721 | <@MENTION> @ 4722 | <@MENTION> @ 4723 | <@MENTION> @ 4724 | <@MENTION> @ 4725 | <@MENTION> @ 4726 | <@MENTION> @ 4727 | <@MENTION> @ 4728 | <@MENTION> @ 4729 | <@MENTION> @ 4730 | 4731 | and & 4732 | yall O 4733 | knew V 4734 | heat ^ 4735 | wasn't V 4736 | fuckin V 4737 | wit P 4738 | the D 4739 | celtics ^ 4740 | 4741 | <@MENTION> @ 4742 | hahaha ! 4743 | lmao ! 4744 | @ P 4745 | mutha N 4746 | bustin V 4747 | .. , 4748 | and & 4749 | why R 4750 | not R 4751 | that O 4752 | seems V 4753 | like P 4754 | a D 4755 | fun A 4756 | game N 4757 | pahaha ! 4758 | jkaay ! 4759 | jkaay ! 
4760 | 4761 | nfu ^ 4762 | and & 4763 | szu ^ 4764 | advanced ^ 4765 | technology ^ 4766 | park ^ 4767 | / , 4768 | jaeger ^ 4769 | and & 4770 | partner ^ 4771 | architects ^ 4772 | + & 4773 | sa_i ^ 4774 | : G 4775 | shenzhen ^ 4776 | based A 4777 | jaeger ^ 4778 | and & 4779 | partner ^ 4780 | arc ^ 4781 | ... ~ 4782 | U 4783 | 4784 | if P 4785 | you O 4786 | aren't V 4787 | reading V 4788 | this D 4789 | marshall ^ 4790 | mcluhan ^ 4791 | blog N 4792 | , , 4793 | you O 4794 | can't V 4795 | really R 4796 | understand V 4797 | media N 4798 | , , 4799 | sorry A 4800 | . , 4801 | U 4802 | 4803 | rt ~ 4804 | ♥ V 4805 | it O 4806 | lol ! 4807 | <@MENTION> @ 4808 | : ~ 4809 | thanks N 4810 | boo N 4811 | ! , 4812 | ;-) E 4813 | gotta V 4814 | keep V 4815 | the D 4816 | reputation N 4817 | good A 4818 | in P 4819 | all D 4820 | areas N 4821 | ! , 4822 | lol ! 4823 | rt ~ 4824 | <@MENTION> @ 4825 | * , 4826 | great A 4827 | choice N 4828 | of P 4829 | words N 4830 | * , 4831 | lmao ! 4832 | 4833 | <@MENTION> @ 4834 | <@MENTION> @ 4835 | call V 4836 | us O 4837 | now R 4838 | lol ! 4839 | y R 4840 | not R 4841 | 4842 | i O 4843 | just R 4844 | won V 4845 | this D 4846 | free A 4847 | auction N 4848 | : , 4849 | 3-bikes N 4850 | an & 4851 | a D 4852 | caddy N 4853 | combo N 4854 | box N 4855 | !! , 4856 | U 4857 | 4858 | <@MENTION> @ 4859 | thank V 4860 | you O 4861 | so R 4862 | much R 4863 | for P 4864 | the D 4865 | mention N 4866 | & & 4867 | rt N 4868 | ! , 4869 | make V 4870 | my D 4871 | day N 4872 | !! , 4873 | 4874 | <@MENTION> @ 4875 | come V 4876 | on P 4877 | out P 4878 | there R 4879 | ... , 4880 | don't V 4881 | b V 4882 | scared A 4883 | now R 4884 | .. , 4885 | 4886 | a D 4887 | chance N 4888 | to P 4889 | win V 4890 | a D 4891 | pocket ^ 4892 | devil ^ 4893 | hd ^ 4894 | promo N 4895 | code N 4896 | with P 4897 | a D 4898 | retweet N 4899 | or & 4900 | comment N 4901 | U 4902 | via P 4903 | <@MENTION> @ 4904 | 4905 | #teamlakers # 4906 | ya'll O 4907 | ready A 4908 | ?? , 4909 | :) E 4910 | 4911 | be V 4912 | the D 4913 | first A 4914 | to P 4915 | know V 4916 | what's L 4917 | going V 4918 | on T 4919 | ! , 4920 | check V 4921 | out T 4922 | the D 4923 | november ^ 4924 | " , 4925 | what's L 4926 | the D 4927 | buzz N 4928 | " , 4929 | here R 4930 | before R 4931 | it's L 4932 | even R 4933 | back A 4934 | from P 4935 | ... ~ 4936 | U 4937 | 4938 | #glee ^ 4939 | !! ! 4940 | 4941 | we O 4942 | have V 4943 | a D 4944 | game N 4945 | folks N 4946 | . , 4947 | 4948 | <@MENTION> @ 4949 | u O 4950 | already R 4951 | know V 4952 | ! , 4953 | 4954 | <@MENTION> @ 4955 | i O 4956 | am V 4957 | glad A 4958 | u O 4959 | were V 4960 | able A 4961 | to P 4962 | make V 4963 | it T 4964 | . , 4965 | #backchannel # 4966 | 4967 | overcast A 4968 | and & 4969 | 55 $ 4970 | f N 4971 | at P 4972 | presque ^ 4973 | isle ^ 4974 | , , 4975 | me N 4976 | winds N 4977 | are V 4978 | south N 4979 | at P 4980 | 9.2 $ 4981 | mph N 4982 | ( , 4983 | 8 $ 4984 | kt) N 4985 | . , 4986 | the D 4987 | humidity N 4988 | is V 4989 | 88% $ 4990 | . , 4991 | the D 4992 | wind N 4993 | chill N 4994 | is V 4995 | 52 $ 4996 | . 
, 4997 | la G 4998 | U 4999 | 5000 | <@MENTION> @ 5001 | ya'll O 5002 | need V 5003 | to P 5004 | make V 5005 | official A 5006 | api's N 5007 | like P 5008 | <@MENTION> @ 5009 | does V 5010 | for P 5011 | developers N 5012 | to P 5013 | tie V 5014 | into P 5015 | 5016 | rt ~ 5017 | <@MENTION> @ 5018 | : ~ 5019 | kobe ^ 5020 | doing V 5021 | anything N 5022 | cuz P 5023 | mj ^ 5024 | ballin V 5025 | on P 5026 | this D 5027 | 2k11 @ 5056 | : ~ 5057 | <@MENTION> @ 5058 | an & 5059 | it O 5060 | follows V 5061 | bello's Z 5062 | top A 5063 | journalism N 5064 | rule N 5065 | : , 5066 | if P 5067 | there X 5068 | is V 5069 | an D 5070 | animal N 5071 | , , 5072 | always R 5073 | get V 5074 | it's D 5075 | name N 5076 | . , 5077 | makes V 5078 | the D 5079 | story N 5080 | 1,000 $ 5081 | ... ~ 5082 | 5083 | tornado N 5084 | warning N 5085 | issued V 5086 | for P 5087 | parts N 5088 | of P 5089 | person ^ 5090 | county N 5091 | in P 5092 | nc ^ 5093 | until P 5094 | 04:45 $ 5095 | pm N 5096 | et N 5097 | - G 5098 | U 5099 | 5100 | <@MENTION> @ 5101 | it O 5102 | was V 5103 | a D 5104 | short A 5105 | , , 5106 | but & 5107 | good A 5108 | life N 5109 | . , 5110 | how R 5111 | many A 5112 | of P 5113 | us O 5114 | can V 5115 | say V 5116 | we've L 5117 | been V 5118 | an D 5119 | under-water A 5120 | bookie N 5121 | in P 5122 | our D 5123 | life N 5124 | ?! , 5125 | a D 5126 | winner N 5127 | to P 5128 | the D 5129 | end N 5130 | ! , 5131 | 5132 | still R 5133 | up A 5134 | 4 P 5135 | no D 5136 | reason N 5137 | tho P 5138 | 5139 | <@MENTION> @ 5140 | first R 5141 | a D 5142 | window N 5143 | now R 5144 | keys N 5145 | . , 5146 | what's L 5147 | next R 5148 | .. , 5149 | #js # 5150 | 5151 | --------------------------------------------------------------------------------