├── .gitignore
├── LICENSE
├── Turning_a_square_into_a_circle.ipynb
├── gan
│   └── Turning a square into a circle.ipynb
└── my_rnn_pytorched
    ├── The Tensor, My Friend.ipynb
    ├── aliceinwonderland.txt
    ├── charRNN.ipynb
    ├── dynamic_graph.gif
    └── nietzsche.txt

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Santi DSP
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /gan/Turning a square into a circle.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# import PyTorch utils\n", 12 | "import torch\n", 13 | "import torch.nn as nn\n", 14 | "from torch.autograd import Variable\n", 15 | "import torch.nn.functional as F\n", 16 | "import torch.optim as optim\n", 17 | "import torch.nn.init as init\n", 18 | "# import numpy, scipy and matplotlib for array ops and plots\n", 19 | "import numpy as np\n", 20 | "np.random.seed(3333)\n", 21 | "import matplotlib.pyplot as plt\n", 22 | "%matplotlib inline\n", 23 | "from scipy.stats import norm\n", 24 | "import timeit\n", 25 | "from tqdm import tqdm_notebook as tqdm" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "# Creating our synthetic data\n", 33 | "\n", 34 | "First let's create a dataset of 5000 samples following a Gaussian distribution with a pre-specified covariance matrix." 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": { 41 | "collapsed": true 42 | }, 43 | "outputs": [], 44 | "source": [ 45 | "# create n_samples sampled from Gaussian distribution\n", 46 | "n_samples = 5000\n", 47 | "\n", 48 | "cov_mat = 0.01 * np.eye(2) + [[0., 0.0], [0.05, 0.]]\n", 49 | "print(\"real data cov matrix: \\n\", cov_mat)\n", 50 | "pdf_x = np.random.multivariate_normal(np.zeros(2), cov_mat, n_samples)\n", 51 | "# print \"pdf shape: \", pdf_x.shape\n", 52 | "_ = plt.scatter(pdf_x[:, 0], pdf_x[:, 1], edgecolor='none')\n", 53 | "_ = plt.title('real data distribution')" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "# Problem\n", 61 | "\n", 62 | "### We want to build a GAN that learns a mapping:\n", 63 | "\n", 64 | "#### uniform distribution `Z` in [-1, 1] --> Pdata distribution of `5000` samples.\n", 65 | "\n", 66 | "### We can do so with the following networks Generator and Discriminator:\n", 67 | "\n", 68 | "```\n", 69 | "Generator (\n", 70 | " (fc1): Linear (50 -> 256)\n", 71 | " (fc2): Linear (256 -> 256)\n", 72 | " (out_fc): Linear (256 -> 2)\n", 73 | ")\n", 74 | "```\n", 75 | "\n", 76 | "```\n", 77 | "Discriminator (\n", 78 | " (fc1): Linear (2 -> 128)\n", 79 | " (fc2): Linear (128 -> 128)\n", 80 | " (out_fc): Linear (128 -> 1)\n", 81 | ")\n", 82 | "```\n" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "metadata": {}, 88 | "source": [ 89 | "### Exercise 1\n", 90 | "\n", 91 | "Build a Generator network with PyTorch tools following the definition provided in the problem statement." 
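For reference, one possible solution sketch for Exercise 1 (not the only valid one): the layer names `fc1`, `fc2`, `out_fc` and the sizes follow the architecture printed in the problem statement, and the `forward` pass is the one already given in the skeleton of the next cell.

```python
import torch.nn as nn
import torch.nn.functional as F

class Generator(nn.Module):
    """ Maps z (drawn from the prior) to samples in the data space """
    def __init__(self, z_dim=100, out_dim=2, hidden_size=128):
        super().__init__()
        # z_dim -> hidden_size -> hidden_size -> out_dim, as in the printed spec
        self.fc1 = nn.Linear(z_dim, hidden_size)
        self.fc2 = nn.Linear(hidden_size, hidden_size)
        self.out_fc = nn.Linear(hidden_size, out_dim)

    def forward(self, x):
        h1 = F.leaky_relu(self.fc1(x), 0.3)
        h2 = F.leaky_relu(self.fc2(h1), 0.3)
        return self.out_fc(h2)
```

Instantiated with `z_dim=50` and `hidden_size=256` (as done later in the notebook), this prints exactly the `Linear (50 -> 256)`, `Linear (256 -> 256)`, `Linear (256 -> 2)` stack from the spec.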
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "metadata": {
98 | "collapsed": true
99 | },
100 | "outputs": [],
101 | "source": [
102 | "class Generator(nn.Module):\n",
103 | "    \"\"\" This network maps z (drawn from the prior distribution) to the pdf of the training data, with dim `out_dim` \"\"\"\n",
104 | "    def __init__(self, z_dim=100, out_dim=2, hidden_size=128):\n",
105 | "        super().__init__()\n",
106 | "        # TODO\n",
107 | "        # self.fc1 = ...\n",
108 | "        \n",
109 | "    def forward(self, x):\n",
110 | "        h1 = F.leaky_relu(self.fc1(x), 0.3)\n",
111 | "        h2 = F.leaky_relu(self.fc2(h1), 0.3)\n",
112 | "        h3 = self.out_fc(h2)\n",
113 | "        return h3"
114 | ]
115 | },
116 | {
117 | "cell_type": "markdown",
118 | "metadata": {},
119 | "source": [
120 | "### Exercise 2\n",
121 | "\n",
122 | "Build a Discriminator network with PyTorch tools following the definition provided in the problem statement. Implement the forward pass of the Discriminator (based on the Generator's, but with the proper changes)."
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": null,
128 | "metadata": {
129 | "collapsed": true
130 | },
131 | "outputs": [],
132 | "source": [
133 | "class Discriminator(nn.Module):\n",
134 | "    \"\"\" This network classifies its input as either real or fake \"\"\"\n",
135 | "    def __init__(self, input_dim=2, hidden_size=128):\n",
136 | "        super().__init__()\n",
137 | "        # TODO\n",
138 | "        # self.fc1 = ...\n",
139 | "        \n",
140 | "    def forward(self, x):\n",
141 | "        # TODO\n",
142 | "        return h3"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {
149 | "collapsed": true
150 | },
151 | "outputs": [],
152 | "source": [
153 | "def weights_init(m):\n",
154 | "    classname = m.__class__.__name__\n",
155 | "    if classname.find('Linear') != -1:\n",
156 | "        init.orthogonal(m.weight.data, gain=1.4)"
157 | ]
158 | },
159 | {
160 | "cell_type": "markdown",
161 | "metadata": {},
162 | "source": [
163 | "### Exercise 3\n",
164 | "\n",
165 | "Fill in the blank in the next cell, specifying the loss required to perform real/fake binary classification on the output of the D network. Also, make sure you understand why we have 2 optimizers, `g_opt` and `d_opt`, before proceeding with training."
166 | ]
167 | },
168 | {
169 | "cell_type": "code",
170 | "execution_count": null,
171 | "metadata": {
172 | "collapsed": true
173 | },
174 | "outputs": [],
175 | "source": [
176 | "# Hyperparams\n",
177 | "G_LR = 0.0001\n",
178 | "D_LR = 0.0001\n",
179 | "Z_DIM = 50\n",
180 | "BATCH_SIZE = 100\n",
181 | "N_EPOCHS = 28\n",
182 | "VIZ_EVERY = 100\n",
183 | "D_UPDATES = 1\n",
184 | "\n",
185 | "# Create both networks\n",
186 | "g_net = Generator(z_dim=Z_DIM, hidden_size=256)\n",
187 | "g_net.apply(weights_init)\n",
188 | "d_net = Discriminator()\n",
189 | "d_net.apply(weights_init)\n",
190 | "\n",
191 | "print(g_net)\n",
192 | "print(d_net)\n",
193 | "\n",
194 | "# TO DO: Loss is.... \n",
195 | "# criterion = ...\n",
196 | "\n",
197 | "# Create the two optimizers\n",
198 | "g_opt = optim.Adam(g_net.parameters(), lr=G_LR, betas=(0.5, 0.999))\n",
199 | "d_opt = optim.Adam(d_net.parameters(), lr=D_LR, betas=(0.5, 0.999))"
200 | ]
201 | },
202 | {
203 | "cell_type": "markdown",
204 | "metadata": {},
205 | "source": [
206 | "### Exercise 4\n",
207 | "\n",
208 | "Make the update routine of network G (have a look at the update routine of network D and map it onto G, with the proper changes)."
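As a reference for Exercises 3 and 4, here is one possible way to fill in the blanks. This is a sketch, not the only valid answer: it assumes the Discriminator's `forward` ends in a sigmoid, so plain binary cross-entropy (`nn.BCELoss`) applies (with raw logits you would combine a sigmoid with BCE instead), and it simply mirrors the D update that appears in the training loop below. The name `d_fake_g` is just illustrative.

```python
# Exercise 3: D performs real/fake binary classification -> binary cross-entropy
criterion = nn.BCELoss()

# Exercise 4: G update routine, mirroring the D update
g_opt.zero_grad()
labv = Variable(torch.ones(batch_z.size(0)))  # (1) G wants D to answer "real" (1)
batch_x_fake = g_net(batch_z)                 # (2) forward the z batch through G
d_fake_g = d_net(batch_x_fake)                # no detach here: gradients must flow back into G
g_real_loss = criterion(d_fake_g, labv)       # (3) G loss against the real label
g_real_loss.backward()                        # (4) backprop gradients
g_opt.step()                                  # (5) update G's parameters
```

Note that only `g_opt.step()` is called, so even though the loss is computed through D, only G's parameters are updated.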
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "metadata": {
215 | "collapsed": true,
216 | "scrolled": false
217 | },
218 | "outputs": [],
219 | "source": [
220 | "# store resulting losses out of training\n",
221 | "d_rl_losses = []\n",
222 | "d_fk_losses = []\n",
223 | "d_losses = []\n",
224 | "g_losses = []\n",
225 | "\n",
226 | "# Pick a big sample from z and project it through G and compare to pdf_x (original data pdf)\n",
227 | "# this is not data to be trained on, but to check G projections\n",
228 | "sample_z = np.random.uniform(-1, 1, [n_samples, Z_DIM]).astype(np.float32)\n",
229 | "# EXERCISE: NOTE THE VOLATILE=TRUE. WHAT IS IT DOING?\n",
230 | "v_sample_z = Variable(torch.FloatTensor(sample_z), volatile=True)\n",
231 | "batches_per_epoch = pdf_x.shape[0] // BATCH_SIZE\n",
232 | "counter = 0\n",
233 | "curr_epoch = -1\n",
234 | "batch_timings = []\n",
235 | "\n",
236 | "for counter in tqdm(range(int(N_EPOCHS * batches_per_epoch))):\n",
237 | "    if counter % batches_per_epoch == 0:\n",
238 | "        # epoch change. First time this if is true, so also init variables.\n",
239 | "        batch_idx = 0\n",
240 | "        curr_epoch += 1\n",
241 | "        # randomize the pdf_x samples\n",
242 | "        np.random.shuffle(pdf_x)\n",
243 | "    beg_t = timeit.default_timer()\n",
244 | "    # sample a batch from prior pdf z\n",
245 | "    batch_z = torch.FloatTensor(np.random.uniform(-1, 1, [BATCH_SIZE, Z_DIM]).astype(np.float32))\n",
246 | "    batch_z = Variable(batch_z)\n",
247 | "    # get a batch of samples from gtruth pdf\n",
248 | "    batch_x_real = torch.FloatTensor(pdf_x[batch_idx:(batch_idx + BATCH_SIZE)])\n",
249 | "    batch_x_real = Variable(batch_x_real)\n    batch_idx += BATCH_SIZE  # advance to the next slice of real samples\n",
250 | "    d_opt.zero_grad()\n",
251 | "    g_opt.zero_grad()\n",
252 | "    # ------------ DISCRIMINATOR TRAINING\n",
253 | "    # build real label\n",
254 | "    for d_i in range(D_UPDATES):\n",
255 | "        d_opt.zero_grad()\n",
256 | "        labv = Variable(torch.ones(batch_x_real.size(0)))\n",
257 | "        # (1) REAL D LOSS\n",
258 | "        d_real = d_net(batch_x_real)\n",
259 | "        d_real_loss = criterion(d_real, labv)\n",
260 | "        d_real_loss.backward()\n",
261 | "\n",
262 | "        # (2) FAKE D LOSS\n",
263 | "        batch_x_fake = g_net(batch_z)\n",
264 | "        # EXERCISE: NOTE THE DETACH. WHAT IS IT DOING?\n",
265 | "        d_fake = d_net(batch_x_fake.detach())\n",
266 | "        # build fake label\n",
267 | "        labv.data.fill_(0.)\n",
268 | "        d_fake_loss = criterion(d_fake, labv)\n",
269 | "        d_fake_loss.backward()\n",
270 | "        d_opt.step()\n",
271 | "    \n",
272 | "    d_loss = d_fake_loss + d_real_loss\n",
273 | "    # ------------ GENERATOR TRAINING\n",
274 | "    # TO DO:\n",
275 | "    # (1) build real label `labv`\n",
276 | "    # (2) forward the z batch through G\n",
277 | "    # (3) compute the G real loss with the label Variable\n",
278 | "    # (4) backprop gradients\n",
279 | "    # (5) update network parameters\n",
280 | "    \n",
281 | "    # Gather losses to print later\n",
282 | "    d_fk_losses.append(d_fake_loss.data.numpy())\n",
283 | "    d_rl_losses.append(d_real_loss.data.numpy())\n",
284 | "    d_losses.append(d_loss.data.numpy())\n",
285 | "    g_losses.append(g_real_loss.data.numpy())\n",
286 | "    \n",
287 | "    end_t = timeit.default_timer()\n",
288 | "    batch_timings.append(end_t - beg_t)\n",
289 | "    \n",
290 | "    if counter % VIZ_EVERY == 0:\n",
291 | "        fig = plt.figure(figsize=(8, 8))\n",
292 | "        fake_pred = g_net(v_sample_z).data.numpy()\n",
293 | "        _ = plt.scatter(sample_z[:, 0], sample_z[:, 1], edgecolor='none', color='orange')\n",
294 | "        _ = plt.scatter(pdf_x[:, 0], pdf_x[:, 1], edgecolor='none')\n",
295 | "        _ = plt.scatter(fake_pred[:, 0], fake_pred[:, 1], color='green', edgecolor='none')\n",
296 | "        plt.show()\n",
297 | "print(\"Done training for {} epochs! Elapsed time: {} s\".format(N_EPOCHS, np.sum(batch_timings)))\n",
298 | "print(\"Total number of iterations done: \", counter)\n",
299 | "    "
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": null,
305 | "metadata": {
306 | "collapsed": true
307 | },
308 | "outputs": [],
309 | "source": [
310 | "# Visualize the learning curves (let's see the funny behavior of GANs in action!)\n",
311 | "_ = plt.figure(figsize=(15, 10))\n",
312 | "_ = plt.plot(d_fk_losses, label='D fake loss', linewidth=2.)\n",
313 | "_ = plt.plot(d_rl_losses, label='D real loss', linewidth=2.)\n",
314 | "#plt.plot(d_losses, label='D loss')\n",
315 | "_ = plt.plot(g_losses, label='G loss', linewidth=2.)\n",
316 | "_ = plt.legend()\n",
317 | "# NOTE: there is no clean convergence towards a minimum! Instead, the losses are very noisy and coupled to each other.\n",
318 | "# They have to stay balanced throughout the training stage."
319 | ]
320 | },
321 | {
322 | "cell_type": "markdown",
323 | "metadata": {},
324 | "source": [
325 | "### Exercise 5\n",
326 | "\n",
327 | "Sample a batch of `z` samples and make G infer `G(z)`, placing the result in variable `g_pred` to plot it."
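One way to fill in Exercise 5, reusing the `v_sample_z` pattern from the training cell; `volatile=True` marks this as pure inference, so no graph (and no gradient bookkeeping) is kept:

```python
# sample new z vectors from the prior and forward them through G
batch_z = np.random.uniform(-1, 1, [n_samples, Z_DIM]).astype(np.float32)
batch_z = Variable(torch.FloatTensor(batch_z), volatile=True)
g_pred = g_net(batch_z)
```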
328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": null, 333 | "metadata": { 334 | "collapsed": true 335 | }, 336 | "outputs": [], 337 | "source": [ 338 | "beg_t = timeit.default_timer()\n", 339 | "# TO DO: sample new z vectors from prior\n", 340 | "# batch_z = ...\n", 341 | "# TO DO: forward through G\n", 342 | "# g_pred = ...\n", 343 | "g_pred = g_pred.data.numpy()\n", 344 | "end_t = timeit.default_timer()\n", 345 | "print(\"Inferred {} G samples in {} s\".format(n_samples, end_t - beg_t))\n", 346 | "_ = plt.figure(figsize=(10, 10))\n", 347 | "#_ = plt.scatter(sample_z[:, 0], sample_z[:, 1], color='orange', label='Prior z', edgecolor='none')\n", 348 | "_ = plt.scatter(pdf_x[:, 0], pdf_x[:, 1], label='Real data', edgecolor='none')\n", 349 | "_ = plt.scatter(g_pred[:, 0], g_pred[:, 1], color='green', label='Generated data', edgecolor='none')\n", 350 | "_ = plt.legend()" 351 | ] 352 | } 353 | ], 354 | "metadata": { 355 | "kernelspec": { 356 | "display_name": "Python 3", 357 | "language": "python", 358 | "name": "python3" 359 | }, 360 | "language_info": { 361 | "codemirror_mode": { 362 | "name": "ipython", 363 | "version": 3 364 | }, 365 | "file_extension": ".py", 366 | "mimetype": "text/x-python", 367 | "name": "python", 368 | "nbconvert_exporter": "python", 369 | "pygments_lexer": "ipython3", 370 | "version": "3.6.1" 371 | } 372 | }, 373 | "nbformat": 4, 374 | "nbformat_minor": 2 375 | } 376 | -------------------------------------------------------------------------------- /my_rnn_pytorched/The Tensor, My Friend.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import torch\n", 10 | "tv = torch.__version__\n", 11 | "print('Using PyTorch version: ', tv)\n", 12 | "# check we have PyTorch 0.2.x\n", 13 | "assert tv[0] == '0' and tv[2] == '2', tv\n", 14 | "\n", 15 | "import numpy as np" 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "metadata": {}, 21 | "source": [ 22 | "# First things first: The world becomes tensorized" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "# Every deep learning framework is built upon Tensors\n", 32 | "# These are marvelous multi-dimensional structures\n", 33 | "# We can create Tensors out of Python lists or NumPy arrays\n", 34 | "my_list = [0, 1, 2, 3]\n", 35 | "my_array = np.array(my_list)\n", 36 | "my_list_T = torch.LongTensor(my_list)\n", 37 | "my_array_T = torch.LongTensor(my_array)\n", 38 | "# These are the same, so the assertion will confirm it\n", 39 | "assert type(my_list_T) == type(my_array_T)\n", 40 | "\n", 41 | "# Now we'll create a multi-dimensional array out of a list of lists of lists (3-D)\n", 42 | "T_3 = [[[0, 1, 2.], [5, 6, 7]], [[0.2, 0.4, 2.2], [4.5, -6, -9]]]\n", 43 | "T_3 = np.array(T_3)\n", 44 | "\n", 45 | "assert T_3.ndim == 3, T_3.ndim\n", 46 | "print('Number of dimensions: ', T_3.ndim)\n", 47 | "print('Shape of each dimension: ', T_3.shape)\n", 48 | "# the dimensions of this NumPy array are [2, 2, 3]" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "### Congratz for your marvelous Tensors, but now what? \n", 56 | "Tensors have:\n", 57 | "1. Info about the data type and the size of each dimension (but NumPy too!)\n", 58 | "2. 
the GPU capabilities (NumPy DOES NOT)"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": null,
64 | "metadata": {},
65 | "outputs": [],
66 | "source": [
67 | "# We can operate with Tensors of course\n",
68 | "# weights matrix with [outputs x inputs] = [100 x 25]\n",
69 | "W = torch.randn(100, 25)\n",
70 | "# bias vector [100]\n",
71 | "b = torch.zeros(100)\n",
72 | "# input vector [25]\n",
73 | "x = torch.randn(25)\n",
74 | "# Yes, this is a single layer fully connected neural network\n",
75 | "y = torch.matmul(W, x) + b\n",
76 | "# y ~ [100] output vector\n",
77 | "print('x size: ', x.size())\n",
78 | "print('W size: ', W.size())\n",
79 | "print('b size: ', b.size())\n",
80 | "print('y = Wx + b, size: ', y.size())"
81 | ]
82 | },
83 | {
84 | "cell_type": "markdown",
85 | "metadata": {},
86 | "source": [
87 | "### Some PyTorch notation for Tensor properties:"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": null,
93 | "metadata": {},
94 | "outputs": [],
95 | "source": [
96 | "# NumPy --> PyTorch translation\n",
97 | "# --------------------------------\n",
98 | "# 1) shape --> size()\n",
99 | "y.size()\n",
100 | "print('y size: ', y.size())\n",
101 | "\n",
102 | "# 2) reshape() --> view()\n",
103 | "z = y.view(10, 10)\n",
104 | "print('z size (y reshaped to 10x10): ', z.size())\n",
105 | "\n",
106 | "# 3) expand_dims() --> unsqueeze()\n",
107 | "Y = y.unsqueeze(-1)\n",
108 | "print('Y size (y unsqueezed in last dim): ', Y.size())\n",
109 | "\n",
110 | "# 4) transpose(0, 1) --> t()\n",
111 | "Y_t = Y.t()\n",
112 | "print('Y transposed size: ', Y_t.size())"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | "### The \"magic\" behind AUTOGRAD\n",
120 | "\n",
121 | "**Variable:** It wraps a Tensor, and supports nearly all of the operations defined on it. Once you finish your computation you can call `.backward()` and have all the gradients computed automatically.\n",
122 | "\n",
123 | "You can access the raw tensor through the `.data` attribute, while the gradient w.r.t. this variable is accumulated into `.grad`[[1]](http://pytorch.org/tutorials/beginner/blitz/autograd_tutorial.html)."
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": null,
129 | "metadata": {},
130 | "outputs": [],
131 | "source": [
132 | "from torch.autograd import Variable\n",
133 | "\n",
134 | "T = torch.randn(10, 10)\n",
135 | "# we make the Variable by just wrapping the Tensor with it\n",
136 | "V = Variable(T)\n",
137 | "# This is a Variable containing a FloatTensor\n",
138 | "print(V)"
139 | ]
140 | },
141 | {
142 | "cell_type": "markdown",
143 | "metadata": {},
144 | "source": [
145 | "### The reason to create Variables: the Graph\n",
146 | "\n",
147 | "Tensors are nodes in the graph. Edges are the computations relating Tensors (as in TensorFlow). However, the main difference between PyTorch and TensorFlow is: **DYNAMIC GRAPH!**\n",
148 | "\n",
149 | "\n",
150 | "\n",
151 | "[comment]: (Reference_for_the_figure:https://medium.com/intuitionmachine/pytorch-dynamic-computational-graphs-and-modular-deep-learning-7e7f89f18d1)\n",
152 | "\n",
153 | "The Graph is built operation by operation, thus at runtime!"
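To make "built at runtime" concrete, here is a tiny sketch using the same 0.2-era `Variable` API as this notebook: ordinary Python control flow decides which operations enter the graph on each run, and `backward()` simply retraces whatever was recorded.

```python
import torch
from torch.autograd import Variable

x = Variable(torch.randn(10), requires_grad=True)
# the branch taken depends on a runtime value, so a different graph
# may be recorded on every execution -- that is the dynamic part
if x.sum().data[0] > 0:
    out = (2 * x).sum()
else:
    out = (x ** 2).sum()
out.backward()  # gradients follow whichever branch actually ran
print(x.grad)   # either 2s everywhere or 2 * x, depending on the branch
```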
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": null,
159 | "metadata": {},
160 | "outputs": [],
161 | "source": [
162 | "# Example of a graph creation: out = sum(x + y)\n",
163 | "# requires_grad tells the framework we want the gradient w.r.t. that variable to be computed\n",
164 | "x = Variable(torch.ones(10), requires_grad=True)\n",
165 | "y = Variable(torch.ones(10), requires_grad=True)\n",
166 | "z = x + y\n",
167 | "out = z.sum()"
168 | ]
169 | },
170 | {
171 | "cell_type": "code",
172 | "execution_count": null,
173 | "metadata": {},
174 | "outputs": [],
175 | "source": [
176 | "out.backward()\n",
177 | "print(z)\n",
178 | "print(z.grad)  # z is an intermediate node, not a leaf, so no gradient is retained for it\n",
179 | "print(x.grad)  # d(out)/dx_i = 1 for every element, so a vector of ones\n",
180 | "print(y.grad)  # same reasoning for y"
181 | ]
182 | },
183 | {
184 | "cell_type": "markdown",
185 | "metadata": {
186 | "collapsed": true
187 | },
188 | "source": [
189 | "For further reference: http://pytorch.org/tutorials/beginner/blitz/autograd_tutorial.html"
190 | ]
191 | }
192 | ],
193 | "metadata": {
194 | "kernelspec": {
195 | "display_name": "Python 3",
196 | "language": "python",
197 | "name": "python3"
198 | },
199 | "language_info": {
200 | "codemirror_mode": {
201 | "name": "ipython",
202 | "version": 3
203 | },
204 | "file_extension": ".py",
205 | "mimetype": "text/x-python",
206 | "name": "python",
207 | "nbconvert_exporter": "python",
208 | "pygments_lexer": "ipython3",
209 | "version": "3.6.1"
210 | }
211 | },
212 | "nbformat": 4,
213 | "nbformat_minor": 2
214 | }
215 |
--------------------------------------------------------------------------------
/my_rnn_pytorched/aliceinwonderland.txt:
--------------------------------------------------------------------------------
1 | I--DOWN THE RABBIT-HOLE
2 |
3 |
4 | Alice was beginning to get very tired of sitting by her sister on the
5 | bank, and of having nothing to do. Once or twice she had peeped into the
6 | book her sister was reading, but it had no pictures or conversations in
7 | it, "and what is the use of a book," thought Alice, "without pictures or
8 | conversations?"
9 |
10 | So she was considering in her own mind (as well as she could, for the
11 | day made her feel very sleepy and stupid), whether the pleasure of
12 | making a daisy-chain would be worth the trouble of getting up and
13 | picking the daisies, when suddenly a White Rabbit with pink eyes ran
14 | close by her.
15 |
16 | There was nothing so very remarkable in that, nor did Alice think it so
17 | very much out of the way to hear the Rabbit say to itself, "Oh dear! Oh
18 | dear! I shall be too late!" But when the Rabbit actually took a watch
19 | out of its waistcoat-pocket and looked at it and then hurried on, Alice
20 | started to her feet, for it flashed across her mind that she had never
21 | before seen a rabbit with either a waistcoat-pocket, or a watch to take
22 | out of it, and, burning with curiosity, she ran across the field after
23 | it and was just in time to see it pop down a large rabbit-hole, under
24 | the hedge. In another moment, down went Alice after it!
25 |
26 | [Illustration]
27 |
28 | The rabbit-hole went straight on like a tunnel for some way and then
29 | dipped suddenly down, so suddenly that Alice had not a moment to think
30 | about stopping herself before she found herself falling down what seemed
31 | to be a very deep well.
32 |
33 | Either the well was very deep, or she fell very slowly, for she had
34 | plenty of time, as she went down, to look about her. First, she tried to
First, she tried to 35 | make out what she was coming to, but it was too dark to see anything; 36 | then she looked at the sides of the well and noticed that they were 37 | filled with cupboards and book-shelves; here and there she saw maps and 38 | pictures hung upon pegs. She took down a jar from one of the shelves as 39 | she passed. It was labeled "ORANGE MARMALADE," but, to her great 40 | disappointment, it was empty; she did not like to drop the jar, so 41 | managed to put it into one of the cupboards as she fell past it. 42 | 43 | Down, down, down! Would the fall never come to an end? There was nothing 44 | else to do, so Alice soon began talking to herself. "Dinah'll miss me 45 | very much to-night, I should think!" (Dinah was the cat.) "I hope 46 | they'll remember her saucer of milk at tea-time. Dinah, my dear, I wish 47 | you were down here with me!" Alice felt that she was dozing off, when 48 | suddenly, thump! thump! down she came upon a heap of sticks and dry 49 | leaves, and the fall was over. 50 | 51 | Alice was not a bit hurt, and she jumped up in a moment. She looked up, 52 | but it was all dark overhead; before her was another long passage and 53 | the White Rabbit was still in sight, hurrying down it. There was not a 54 | moment to be lost. Away went Alice like the wind and was just in time to 55 | hear it say, as it turned a corner, "Oh, my ears and whiskers, how late 56 | it's getting!" She was close behind it when she turned the corner, but 57 | the Rabbit was no longer to be seen. 58 | 59 | She found herself in a long, low hall, which was lit up by a row of 60 | lamps hanging from the roof. There were doors all 'round the hall, but 61 | they were all locked; and when Alice had been all the way down one side 62 | and up the other, trying every door, she walked sadly down the middle, 63 | wondering how she was ever to get out again. 64 | 65 | Suddenly she came upon a little table, all made of solid glass. There 66 | was nothing on it but a tiny golden key, and Alice's first idea was that 67 | this might belong to one of the doors of the hall; but, alas! either the 68 | locks were too large, or the key was too small, but, at any rate, it 69 | would not open any of them. However, on the second time 'round, she came 70 | upon a low curtain she had not noticed before, and behind it was a 71 | little door about fifteen inches high. She tried the little golden key 72 | in the lock, and to her great delight, it fitted! 73 | 74 | [Illustration] 75 | 76 | Alice opened the door and found that it led into a small passage, not 77 | much larger than a rat-hole; she knelt down and looked along the passage 78 | into the loveliest garden you ever saw. How she longed to get out of 79 | that dark hall and wander about among those beds of bright flowers and 80 | those cool fountains, but she could not even get her head through the 81 | doorway. "Oh," said Alice, "how I wish I could shut up like a telescope! 82 | I think I could, if I only knew how to begin." 83 | 84 | Alice went back to the table, half hoping she might find another key on 85 | it, or at any rate, a book of rules for shutting people up like 86 | telescopes. This time she found a little bottle on it ("which certainly 87 | was not here before," said Alice), and tied 'round the neck of the 88 | bottle was a paper label, with the words "DRINK ME" beautifully printed 89 | on it in large letters. 
90 | 91 | "No, I'll look first," she said, "and see whether it's marked '_poison_' 92 | or not," for she had never forgotten that, if you drink from a bottle 93 | marked "poison," it is almost certain to disagree with you, sooner or 94 | later. However, this bottle was _not_ marked "poison," so Alice ventured 95 | to taste it, and, finding it very nice (it had a sort of mixed flavor of 96 | cherry-tart, custard, pineapple, roast turkey, toffy and hot buttered 97 | toast), she very soon finished it off. 98 | 99 | * * * * * 100 | 101 | "What a curious feeling!" said Alice. "I must be shutting up like a 102 | telescope!" 103 | 104 | And so it was indeed! She was now only ten inches high, and her face 105 | brightened up at the thought that she was now the right size for going 106 | through the little door into that lovely garden. 107 | 108 | After awhile, finding that nothing more happened, she decided on going 109 | into the garden at once; but, alas for poor Alice! When she got to the 110 | door, she found she had forgotten the little golden key, and when she 111 | went back to the table for it, she found she could not possibly reach 112 | it: she could see it quite plainly through the glass and she tried her 113 | best to climb up one of the legs of the table, but it was too slippery, 114 | and when she had tired herself out with trying, the poor little thing 115 | sat down and cried. 116 | 117 | "Come, there's no use in crying like that!" said Alice to herself rather 118 | sharply. "I advise you to leave off this minute!" She generally gave 119 | herself very good advice (though she very seldom followed it), and 120 | sometimes she scolded herself so severely as to bring tears into her 121 | eyes. 122 | 123 | Soon her eye fell on a little glass box that was lying under the table: 124 | she opened it and found in it a very small cake, on which the words "EAT 125 | ME" were beautifully marked in currants. "Well, I'll eat it," said 126 | Alice, "and if it makes me grow larger, I can reach the key; and if it 127 | makes me grow smaller, I can creep under the door: so either way I'll 128 | get into the garden, and I don't care which happens!" 129 | 130 | She ate a little bit and said anxiously to herself, "Which way? Which 131 | way?" holding her hand on the top of her head to feel which way she was 132 | growing; and she was quite surprised to find that she remained the same 133 | size. So she set to work and very soon finished off the cake. 134 | 135 | [Illustration] 136 | 137 | 138 | 139 | 140 | II--THE POOL OF TEARS 141 | 142 | 143 | "Curiouser and curiouser!" cried Alice (she was so much surprised that 144 | for the moment she quite forgot how to speak good English). "Now I'm 145 | opening out like the largest telescope that ever was! Good-by, feet! Oh, 146 | my poor little feet, I wonder who will put on your shoes and stockings 147 | for you now, dears? I shall be a great deal too far off to trouble 148 | myself about you." 149 | 150 | Just at this moment her head struck against the roof of the hall; in 151 | fact, she was now rather more than nine feet high, and she at once took 152 | up the little golden key and hurried off to the garden door. 153 | 154 | Poor Alice! It was as much as she could do, lying down on one side, to 155 | look through into the garden with one eye; but to get through was more 156 | hopeless than ever. She sat down and began to cry again. 
157 | 158 | She went on shedding gallons of tears, until there was a large pool all 159 | 'round her and reaching half down the hall. 160 | 161 | After a time, she heard a little pattering of feet in the distance and 162 | she hastily dried her eyes to see what was coming. It was the White 163 | Rabbit returning, splendidly dressed, with a pair of white kid-gloves in 164 | one hand and a large fan in the other. He came trotting along in a 165 | great hurry, muttering to himself, "Oh! the Duchess, the Duchess! Oh! 166 | _won't_ she be savage if I've kept her waiting!" 167 | 168 | When the Rabbit came near her, Alice began, in a low, timid voice, "If 169 | you please, sir--" The Rabbit started violently, dropped the white 170 | kid-gloves and the fan and skurried away into the darkness as hard as he 171 | could go. 172 | 173 | [Illustration] 174 | 175 | Alice took up the fan and gloves and she kept fanning herself all the 176 | time she went on talking. "Dear, dear! How queer everything is to-day! 177 | And yesterday things went on just as usual. _Was_ I the same when I got 178 | up this morning? But if I'm not the same, the next question is, 'Who in 179 | the world am I?' Ah, _that's_ the great puzzle!" 180 | 181 | As she said this, she looked down at her hands and was surprised to see 182 | that she had put on one of the Rabbit's little white kid-gloves while 183 | she was talking. "How _can_ I have done that?" she thought. "I must be 184 | growing small again." She got up and went to the table to measure 185 | herself by it and found that she was now about two feet high and was 186 | going on shrinking rapidly. She soon found out that the cause of this 187 | was the fan she was holding and she dropped it hastily, just in time to 188 | save herself from shrinking away altogether. 189 | 190 | "That _was_ a narrow escape!" said Alice, a good deal frightened at the 191 | sudden change, but very glad to find herself still in existence. "And 192 | now for the garden!" And she ran with all speed back to the little door; 193 | but, alas! the little door was shut again and the little golden key was 194 | lying on the glass table as before. "Things are worse than ever," 195 | thought the poor child, "for I never was so small as this before, 196 | never!" 197 | 198 | As she said these words, her foot slipped, and in another moment, 199 | splash! she was up to her chin in salt-water. Her first idea was that 200 | she had somehow fallen into the sea. However, she soon made out that she 201 | was in the pool of tears which she had wept when she was nine feet high. 202 | 203 | [Illustration] 204 | 205 | Just then she heard something splashing about in the pool a little way 206 | off, and she swam nearer to see what it was: she soon made out that it 207 | was only a mouse that had slipped in like herself. 208 | 209 | "Would it be of any use, now," thought Alice, "to speak to this mouse? 210 | Everything is so out-of-the-way down here that I should think very 211 | likely it can talk; at any rate, there's no harm in trying." So she 212 | began, "O Mouse, do you know the way out of this pool? I am very tired 213 | of swimming about here, O Mouse!" The Mouse looked at her rather 214 | inquisitively and seemed to her to wink with one of its little eyes, but 215 | it said nothing. 216 | 217 | "Perhaps it doesn't understand English," thought Alice. "I dare say it's 218 | a French mouse, come over with William the Conqueror." So she began 219 | again: "Ou est ma chatte?" 
which was the first sentence in her French 220 | lesson-book. The Mouse gave a sudden leap out of the water and seemed to 221 | quiver all over with fright. "Oh, I beg your pardon!" cried Alice 222 | hastily, afraid that she had hurt the poor animal's feelings. "I quite 223 | forgot you didn't like cats." 224 | 225 | "Not like cats!" cried the Mouse in a shrill, passionate voice. "Would 226 | _you_ like cats, if you were me?" 227 | 228 | "Well, perhaps not," said Alice in a soothing tone; "don't be angry 229 | about it. And yet I wish I could show you our cat Dinah. I think you'd 230 | take a fancy to cats, if you could only see her. She is such a dear, 231 | quiet thing." The Mouse was bristling all over and she felt certain it 232 | must be really offended. "We won't talk about her any more, if you'd 233 | rather not." 234 | 235 | "We, indeed!" cried the Mouse, who was trembling down to the end of its 236 | tail. "As if _I_ would talk on such a subject! Our family always _hated_ 237 | cats--nasty, low, vulgar things! Don't let me hear the name again!" 238 | 239 | [Illustration: Alice at the Mad Tea Party.] 240 | 241 | "I won't indeed!" said Alice, in a great hurry to change the subject of 242 | conversation. "Are you--are you fond--of--of dogs? There is such a nice 243 | little dog near our house, I should like to show you! It kills all the 244 | rats and--oh, dear!" cried Alice in a sorrowful tone. "I'm afraid I've 245 | offended it again!" For the Mouse was swimming away from her as hard as 246 | it could go, and making quite a commotion in the pool as it went. 247 | 248 | So she called softly after it, "Mouse dear! Do come back again, and we 249 | won't talk about cats, or dogs either, if you don't like them!" When the 250 | Mouse heard this, it turned 'round and swam slowly back to her; its face 251 | was quite pale, and it said, in a low, trembling voice, "Let us get to 252 | the shore and then I'll tell you my history and you'll understand why it 253 | is I hate cats and dogs." 254 | 255 | It was high time to go, for the pool was getting quite crowded with the 256 | birds and animals that had fallen into it; there were a Duck and a Dodo, 257 | a Lory and an Eaglet, and several other curious creatures. Alice led the 258 | way and the whole party swam to the shore. 259 | 260 | [Illustration] 261 | 262 | 263 | 264 | 265 | III--A CAUCUS-RACE AND A LONG TALE 266 | 267 | 268 | They were indeed a queer-looking party that assembled on the bank--the 269 | birds with draggled feathers, the animals with their fur clinging close 270 | to them, and all dripping wet, cross and uncomfortable. 271 | 272 | [Illustration] 273 | 274 | The first question, of course, was how to get dry again. They had a 275 | consultation about this and after a few minutes, it seemed quite natural 276 | to Alice to find herself talking familiarly with them, as if she had 277 | known them all her life. 278 | 279 | At last the Mouse, who seemed to be a person of some authority among 280 | them, called out, "Sit down, all of you, and listen to me! _I'll_ soon 281 | make you dry enough!" They all sat down at once, in a large ring, with 282 | the Mouse in the middle. 283 | 284 | "Ahem!" said the Mouse with an important air. "Are you all ready? This 285 | is the driest thing I know. Silence all 'round, if you please! 'William 286 | the Conqueror, whose cause was favored by the pope, was soon submitted 287 | to by the English, who wanted leaders, and had been of late much 288 | accustomed to usurpation and conquest. 
Edwin and Morcar, the Earls of 289 | Mercia and Northumbria'--" 290 | 291 | "Ugh!" said the Lory, with a shiver. 292 | 293 | "--'And even Stigand, the patriotic archbishop of Canterbury, found it 294 | advisable'--" 295 | 296 | "Found _what_?" said the Duck. 297 | 298 | "Found _it_," the Mouse replied rather crossly; "of course, you know 299 | what 'it' means." 300 | 301 | "I know what 'it' means well enough, when _I_ find a thing," said the 302 | Duck; "it's generally a frog or a worm. The question is, what did the 303 | archbishop find?" 304 | 305 | The Mouse did not notice this question, but hurriedly went on, "'--found 306 | it advisable to go with Edgar Atheling to meet William and offer him the 307 | crown.'--How are you getting on now, my dear?" it continued, turning to 308 | Alice as it spoke. 309 | 310 | "As wet as ever," said Alice in a melancholy tone; "it doesn't seem to 311 | dry me at all." 312 | 313 | "In that case," said the Dodo solemnly, rising to its feet, "I move that 314 | the meeting adjourn, for the immediate adoption of more energetic 315 | remedies--" 316 | 317 | "Speak English!" said the Eaglet. "I don't know the meaning of half 318 | those long words, and, what's more, I don't believe you do either!" 319 | 320 | "What I was going to say," said the Dodo in an offended tone, "is that 321 | the best thing to get us dry would be a Caucus-race." 322 | 323 | "What _is_ a Caucus-race?" said Alice. 324 | 325 | [Illustration] 326 | 327 | "Why," said the Dodo, "the best way to explain it is to do it." First it 328 | marked out a race-course, in a sort of circle, and then all the party 329 | were placed along the course, here and there. There was no "One, two, 330 | three and away!" but they began running when they liked and left off 331 | when they liked, so that it was not easy to know when the race was over. 332 | However, when they had been running half an hour or so and were quite 333 | dry again, the Dodo suddenly called out, "The race is over!" and they 334 | all crowded 'round it, panting and asking, "But who has won?" 335 | 336 | This question the Dodo could not answer without a great deal of thought. 337 | At last it said, "_Everybody_ has won, and _all_ must have prizes." 338 | 339 | "But who is to give the prizes?" quite a chorus of voices asked. 340 | 341 | "Why, _she_, of course," said the Dodo, pointing to Alice with one 342 | finger; and the whole party at once crowded 'round her, calling out, in 343 | a confused way, "Prizes! Prizes!" 344 | 345 | Alice had no idea what to do, and in despair she put her hand into her 346 | pocket and pulled out a box of comfits (luckily the salt-water had not 347 | got into it) and handed them 'round as prizes. There was exactly one 348 | a-piece, all 'round. 349 | 350 | The next thing was to eat the comfits; this caused some noise and 351 | confusion, as the large birds complained that they could not taste 352 | theirs, and the small ones choked and had to be patted on the back. 353 | However, it was over at last and they sat down again in a ring and 354 | begged the Mouse to tell them something more. 355 | 356 | "You promised to tell me your history, you know," said Alice, "and why 357 | it is you hate--C and D," she added in a whisper, half afraid that it 358 | would be offended again. 359 | 360 | "Mine is a long and a sad tale!" said the Mouse, turning to Alice and 361 | sighing. 362 | 363 | "It _is_ a long tail, certainly," said Alice, looking down with wonder 364 | at the Mouse's tail, "but why do you call it sad?" 
And she kept on 365 | puzzling about it while the Mouse was speaking, so that her idea of the 366 | tale was something like this:-- 367 | 368 | "Fury said to 369 | a mouse, That 370 | he met in the 371 | house, 'Let 372 | us both go 373 | to law: _I_ 374 | will prosecute 375 | _you_.-- 376 | Come, I'll 377 | take no denial: 378 | We 379 | must have 380 | the trial; 381 | For really 382 | this morning 383 | I've 384 | nothing 385 | to do.' 386 | Said the 387 | mouse to 388 | the cur, 389 | 'Such a 390 | trial, dear 391 | sir, With 392 | no jury 393 | or judge, 394 | would 395 | be wasting 396 | our 397 | breath.' 398 | 'I'll be 399 | judge, 400 | I'll be 401 | jury,' 402 | said 403 | cunning 404 | old 405 | Fury; 406 | 'I'll 407 | try 408 | the 409 | whole 410 | cause, 411 | and 412 | condemn 413 | you to 414 | death.'" 415 | 416 | "You are not attending!" said the Mouse to Alice, severely. "What are 417 | you thinking of?" 418 | 419 | "I beg your pardon," said Alice very humbly, "you had got to the fifth 420 | bend, I think?" 421 | 422 | "You insult me by talking such nonsense!" said the Mouse, getting up and 423 | walking away. 424 | 425 | "Please come back and finish your story!" Alice called after it. And the 426 | others all joined in chorus, "Yes, please do!" But the Mouse only shook 427 | its head impatiently and walked a little quicker. 428 | 429 | "I wish I had Dinah, our cat, here!" said Alice. This caused a 430 | remarkable sensation among the party. Some of the birds hurried off at 431 | once, and a Canary called out in a trembling voice, to its children, 432 | "Come away, my dears! It's high time you were all in bed!" On various 433 | pretexts they all moved off and Alice was soon left alone. 434 | 435 | "I wish I hadn't mentioned Dinah! Nobody seems to like her down here and 436 | I'm sure she's the best cat in the world!" Poor Alice began to cry 437 | again, for she felt very lonely and low-spirited. In a little while, 438 | however, she again heard a little pattering of footsteps in the distance 439 | and she looked up eagerly. 440 | 441 | [Illustration] 442 | 443 | [Illustration] 444 | 445 | 446 | 447 | 448 | IV--THE RABBIT SENDS IN A LITTLE BILL 449 | 450 | 451 | It was the White Rabbit, trotting slowly back again and looking 452 | anxiously about as it went, as if it had lost something; Alice heard it 453 | muttering to itself, "The Duchess! The Duchess! Oh, my dear paws! Oh, my 454 | fur and whiskers! She'll get me executed, as sure as ferrets are 455 | ferrets! Where _can_ I have dropped them, I wonder?" Alice guessed in a 456 | moment that it was looking for the fan and the pair of white kid-gloves 457 | and she very good-naturedly began hunting about for them, but they were 458 | nowhere to be seen--everything seemed to have changed since her swim in 459 | the pool, and the great hall, with the glass table and the little door, 460 | had vanished completely. 461 | 462 | Very soon the Rabbit noticed Alice, and called to her, in an angry tone, 463 | "Why, Mary Ann, what _are_ you doing out here? Run home this moment and 464 | fetch me a pair of gloves and a fan! Quick, now!" 465 | 466 | "He took me for his housemaid!" said Alice, as she ran off. "How 467 | surprised he'll be when he finds out who I am!" As she said this, she 468 | came upon a neat little house, on the door of which was a bright brass 469 | plate with the name "W. RABBIT" engraved upon it. 
She went in without 470 | knocking and hurried upstairs, in great fear lest she should meet the 471 | real Mary Ann and be turned out of the house before she had found the 472 | fan and gloves. 473 | 474 | By this time, Alice had found her way into a tidy little room with a 475 | table in the window, and on it a fan and two or three pairs of tiny 476 | white kid-gloves; she took up the fan and a pair of the gloves and was 477 | just going to leave the room, when her eyes fell upon a little bottle 478 | that stood near the looking-glass. She uncorked it and put it to her 479 | lips, saying to herself, "I do hope it'll make me grow large again, for, 480 | really, I'm quite tired of being such a tiny little thing!" 481 | 482 | Before she had drunk half the bottle, she found her head pressing 483 | against the ceiling, and had to stoop to save her neck from being 484 | broken. She hastily put down the bottle, remarking, "That's quite 485 | enough--I hope I sha'n't grow any more." 486 | 487 | Alas! It was too late to wish that! She went on growing and growing and 488 | very soon she had to kneel down on the floor. Still she went on growing, 489 | and, as a last resource, she put one arm out of the window and one foot 490 | up the chimney, and said to herself, "Now I can do no more, whatever 491 | happens. What _will_ become of me?" 492 | 493 | [Illustration] 494 | 495 | Luckily for Alice, the little magic bottle had now had its full effect 496 | and she grew no larger. After a few minutes she heard a voice outside 497 | and stopped to listen. 498 | 499 | "Mary Ann! Mary Ann!" said the voice. "Fetch me my gloves this moment!" 500 | Then came a little pattering of feet on the stairs. Alice knew it was 501 | the Rabbit coming to look for her and she trembled till she shook the 502 | house, quite forgetting that she was now about a thousand times as large 503 | as the Rabbit and had no reason to be afraid of it. 504 | 505 | Presently the Rabbit came up to the door and tried to open it; but as 506 | the door opened inwards and Alice's elbow was pressed hard against it, 507 | that attempt proved a failure. Alice heard it say to itself, "Then I'll 508 | go 'round and get in at the window." 509 | 510 | "_That_ you won't!" thought Alice; and after waiting till she fancied 511 | she heard the Rabbit just under the window, she suddenly spread out her 512 | hand and made a snatch in the air. She did not get hold of anything, 513 | but she heard a little shriek and a fall and a crash of broken glass, 514 | from which she concluded that it was just possible it had fallen into a 515 | cucumber-frame or something of that sort. 516 | 517 | Next came an angry voice--the Rabbit's--"Pat! Pat! Where are you?" And 518 | then a voice she had never heard before, "Sure then, I'm here! Digging 519 | for apples, yer honor!" 520 | 521 | "Here! Come and help me out of this! Now tell me, Pat, what's that in 522 | the window?" 523 | 524 | "Sure, it's an arm, yer honor!" 525 | 526 | "Well, it's got no business there, at any rate; go and take it away!" 527 | 528 | There was a long silence after this and Alice could only hear whispers 529 | now and then, and at last she spread out her hand again and made another 530 | snatch in the air. This time there were _two_ little shrieks and more 531 | sounds of broken glass. "I wonder what they'll do next!" thought Alice. 532 | "As for pulling me out of the window, I only wish they _could_!" 533 | 534 | She waited for some time without hearing anything more. 
At last came a 535 | rumbling of little cart-wheels and the sound of a good many voices all 536 | talking together. She made out the words: "Where's the other ladder? 537 | Bill's got the other--Bill! Here, Bill! Will the roof bear?--Who's to go 538 | down the chimney?--Nay, _I_ sha'n't! _You_ do it! Here, Bill! The master 539 | says you've got to go down the chimney!" 540 | 541 | Alice drew her foot as far down the chimney as she could and waited till 542 | she heard a little animal scratching and scrambling about in the chimney 543 | close above her; then she gave one sharp kick and waited to see what 544 | would happen next. 545 | 546 | The first thing she heard was a general chorus of "There goes Bill!" 547 | then the Rabbit's voice alone--"Catch him, you by the hedge!" Then 548 | silence and then another confusion of voices--"Hold up his head--Brandy 549 | now--Don't choke him--What happened to you?" 550 | 551 | Last came a little feeble, squeaking voice, "Well, I hardly know--No 552 | more, thank ye. I'm better now--all I know is, something comes at me 553 | like a Jack-in-the-box and up I goes like a sky-rocket!" 554 | 555 | After a minute or two of silence, they began moving about again, and 556 | Alice heard the Rabbit say, "A barrowful will do, to begin with." 557 | 558 | "A barrowful of _what_?" thought Alice. But she had not long to doubt, 559 | for the next moment a shower of little pebbles came rattling in at the 560 | window and some of them hit her in the face. Alice noticed, with some 561 | surprise, that the pebbles were all turning into little cakes as they 562 | lay on the floor and a bright idea came into her head. "If I eat one of 563 | these cakes," she thought, "it's sure to make _some_ change in my size." 564 | 565 | So she swallowed one of the cakes and was delighted to find that she 566 | began shrinking directly. As soon as she was small enough to get through 567 | the door, she ran out of the house and found quite a crowd of little 568 | animals and birds waiting outside. They all made a rush at Alice the 569 | moment she appeared, but she ran off as hard as she could and soon found 570 | herself safe in a thick wood. 571 | 572 | [Illustration: "The Duchess tucked her arm affectionately into 573 | Alice's."] 574 | 575 | "The first thing I've got to do," said Alice to herself, as she 576 | wandered about in the wood, "is to grow to my right size again; and the 577 | second thing is to find my way into that lovely garden. I suppose I 578 | ought to eat or drink something or other, but the great question is 579 | 'What?'" 580 | 581 | Alice looked all around her at the flowers and the blades of grass, but 582 | she could not see anything that looked like the right thing to eat or 583 | drink under the circumstances. There was a large mushroom growing near 584 | her, about the same height as herself. She stretched herself up on 585 | tiptoe and peeped over the edge and her eyes immediately met those of a 586 | large blue caterpillar, that was sitting on the top, with its arms 587 | folded, quietly smoking a long hookah and taking not the smallest notice 588 | of her or of anything else. 589 | 590 | [Illustration] 591 | 592 | 593 | 594 | 595 | V--ADVICE FROM A CATERPILLAR 596 | 597 | 598 | At last the Caterpillar took the hookah out of its mouth and addressed 599 | Alice in a languid, sleepy voice. 600 | 601 | "Who are _you_?" said the Caterpillar. 
602 | 603 | [Illustration] 604 | 605 | Alice replied, rather shyly, "I--I hardly know, sir, just at present--at 606 | least I know who I _was_ when I got up this morning, but I think I must 607 | have changed several times since then." 608 | 609 | "What do you mean by that?" said the Caterpillar, sternly. "Explain 610 | yourself!" 611 | 612 | "I can't explain _myself_, I'm afraid, sir," said Alice, "because I'm 613 | not myself, you see--being so many different sizes in a day is very 614 | confusing." She drew herself up and said very gravely, "I think you 615 | ought to tell me who _you_ are, first." 616 | 617 | "Why?" said the Caterpillar. 618 | 619 | As Alice could not think of any good reason and the Caterpillar seemed 620 | to be in a _very_ unpleasant state of mind, she turned away. 621 | 622 | "Come back!" the Caterpillar called after her. "I've something important 623 | to say!" Alice turned and came back again. 624 | 625 | "Keep your temper," said the Caterpillar. 626 | 627 | "Is that all?" said Alice, swallowing down her anger as well as she 628 | could. 629 | 630 | "No," said the Caterpillar. 631 | 632 | It unfolded its arms, took the hookah out of its mouth again, and said, 633 | "So you think you're changed, do you?" 634 | 635 | "I'm afraid, I am, sir," said Alice. "I can't remember things as I 636 | used--and I don't keep the same size for ten minutes together!" 637 | 638 | "What size do you want to be?" asked the Caterpillar. 639 | 640 | "Oh, I'm not particular as to size," Alice hastily replied, "only one 641 | doesn't like changing so often, you know. I should like to be a _little_ 642 | larger, sir, if you wouldn't mind," said Alice. "Three inches is such a 643 | wretched height to be." 644 | 645 | "It is a very good height indeed!" said the Caterpillar angrily, rearing 646 | itself upright as it spoke (it was exactly three inches high). 647 | 648 | In a minute or two, the Caterpillar got down off the mushroom and 649 | crawled away into the grass, merely remarking, as it went, "One side 650 | will make you grow taller, and the other side will make you grow 651 | shorter." 652 | 653 | "One side of _what_? The other side of _what_?" thought Alice to 654 | herself. 655 | 656 | "Of the mushroom," said the Caterpillar, just as if she had asked it 657 | aloud; and in another moment, it was out of sight. 658 | 659 | Alice remained looking thoughtfully at the mushroom for a minute, trying 660 | to make out which were the two sides of it. At last she stretched her 661 | arms 'round it as far as they would go, and broke off a bit of the edge 662 | with each hand. 663 | 664 | "And now which is which?" she said to herself, and nibbled a little of 665 | the right-hand bit to try the effect. The next moment she felt a violent 666 | blow underneath her chin--it had struck her foot! 667 | 668 | She was a good deal frightened by this very sudden change, as she was 669 | shrinking rapidly; so she set to work at once to eat some of the other 670 | bit. Her chin was pressed so closely against her foot that there was 671 | hardly room to open her mouth; but she did it at last and managed to 672 | swallow a morsel of the left-hand bit.... 673 | 674 | "Come, my head's free at last!" said Alice; but all she could see, when 675 | she looked down, was an immense length of neck, which seemed to rise 676 | like a stalk out of a sea of green leaves that lay far below her. 677 | 678 | "Where _have_ my shoulders got to? And oh, my poor hands, how is it I 679 | can't see you?" 
She was delighted to find that her neck would bend 680 | about easily in any direction, like a serpent. She had just succeeded in 681 | curving it down into a graceful zigzag and was going to dive in among 682 | the leaves, when a sharp hiss made her draw back in a hurry--a large 683 | pigeon had flown into her face and was beating her violently with its 684 | wings. 685 | 686 | [Illustration] 687 | 688 | "Serpent!" cried the Pigeon. 689 | 690 | "I'm _not_ a serpent!" said Alice indignantly. "Let me alone!" 691 | 692 | "I've tried the roots of trees, and I've tried banks, and I've tried 693 | hedges," the Pigeon went on, "but those serpents! There's no pleasing 694 | them!" 695 | 696 | Alice was more and more puzzled. 697 | 698 | "As if it wasn't trouble enough hatching the eggs," said the Pigeon, 699 | "but I must be on the look-out for serpents, night and day! And just as 700 | I'd taken the highest tree in the wood," continued the Pigeon, raising 701 | its voice to a shriek, "and just as I was thinking I should be free of 702 | them at last, they must needs come wriggling down from the sky! Ugh, 703 | Serpent!" 704 | 705 | "But I'm _not_ a serpent, I tell you!" said Alice. "I'm a--I'm a--I'm a 706 | little girl," she added rather doubtfully, as she remembered the number 707 | of changes she had gone through that day. 708 | 709 | "You're looking for eggs, I know _that_ well enough," said the Pigeon; 710 | "and what does it matter to me whether you're a little girl or a 711 | serpent?" 712 | 713 | "It matters a good deal to _me_," said Alice hastily; "but I'm not 714 | looking for eggs, as it happens, and if I was, I shouldn't want 715 | _yours_--I don't like them raw." 716 | 717 | "Well, be off, then!" said the Pigeon in a sulky tone, as it settled 718 | down again into its nest. Alice crouched down among the trees as well as 719 | she could, for her neck kept getting entangled among the branches, and 720 | every now and then she had to stop and untwist it. After awhile she 721 | remembered that she still held the pieces of mushroom in her hands, and 722 | she set to work very carefully, nibbling first at one and then at the 723 | other, and growing sometimes taller and sometimes shorter, until she had 724 | succeeded in bringing herself down to her usual height. 725 | 726 | It was so long since she had been anything near the right size that it 727 | felt quite strange at first. "The next thing is to get into that 728 | beautiful garden--how _is_ that to be done, I wonder?" As she said this, 729 | she came suddenly upon an open place, with a little house in it about 730 | four feet high. "Whoever lives there," thought Alice, "it'll never do to 731 | come upon them _this_ size; why, I should frighten them out of their 732 | wits!" She did not venture to go near the house till she had brought 733 | herself down to nine inches high. 734 | 735 | 736 | 737 | 738 | VI--PIG AND PEPPER 739 | 740 | 741 | For a minute or two she stood looking at the house, when suddenly a 742 | footman in livery came running out of the wood (judging by his face 743 | only, she would have called him a fish)--and rapped loudly at the door 744 | with his knuckles. It was opened by another footman in livery, with a 745 | round face and large eyes like a frog. 746 | 747 | [Illustration] 748 | 749 | The Fish-Footman began by producing from under his arm a great letter, 750 | and this he handed over to the other, saying, in a solemn tone, "For the 751 | Duchess. An invitation from the Queen to play croquet." 
The 752 | Frog-Footman repeated, in the same solemn tone, "From the Queen. An 753 | invitation for the Duchess to play croquet." Then they both bowed low 754 | and their curls got entangled together. 755 | 756 | When Alice next peeped out, the Fish-Footman was gone, and the other was 757 | sitting on the ground near the door, staring stupidly up into the sky. 758 | Alice went timidly up to the door and knocked. 759 | 760 | "There's no sort of use in knocking," said the Footman, "and that for 761 | two reasons. First, because I'm on the same side of the door as you are; 762 | secondly, because they're making such a noise inside, no one could 763 | possibly hear you." And certainly there _was_ a most extraordinary noise 764 | going on within--a constant howling and sneezing, and every now and then 765 | a great crash, as if a dish or kettle had been broken to pieces. 766 | 767 | "How am I to get in?" asked Alice. 768 | 769 | "_Are_ you to get in at all?" said the Footman. "That's the first 770 | question, you know." 771 | 772 | Alice opened the door and went in. The door led right into a large 773 | kitchen, which was full of smoke from one end to the other; the Duchess 774 | was sitting on a three-legged stool in the middle, nursing a baby; the 775 | cook was leaning over the fire, stirring a large caldron which seemed to 776 | be full of soup. 777 | 778 | "There's certainly too much pepper in that soup!" Alice said to herself, 779 | as well as she could for sneezing. Even the Duchess sneezed 780 | occasionally; and as for the baby, it was sneezing and howling 781 | alternately without a moment's pause. The only two creatures in the 782 | kitchen that did _not_ sneeze were the cook and a large cat, which was 783 | grinning from ear to ear. 784 | 785 | "Please would you tell me," said Alice, a little timidly, "why your cat 786 | grins like that?" 787 | 788 | "It's a Cheshire-Cat," said the Duchess, "and that's why." 789 | 790 | "I didn't know that Cheshire-Cats always grinned; in fact, I didn't know 791 | that cats _could_ grin," said Alice. 792 | 793 | "You don't know much," said the Duchess, "and that's a fact." 794 | 795 | Just then the cook took the caldron of soup off the fire, and at once 796 | set to work throwing everything within her reach at the Duchess and the 797 | baby--the fire-irons came first; then followed a shower of saucepans, 798 | plates and dishes. The Duchess took no notice of them, even when they 799 | hit her, and the baby was howling so much already that it was quite 800 | impossible to say whether the blows hurt it or not. 801 | 802 | "Oh, _please_ mind what you're doing!" cried Alice, jumping up and down 803 | in an agony of terror. 804 | 805 | "Here! You may nurse it a bit, if you like!" the Duchess said to Alice, 806 | flinging the baby at her as she spoke. "I must go and get ready to play 807 | croquet with the Queen," and she hurried out of the room. 808 | 809 | Alice caught the baby with some difficulty, as it was a queer-shaped 810 | little creature and held out its arms and legs in all directions. "If I 811 | don't take this child away with me," thought Alice, "they're sure to 812 | kill it in a day or two. Wouldn't it be murder to leave it behind?" She 813 | said the last words out loud and the little thing grunted in reply. 814 | 815 | "If you're going to turn into a pig, my dear," said Alice, "I'll have 816 | nothing more to do with you. Mind now!" 
817 | 818 | Alice was just beginning to think to herself, "Now, what am I to do with 819 | this creature, when I get it home?" when it grunted again so violently 820 | that Alice looked down into its face in some alarm. This time there 821 | could be _no_ mistake about it--it was neither more nor less than a pig; 822 | so she set the little creature down and felt quite relieved to see it 823 | trot away quietly into the wood. 824 | 825 | Alice was a little startled by seeing the Cheshire-Cat sitting on a 826 | bough of a tree a few yards off. The Cat only grinned when it saw her. 827 | "Cheshire-Puss," began Alice, rather timidly, "would you please tell me 828 | which way I ought to go from here?" 829 | 830 | "In _that_ direction," the Cat said, waving the right paw 'round, "lives 831 | a Hatter; and in _that_ direction," waving the other paw, "lives a March 832 | Hare. Visit either you like; they're both mad." 833 | 834 | "But I don't want to go among mad people," Alice remarked. 835 | 836 | "Oh, you can't help that," said the Cat; "we're all mad here. Do you 837 | play croquet with the Queen to-day?" 838 | 839 | "I should like it very much," said Alice, "but I haven't been invited 840 | yet." 841 | 842 | "You'll see me there," said the Cat, and vanished. 843 | 844 | Alice had not gone much farther before she came in sight of the house of 845 | the March Hare; it was so large a house that she did not like to go near 846 | till she had nibbled some more of the left-hand bit of mushroom. 847 | 848 | 849 | 850 | 851 | VII--A MAD TEA-PARTY 852 | 853 | 854 | There was a table set out under a tree in front of the house, and the 855 | March Hare and the Hatter were having tea at it; a Dormouse was sitting 856 | between them, fast asleep. 857 | 858 | The table was a large one, but the three were all crowded together at 859 | one corner of it. "No room! No room!" they cried out when they saw Alice 860 | coming. "There's _plenty_ of room!" said Alice indignantly, and she sat 861 | down in a large arm-chair at one end of the table. 862 | 863 | The Hatter opened his eyes very wide on hearing this, but all he said 864 | was "Why is a raven like a writing-desk?" 865 | 866 | "I'm glad they've begun asking riddles--I believe I can guess that," she 867 | added aloud. 868 | 869 | "Do you mean that you think you can find out the answer to it?" said the 870 | March Hare. 871 | 872 | "Exactly so," said Alice. 873 | 874 | "Then you should say what you mean," the March Hare went on. 875 | 876 | "I do," Alice hastily replied; "at least--at least I mean what I 877 | say--that's the same thing, you know." 878 | 879 | "You might just as well say," added the Dormouse, which seemed to be 880 | talking in its sleep, "that 'I breathe when I sleep' is the same thing 881 | as 'I sleep when I breathe!'" 882 | 883 | "It _is_ the same thing with you," said the Hatter, and he poured a 884 | little hot tea upon its nose. The Dormouse shook its head impatiently 885 | and said, without opening its eyes, "Of course, of course; just what I 886 | was going to remark myself." 887 | 888 | [Illustration] 889 | 890 | "Have you guessed the riddle yet?" the Hatter said, turning to Alice 891 | again. 892 | 893 | "No, I give it up," Alice replied. "What's the answer?" 894 | 895 | "I haven't the slightest idea," said the Hatter. 896 | 897 | "Nor I," said the March Hare. 898 | 899 | Alice gave a weary sigh. "I think you might do something better with the 900 | time," she said, "than wasting it in asking riddles that have no 901 | answers." 
902 | 903 | "Take some more tea," the March Hare said to Alice, very earnestly. 904 | 905 | "I've had nothing yet," Alice replied in an offended tone, "so I can't 906 | take more." 907 | 908 | "You mean you can't take _less_," said the Hatter; "it's very easy to 909 | take _more_ than nothing." 910 | 911 | At this, Alice got up and walked off. The Dormouse fell asleep instantly 912 | and neither of the others took the least notice of her going, though she 913 | looked back once or twice; the last time she saw them, they were 914 | trying to put the Dormouse into the tea-pot. 915 | 916 | [Illustration: The Trial of the Knave of Hearts.] 917 | 918 | "At any rate, I'll never go _there_ again!" said Alice, as she picked 919 | her way through the wood. "It's the stupidest tea-party I ever was at in 920 | all my life!" Just as she said this, she noticed that one of the trees 921 | had a door leading right into it. "That's very curious!" she thought. "I 922 | think I may as well go in at once." And in she went. 923 | 924 | Once more she found herself in the long hall and close to the little 925 | glass table. Taking the little golden key, she unlocked the door that 926 | led into the garden. Then she set to work nibbling at the mushroom (she 927 | had kept a piece of it in her pocket) till she was about a foot high; 928 | then she walked down the little passage; and _then_--she found herself 929 | at last in the beautiful garden, among the bright flower-beds and the 930 | cool fountains. 931 | 932 | 933 | 934 | 935 | VIII--THE QUEEN'S CROQUET GROUND 936 | 937 | 938 | A large rose-tree stood near the entrance of the garden; the roses 939 | growing on it were white, but there were three gardeners at it, busily 940 | painting them red. Suddenly their eyes chanced to fall upon Alice, as 941 | she stood watching them. "Would you tell me, please," said Alice, a 942 | little timidly, "why you are painting those roses?" 943 | 944 | Five and Seven said nothing, but looked at Two. Two began, in a low 945 | voice, "Why, the fact is, you see, Miss, this here ought to have been a 946 | _red_ rose-tree, and we put a white one in by mistake; and, if the Queen 947 | was to find it out, we should all have our heads cut off, you know. So 948 | you see, Miss, we're doing our best, afore she comes, to--" At this 949 | moment, Five, who had been anxiously looking across the garden, called 950 | out, "The Queen! The Queen!" and the three gardeners instantly threw 951 | themselves flat upon their faces. There was a sound of many footsteps 952 | and Alice looked 'round, eager to see the Queen. 953 | 954 | First came ten soldiers carrying clubs, with their hands and feet at the 955 | corners: next the ten courtiers; these were ornamented all over with 956 | diamonds. After these came the royal children; there were ten of them, 957 | all ornamented with hearts. Next came the guests, mostly Kings and 958 | Queens, and among them Alice recognized the White Rabbit. Then followed 959 | the Knave of Hearts, carrying the King's crown on a crimson velvet 960 | cushion; and last of all this grand procession came THE KING AND THE 961 | QUEEN OF HEARTS. 962 | 963 | When the procession came opposite to Alice, they all stopped and looked 964 | at her, and the Queen said severely, "Who is this?" She said it to the 965 | Knave of Hearts, who only bowed and smiled in reply. 
966 | 967 | "My name is Alice, so please Your Majesty," said Alice very politely; 968 | but she added to herself, "Why, they're only a pack of cards, after 969 | all!" 970 | 971 | "Can you play croquet?" shouted the Queen. The question was evidently 972 | meant for Alice. 973 | 974 | "Yes!" said Alice loudly. 975 | 976 | "Come on, then!" roared the Queen. 977 | 978 | "It's--it's a very fine day!" said a timid voice to Alice. She was 979 | walking by the White Rabbit, who was peeping anxiously into her face. 980 | 981 | "Very," said Alice. "Where's the Duchess?" 982 | 983 | "Hush! Hush!" said the Rabbit. "She's under sentence of execution." 984 | 985 | "What for?" said Alice. 986 | 987 | "She boxed the Queen's ears--" the Rabbit began. 988 | 989 | "Get to your places!" shouted the Queen in a voice of thunder, and 990 | people began running about in all directions, tumbling up against each 991 | other. However, they got settled down in a minute or two, and the game 992 | began. 993 | 994 | Alice thought she had never seen such a curious croquet-ground in her 995 | life; it was all ridges and furrows. The croquet balls were live 996 | hedgehogs, and the mallets live flamingos and the soldiers had to double 997 | themselves up and stand on their hands and feet, to make the arches. 998 | 999 | The players all played at once, without waiting for turns, quarrelling 1000 | all the while and fighting for the hedgehogs; and in a very short time, 1001 | the Queen was in a furious passion and went stamping about and shouting, 1002 | "Off with his head!" or "Off with her head!" about once in a minute. 1003 | 1004 | "They're dreadfully fond of beheading people here," thought Alice; "the 1005 | great wonder is that there's anyone left alive!" 1006 | 1007 | She was looking about for some way of escape, when she noticed a curious 1008 | appearance in the air. "It's the Cheshire-Cat," she said to herself; 1009 | "now I shall have somebody to talk to." 1010 | 1011 | "How are you getting on?" said the Cat. 1012 | 1013 | "I don't think they play at all fairly," Alice said, in a rather 1014 | complaining tone; "and they all quarrel so dreadfully one can't hear 1015 | oneself speak--and they don't seem to have any rules in particular." 1016 | 1017 | "How do you like the Queen?" said the Cat in a low voice. 1018 | 1019 | "Not at all," said Alice. 1020 | 1021 | [Illustration] 1022 | 1023 | Alice thought she might as well go back and see how the game was going 1024 | on. So she went off in search of her hedgehog. The hedgehog was engaged 1025 | in a fight with another hedgehog, which seemed to Alice an excellent 1026 | opportunity for croqueting one of them with the other; the only 1027 | difficulty was that her flamingo was gone across to the other side of 1028 | the garden, where Alice could see it trying, in a helpless sort of way, 1029 | to fly up into a tree. She caught the flamingo and tucked it away under 1030 | her arm, that it might not escape again. 1031 | 1032 | Just then Alice ran across the Duchess (who was now out of prison). She 1033 | tucked her arm affectionately into Alice's and they walked off together. 1034 | Alice was very glad to find her in such a pleasant temper. She was a 1035 | little startled, however, when she heard the voice of the Duchess close 1036 | to her ear. "You're thinking about something, my dear, and that makes 1037 | you forget to talk." 1038 | 1039 | "The game's going on rather better now," Alice said, by way of keeping 1040 | up the conversation a little. 
1041 | 1042 | "'Tis so," said the Duchess; "and the moral of that is--'Oh, 'tis love, 1043 | 'tis love that makes the world go 'round!'" 1044 | 1045 | "Somebody said," Alice whispered, "that it's done by everybody minding 1046 | his own business!" 1047 | 1048 | "Ah, well! It means much the same thing," said the Duchess, digging her 1049 | sharp little chin into Alice's shoulder, as she added "and the moral of 1050 | _that_ is--'Take care of the sense and the sounds will take care of 1051 | themselves.'" 1052 | 1053 | To Alice's great surprise, the Duchess's arm that was linked into hers 1054 | began to tremble. Alice looked up and there stood the Queen in front of 1055 | them, with her arms folded, frowning like a thunderstorm! 1056 | 1057 | "Now, I give you fair warning," shouted the Queen, stamping on the 1058 | ground as she spoke, "either you or your head must be off, and that in 1059 | about half no time. Take your choice!" The Duchess took her choice, and 1060 | was gone in a moment. 1061 | 1062 | "Let's go on with the game," the Queen said to Alice; and Alice was too 1063 | much frightened to say a word, but slowly followed her back to the 1064 | croquet-ground. 1065 | 1066 | All the time they were playing, the Queen never left off quarreling with 1067 | the other players and shouting, "Off with his head!" or "Off with her 1068 | head!" By the end of half an hour or so, all the players, except the 1069 | King, the Queen and Alice, were in custody of the soldiers and under 1070 | sentence of execution. 1071 | 1072 | Then the Queen left off, quite out of breath, and walked away with 1073 | Alice. 1074 | 1075 | Alice heard the King say in a low voice to the company generally, "You 1076 | are all pardoned." 1077 | 1078 | Suddenly the cry "The Trial's beginning!" was heard in the distance, and 1079 | Alice ran along with the others. 1080 | 1081 | 1082 | 1083 | 1084 | IX--WHO STOLE THE TARTS? 1085 | 1086 | 1087 | The King and Queen of Hearts were seated on their throne when they 1088 | arrived, with a great crowd assembled about them--all sorts of little 1089 | birds and beasts, as well as the whole pack of cards: the Knave was 1090 | standing before them, in chains, with a soldier on each side to guard 1091 | him; and near the King was the White Rabbit, with a trumpet in one hand 1092 | and a scroll of parchment in the other. In the very middle of the court 1093 | was a table, with a large dish of tarts upon it. "I wish they'd get the 1094 | trial done," Alice thought, "and hand 'round the refreshments!" 1095 | 1096 | The judge, by the way, was the King and he wore his crown over his great 1097 | wig. "That's the jury-box," thought Alice; "and those twelve creatures 1098 | (some were animals and some were birds) I suppose they are the jurors." 1099 | 1100 | Just then the White Rabbit cried out "Silence in the court!" 1101 | 1102 | "Herald, read the accusation!" said the King. 1103 | 1104 | [Illustration] 1105 | 1106 | On this, the White Rabbit blew three blasts on the trumpet, then 1107 | unrolled the parchment-scroll and read as follows: 1108 | 1109 | "The Queen of Hearts, she made some tarts, 1110 | All on a summer day; 1111 | The Knave of Hearts, he stole those tarts 1112 | And took them quite away!" 1113 | 1114 | "Call the first witness," said the King; and the White Rabbit blew three 1115 | blasts on the trumpet and called out, "First witness!" 1116 | 1117 | The first witness was the Hatter. He came in with a teacup in one hand 1118 | and a piece of bread and butter in the other. 
1119 | 1120 | "You ought to have finished," said the King. "When did you begin?" 1121 | 1122 | The Hatter looked at the March Hare, who had followed him into the 1123 | court, arm in arm with the Dormouse. "Fourteenth of March, I _think_ it 1124 | was," he said. 1125 | 1126 | "Give your evidence," said the King, "and don't be nervous, or I'll have 1127 | you executed on the spot." 1128 | 1129 | This did not seem to encourage the witness at all; he kept shifting from 1130 | one foot to the other, looking uneasily at the Queen, and, in his 1131 | confusion, he bit a large piece out of his teacup instead of the bread 1132 | and butter. 1133 | 1134 | Just at this moment Alice felt a very curious sensation--she was 1135 | beginning to grow larger again. 1136 | 1137 | The miserable Hatter dropped his teacup and bread and butter and went 1138 | down on one knee. "I'm a poor man, Your Majesty," he began. 1139 | 1140 | "You're a _very_ poor _speaker_," said the King. 1141 | 1142 | "You may go," said the King, and the Hatter hurriedly left the court. 1143 | 1144 | "Call the next witness!" said the King. 1145 | 1146 | The next witness was the Duchess's cook. She carried the pepper-box in 1147 | her hand and the people near the door began sneezing all at once. 1148 | 1149 | "Give your evidence," said the King. 1150 | 1151 | "Sha'n't," said the cook. 1152 | 1153 | The King looked anxiously at the White Rabbit, who said, in a low voice, 1154 | "Your Majesty must cross-examine _this_ witness." 1155 | 1156 | "Well, if I must, I must," the King said. "What are tarts made of?" 1157 | 1158 | "Pepper, mostly," said the cook. 1159 | 1160 | For some minutes the whole court was in confusion and by the time they 1161 | had settled down again, the cook had disappeared. 1162 | 1163 | "Never mind!" said the King, "call the next witness." 1164 | 1165 | Alice watched the White Rabbit as he fumbled over the list. Imagine her 1166 | surprise when he read out, at the top of his shrill little voice, the 1167 | name "Alice!" 1168 | 1169 | 1170 | 1171 | 1172 | X--ALICE'S EVIDENCE 1173 | 1174 | 1175 | "Here!" cried Alice. She jumped up in such a hurry that she tipped over 1176 | the jury-box, upsetting all the jurymen on to the heads of the crowd 1177 | below. 1178 | 1179 | "Oh, I _beg_ your pardon!" she exclaimed in a tone of great dismay. 1180 | 1181 | "The trial cannot proceed," said the King, "until all the jurymen are 1182 | back in their proper places--_all_," he repeated with great emphasis, 1183 | looking hard at Alice. 1184 | 1185 | "What do you know about this business?" the King said to Alice. 1186 | 1187 | "Nothing whatever," said Alice. 1188 | 1189 | The King then read from his book: "Rule forty-two. _All persons more 1190 | than a mile high to leave the court_." 1191 | 1192 | "_I'm_ not a mile high," said Alice. 1193 | 1194 | "Nearly two miles high," said the Queen. 1195 | 1196 | [Illustration] 1197 | 1198 | "Well, I sha'n't go, at any rate," said Alice. 1199 | 1200 | The King turned pale and shut his note-book hastily. "Consider your 1201 | verdict," he said to the jury, in a low, trembling voice. 1202 | 1203 | "There's more evidence to come yet, please Your Majesty," said the White 1204 | Rabbit, jumping up in a great hurry. "This paper has just been picked 1205 | up. It seems to be a letter written by the prisoner to--to somebody." He 1206 | unfolded the paper as he spoke and added, "It isn't a letter, after all; 1207 | it's a set of verses." 
1208 | 1209 | "Please, Your Majesty," said the Knave, "I didn't write it and they 1210 | can't prove that I did; there's no name signed at the end." 1211 | 1212 | "You _must_ have meant some mischief, or else you'd have signed your 1213 | name like an honest man," said the King. There was a general clapping of 1214 | hands at this. 1215 | 1216 | "Read them," he added, turning to the White Rabbit. 1217 | 1218 | There was dead silence in the court whilst the White Rabbit read out the 1219 | verses. 1220 | 1221 | "That's the most important piece of evidence we've heard yet," said the 1222 | King. 1223 | 1224 | "_I_ don't believe there's an atom of meaning in it," ventured Alice. 1225 | 1226 | "If there's no meaning in it," said the King, "that saves a world of 1227 | trouble, you know, as we needn't try to find any. Let the jury consider 1228 | their verdict." 1229 | 1230 | "No, no!" said the Queen. "Sentence first--verdict afterwards." 1231 | 1232 | "Stuff and nonsense!" said Alice loudly. "The idea of having the 1233 | sentence first!" 1234 | 1235 | "Hold your tongue!" said the Queen, turning purple. 1236 | 1237 | "I won't!" said Alice. 1238 | 1239 | "Off with her head!" the Queen shouted at the top of her voice. Nobody 1240 | moved. 1241 | 1242 | "Who cares for _you_?" said Alice (she had grown to her full size by 1243 | this time). "You're nothing but a pack of cards!" 1244 | 1245 | [Illustration] 1246 | 1247 | At this, the whole pack rose up in the air and came flying down upon 1248 | her; she gave a little scream, half of fright and half of anger, and 1249 | tried to beat them off, and found herself lying on the bank, with her 1250 | head in the lap of her sister, who was gently brushing away some dead 1251 | leaves that had fluttered down from the trees upon her face. 1252 | 1253 | "Wake up, Alice dear!" said her sister. "Why, what a long sleep you've 1254 | had!" 1255 | 1256 | "Oh, I've had such a curious dream!" said Alice. And she told her 1257 | sister, as well as she could remember them, all these strange adventures 1258 | of hers that you have just been reading about. Alice got up and ran off, 1259 | thinking while she ran, as well she might, what a wonderful dream it had 1260 | been. 1261 | 1262 | [Illustration] 1263 | 1264 | 1265 | 1266 | 1267 | 1268 | End of the Project Gutenberg EBook of Alice in Wonderland, by Lewis Carroll 1269 | 1270 | *** END OF THIS PROJECT GUTENBERG EBOOK ALICE IN WONDERLAND *** 1271 | 1272 | ***** This file should be named 19033.txt or 19033.zip ***** 1273 | This and all associated files of various formats will be found in: 1274 | http://www.gutenberg.org/1/9/0/3/19033/ 1275 | 1276 | Produced by Jason Isbell, Irma Spehar, and the Online 1277 | Distributed Proofreading Team at http://www.pgdp.net 1278 | 1279 | 1280 | Updated editions will replace the previous one--the old editions 1281 | will be renamed. 1282 | 1283 | Creating the works from public domain print editions means that no 1284 | one owns a United States copyright in these works, so the Foundation 1285 | (and you!) can copy and distribute it in the United States without 1286 | permission and without paying copyright royalties. Special rules, 1287 | set forth in the General Terms of Use part of this license, apply to 1288 | copying and distributing Project Gutenberg-tm electronic works to 1289 | protect the PROJECT GUTENBERG-tm concept and trademark. 
Project 1290 | Gutenberg is a registered trademark, and may not be used if you 1291 | charge for the eBooks, unless you receive specific permission. If you 1292 | do not charge anything for copies of this eBook, complying with the 1293 | rules is very easy. You may use this eBook for nearly any purpose 1294 | such as creation of derivative works, reports, performances and 1295 | research. They may be modified and printed and given away--you may do 1296 | practically ANYTHING with public domain eBooks. Redistribution is 1297 | subject to the trademark license, especially commercial 1298 | redistribution. 1299 | 1300 | 1301 | 1302 | *** START: FULL LICENSE *** 1303 | 1304 | THE FULL PROJECT GUTENBERG LICENSE 1305 | PLEASE READ THIS BEFORE YOU DISTRIBUTE OR USE THIS WORK 1306 | 1307 | To protect the Project Gutenberg-tm mission of promoting the free 1308 | distribution of electronic works, by using or distributing this work 1309 | (or any other work associated in any way with the phrase "Project 1310 | Gutenberg"), you agree to comply with all the terms of the Full Project 1311 | Gutenberg-tm License (available with this file or online at 1312 | http://gutenberg.org/license). 1313 | 1314 | 1315 | Section 1. General Terms of Use and Redistributing Project Gutenberg-tm 1316 | electronic works 1317 | 1318 | 1.A. By reading or using any part of this Project Gutenberg-tm 1319 | electronic work, you indicate that you have read, understand, agree to 1320 | and accept all the terms of this license and intellectual property 1321 | (trademark/copyright) agreement. If you do not agree to abide by all 1322 | the terms of this agreement, you must cease using and return or destroy 1323 | all copies of Project Gutenberg-tm electronic works in your possession. 1324 | If you paid a fee for obtaining a copy of or access to a Project 1325 | Gutenberg-tm electronic work and you do not agree to be bound by the 1326 | terms of this agreement, you may obtain a refund from the person or 1327 | entity to whom you paid the fee as set forth in paragraph 1.E.8. 1328 | 1329 | 1.B. "Project Gutenberg" is a registered trademark. It may only be 1330 | used on or associated in any way with an electronic work by people who 1331 | agree to be bound by the terms of this agreement. There are a few 1332 | things that you can do with most Project Gutenberg-tm electronic works 1333 | even without complying with the full terms of this agreement. See 1334 | paragraph 1.C below. There are a lot of things you can do with Project 1335 | Gutenberg-tm electronic works if you follow the terms of this agreement 1336 | and help preserve free future access to Project Gutenberg-tm electronic 1337 | works. See paragraph 1.E below. 1338 | 1339 | 1.C. The Project Gutenberg Literary Archive Foundation ("the Foundation" 1340 | or PGLAF), owns a compilation copyright in the collection of Project 1341 | Gutenberg-tm electronic works. Nearly all the individual works in the 1342 | collection are in the public domain in the United States. If an 1343 | individual work is in the public domain in the United States and you are 1344 | located in the United States, we do not claim a right to prevent you from 1345 | copying, distributing, performing, displaying or creating derivative 1346 | works based on the work as long as all references to Project Gutenberg 1347 | are removed. 
Of course, we hope that you will support the Project 1348 | Gutenberg-tm mission of promoting free access to electronic works by 1349 | freely sharing Project Gutenberg-tm works in compliance with the terms of 1350 | this agreement for keeping the Project Gutenberg-tm name associated with 1351 | the work. You can easily comply with the terms of this agreement by 1352 | keeping this work in the same format with its attached full Project 1353 | Gutenberg-tm License when you share it without charge with others. 1354 | 1355 | 1.D. The copyright laws of the place where you are located also govern 1356 | what you can do with this work. Copyright laws in most countries are in 1357 | a constant state of change. If you are outside the United States, check 1358 | the laws of your country in addition to the terms of this agreement 1359 | before downloading, copying, displaying, performing, distributing or 1360 | creating derivative works based on this work or any other Project 1361 | Gutenberg-tm work. The Foundation makes no representations concerning 1362 | the copyright status of any work in any country outside the United 1363 | States. 1364 | 1365 | 1.E. Unless you have removed all references to Project Gutenberg: 1366 | 1367 | 1.E.1. The following sentence, with active links to, or other immediate 1368 | access to, the full Project Gutenberg-tm License must appear prominently 1369 | whenever any copy of a Project Gutenberg-tm work (any work on which the 1370 | phrase "Project Gutenberg" appears, or with which the phrase "Project 1371 | Gutenberg" is associated) is accessed, displayed, performed, viewed, 1372 | copied or distributed: 1373 | 1374 | This eBook is for the use of anyone anywhere at no cost and with 1375 | almost no restrictions whatsoever. You may copy it, give it away or 1376 | re-use it under the terms of the Project Gutenberg License included 1377 | with this eBook or online at www.gutenberg.org 1378 | 1379 | 1.E.2. If an individual Project Gutenberg-tm electronic work is derived 1380 | from the public domain (does not contain a notice indicating that it is 1381 | posted with permission of the copyright holder), the work can be copied 1382 | and distributed to anyone in the United States without paying any fees 1383 | or charges. If you are redistributing or providing access to a work 1384 | with the phrase "Project Gutenberg" associated with or appearing on the 1385 | work, you must comply either with the requirements of paragraphs 1.E.1 1386 | through 1.E.7 or obtain permission for the use of the work and the 1387 | Project Gutenberg-tm trademark as set forth in paragraphs 1.E.8 or 1388 | 1.E.9. 1389 | 1390 | 1.E.3. If an individual Project Gutenberg-tm electronic work is posted 1391 | with the permission of the copyright holder, your use and distribution 1392 | must comply with both paragraphs 1.E.1 through 1.E.7 and any additional 1393 | terms imposed by the copyright holder. Additional terms will be linked 1394 | to the Project Gutenberg-tm License for all works posted with the 1395 | permission of the copyright holder found at the beginning of this work. 1396 | 1397 | 1.E.4. Do not unlink or detach or remove the full Project Gutenberg-tm 1398 | License terms from this work, or any files containing a part of this 1399 | work or any other work associated with Project Gutenberg-tm. 1400 | 1401 | 1.E.5. 
Do not copy, display, perform, distribute or redistribute this 1402 | electronic work, or any part of this electronic work, without 1403 | prominently displaying the sentence set forth in paragraph 1.E.1 with 1404 | active links or immediate access to the full terms of the Project 1405 | Gutenberg-tm License. 1406 | 1407 | 1.E.6. You may convert to and distribute this work in any binary, 1408 | compressed, marked up, nonproprietary or proprietary form, including any 1409 | word processing or hypertext form. However, if you provide access to or 1410 | distribute copies of a Project Gutenberg-tm work in a format other than 1411 | "Plain Vanilla ASCII" or other format used in the official version 1412 | posted on the official Project Gutenberg-tm web site (www.gutenberg.org), 1413 | you must, at no additional cost, fee or expense to the user, provide a 1414 | copy, a means of exporting a copy, or a means of obtaining a copy upon 1415 | request, of the work in its original "Plain Vanilla ASCII" or other 1416 | form. Any alternate format must include the full Project Gutenberg-tm 1417 | License as specified in paragraph 1.E.1. 1418 | 1419 | 1.E.7. Do not charge a fee for access to, viewing, displaying, 1420 | performing, copying or distributing any Project Gutenberg-tm works 1421 | unless you comply with paragraph 1.E.8 or 1.E.9. 1422 | 1423 | 1.E.8. You may charge a reasonable fee for copies of or providing 1424 | access to or distributing Project Gutenberg-tm electronic works provided 1425 | that 1426 | 1427 | - You pay a royalty fee of 20% of the gross profits you derive from 1428 | the use of Project Gutenberg-tm works calculated using the method 1429 | you already use to calculate your applicable taxes. The fee is 1430 | owed to the owner of the Project Gutenberg-tm trademark, but he 1431 | has agreed to donate royalties under this paragraph to the 1432 | Project Gutenberg Literary Archive Foundation. Royalty payments 1433 | must be paid within 60 days following each date on which you 1434 | prepare (or are legally required to prepare) your periodic tax 1435 | returns. Royalty payments should be clearly marked as such and 1436 | sent to the Project Gutenberg Literary Archive Foundation at the 1437 | address specified in Section 4, "Information about donations to 1438 | the Project Gutenberg Literary Archive Foundation." 1439 | 1440 | - You provide a full refund of any money paid by a user who notifies 1441 | you in writing (or by e-mail) within 30 days of receipt that s/he 1442 | does not agree to the terms of the full Project Gutenberg-tm 1443 | License. You must require such a user to return or 1444 | destroy all copies of the works possessed in a physical medium 1445 | and discontinue all use of and all access to other copies of 1446 | Project Gutenberg-tm works. 1447 | 1448 | - You provide, in accordance with paragraph 1.F.3, a full refund of any 1449 | money paid for a work or a replacement copy, if a defect in the 1450 | electronic work is discovered and reported to you within 90 days 1451 | of receipt of the work. 1452 | 1453 | - You comply with all other terms of this agreement for free 1454 | distribution of Project Gutenberg-tm works. 1455 | 1456 | 1.E.9. 
If you wish to charge a fee or distribute a Project Gutenberg-tm 1457 | electronic work or group of works on different terms than are set 1458 | forth in this agreement, you must obtain permission in writing from 1459 | both the Project Gutenberg Literary Archive Foundation and Michael 1460 | Hart, the owner of the Project Gutenberg-tm trademark. Contact the 1461 | Foundation as set forth in Section 3 below. 1462 | 1463 | 1.F. 1464 | 1465 | 1.F.1. Project Gutenberg volunteers and employees expend considerable 1466 | effort to identify, do copyright research on, transcribe and proofread 1467 | public domain works in creating the Project Gutenberg-tm 1468 | collection. Despite these efforts, Project Gutenberg-tm electronic 1469 | works, and the medium on which they may be stored, may contain 1470 | "Defects," such as, but not limited to, incomplete, inaccurate or 1471 | corrupt data, transcription errors, a copyright or other intellectual 1472 | property infringement, a defective or damaged disk or other medium, a 1473 | computer virus, or computer codes that damage or cannot be read by 1474 | your equipment. 1475 | 1476 | 1.F.2. LIMITED WARRANTY, DISCLAIMER OF DAMAGES - Except for the "Right 1477 | of Replacement or Refund" described in paragraph 1.F.3, the Project 1478 | Gutenberg Literary Archive Foundation, the owner of the Project 1479 | Gutenberg-tm trademark, and any other party distributing a Project 1480 | Gutenberg-tm electronic work under this agreement, disclaim all 1481 | liability to you for damages, costs and expenses, including legal 1482 | fees. YOU AGREE THAT YOU HAVE NO REMEDIES FOR NEGLIGENCE, STRICT 1483 | LIABILITY, BREACH OF WARRANTY OR BREACH OF CONTRACT EXCEPT THOSE 1484 | PROVIDED IN PARAGRAPH F3. YOU AGREE THAT THE FOUNDATION, THE 1485 | TRADEMARK OWNER, AND ANY DISTRIBUTOR UNDER THIS AGREEMENT WILL NOT BE 1486 | LIABLE TO YOU FOR ACTUAL, DIRECT, INDIRECT, CONSEQUENTIAL, PUNITIVE OR 1487 | INCIDENTAL DAMAGES EVEN IF YOU GIVE NOTICE OF THE POSSIBILITY OF SUCH 1488 | DAMAGE. 1489 | 1490 | 1.F.3. LIMITED RIGHT OF REPLACEMENT OR REFUND - If you discover a 1491 | defect in this electronic work within 90 days of receiving it, you can 1492 | receive a refund of the money (if any) you paid for it by sending a 1493 | written explanation to the person you received the work from. If you 1494 | received the work on a physical medium, you must return the medium with 1495 | your written explanation. The person or entity that provided you with 1496 | the defective work may elect to provide a replacement copy in lieu of a 1497 | refund. If you received the work electronically, the person or entity 1498 | providing it to you may choose to give you a second opportunity to 1499 | receive the work electronically in lieu of a refund. If the second copy 1500 | is also defective, you may demand a refund in writing without further 1501 | opportunities to fix the problem. 1502 | 1503 | 1.F.4. Except for the limited right of replacement or refund set forth 1504 | in paragraph 1.F.3, this work is provided to you 'AS-IS' WITH NO OTHER 1505 | WARRANTIES OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO 1506 | WARRANTIES OF MERCHANTIBILITY OR FITNESS FOR ANY PURPOSE. 1507 | 1508 | 1.F.5. Some states do not allow disclaimers of certain implied 1509 | warranties or the exclusion or limitation of certain types of damages. 
1510 | If any disclaimer or limitation set forth in this agreement violates the 1511 | law of the state applicable to this agreement, the agreement shall be 1512 | interpreted to make the maximum disclaimer or limitation permitted by 1513 | the applicable state law. The invalidity or unenforceability of any 1514 | provision of this agreement shall not void the remaining provisions. 1515 | 1516 | 1.F.6. INDEMNITY - You agree to indemnify and hold the Foundation, the 1517 | trademark owner, any agent or employee of the Foundation, anyone 1518 | providing copies of Project Gutenberg-tm electronic works in accordance 1519 | with this agreement, and any volunteers associated with the production, 1520 | promotion and distribution of Project Gutenberg-tm electronic works, 1521 | harmless from all liability, costs and expenses, including legal fees, 1522 | that arise directly or indirectly from any of the following which you do 1523 | or cause to occur: (a) distribution of this or any Project Gutenberg-tm 1524 | work, (b) alteration, modification, or additions or deletions to any 1525 | Project Gutenberg-tm work, and (c) any Defect you cause. 1526 | 1527 | 1528 | Section 2. Information about the Mission of Project Gutenberg-tm 1529 | 1530 | Project Gutenberg-tm is synonymous with the free distribution of 1531 | electronic works in formats readable by the widest variety of computers 1532 | including obsolete, old, middle-aged and new computers. It exists 1533 | because of the efforts of hundreds of volunteers and donations from 1534 | people in all walks of life. 1535 | 1536 | Volunteers and financial support to provide volunteers with the 1537 | assistance they need, is critical to reaching Project Gutenberg-tm's 1538 | goals and ensuring that the Project Gutenberg-tm collection will 1539 | remain freely available for generations to come. In 2001, the Project 1540 | Gutenberg Literary Archive Foundation was created to provide a secure 1541 | and permanent future for Project Gutenberg-tm and future generations. 1542 | To learn more about the Project Gutenberg Literary Archive Foundation 1543 | and how your efforts and donations can help, see Sections 3 and 4 1544 | and the Foundation web page at http://www.pglaf.org. 1545 | 1546 | 1547 | Section 3. Information about the Project Gutenberg Literary Archive 1548 | Foundation 1549 | 1550 | The Project Gutenberg Literary Archive Foundation is a non profit 1551 | 501(c)(3) educational corporation organized under the laws of the 1552 | state of Mississippi and granted tax exempt status by the Internal 1553 | Revenue Service. The Foundation's EIN or federal tax identification 1554 | number is 64-6221541. Its 501(c)(3) letter is posted at 1555 | http://pglaf.org/fundraising. Contributions to the Project Gutenberg 1556 | Literary Archive Foundation are tax deductible to the full extent 1557 | permitted by U.S. federal laws and your state's laws. 1558 | 1559 | The Foundation's principal office is located at 4557 Melan Dr. S. 1560 | Fairbanks, AK, 99712., but its volunteers and employees are scattered 1561 | throughout numerous locations. Its business office is located at 1562 | 809 North 1500 West, Salt Lake City, UT 84116, (801) 596-1887, email 1563 | business@pglaf.org. Email contact links and up to date contact 1564 | information can be found at the Foundation's web site and official 1565 | page at http://pglaf.org 1566 | 1567 | For additional contact information: 1568 | Dr. Gregory B. 
Newby 1569 | Chief Executive and Director 1570 | gbnewby@pglaf.org 1571 | 1572 | 1573 | Section 4. Information about Donations to the Project Gutenberg 1574 | Literary Archive Foundation 1575 | 1576 | Project Gutenberg-tm depends upon and cannot survive without wide 1577 | spread public support and donations to carry out its mission of 1578 | increasing the number of public domain and licensed works that can be 1579 | freely distributed in machine readable form accessible by the widest 1580 | array of equipment including outdated equipment. Many small donations 1581 | ($1 to $5,000) are particularly important to maintaining tax exempt 1582 | status with the IRS. 1583 | 1584 | The Foundation is committed to complying with the laws regulating 1585 | charities and charitable donations in all 50 states of the United 1586 | States. Compliance requirements are not uniform and it takes a 1587 | considerable effort, much paperwork and many fees to meet and keep up 1588 | with these requirements. We do not solicit donations in locations 1589 | where we have not received written confirmation of compliance. To 1590 | SEND DONATIONS or determine the status of compliance for any 1591 | particular state visit http://pglaf.org 1592 | 1593 | While we cannot and do not solicit contributions from states where we 1594 | have not met the solicitation requirements, we know of no prohibition 1595 | against accepting unsolicited donations from donors in such states who 1596 | approach us with offers to donate. 1597 | 1598 | International donations are gratefully accepted, but we cannot make 1599 | any statements concerning tax treatment of donations received from 1600 | outside the United States. U.S. laws alone swamp our small staff. 1601 | 1602 | Please check the Project Gutenberg Web pages for current donation 1603 | methods and addresses. Donations are accepted in a number of other 1604 | ways including checks, online payments and credit card donations. 1605 | To donate, please visit: http://pglaf.org/donate 1606 | 1607 | 1608 | Section 5. General Information About Project Gutenberg-tm electronic 1609 | works. 1610 | 1611 | Professor Michael S. Hart is the originator of the Project Gutenberg-tm 1612 | concept of a library of electronic works that could be freely shared 1613 | with anyone. For thirty years, he produced and distributed Project 1614 | Gutenberg-tm eBooks with only a loose network of volunteer support. 1615 | 1616 | 1617 | Project Gutenberg-tm eBooks are often created from several printed 1618 | editions, all of which are confirmed as Public Domain in the U.S. 1619 | unless a copyright notice is included. Thus, we do not necessarily 1620 | keep eBooks in compliance with any particular paper edition. 1621 | 1622 | 1623 | Most people start at our Web site which has the main PG search facility: 1624 | 1625 | http://www.gutenberg.org 1626 | 1627 | This Web site includes information about Project Gutenberg-tm, 1628 | including how to make donations to the Project Gutenberg Literary 1629 | Archive Foundation, how to help produce our new eBooks, and how to 1630 | subscribe to our email newsletter to hear about new eBooks. 
1631 | -------------------------------------------------------------------------------- /my_rnn_pytorched/charRNN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import torch\n", 12 | "import torch.nn as nn\n", 13 | "import torch.optim as optim\n", 14 | "from torch.autograd import Variable\n", 15 | "# load functional definitions for activations like softmax\n", 16 | "import torch.nn.functional as F\n", 17 | "# print utilities with json\n", 18 | "import json\n", 19 | "import numpy as np\n", 20 | "import matplotlib.pyplot as plt\n", 21 | "from random import shuffle\n", 22 | "import timeit\n", 23 | "%matplotlib inline" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "# 1. Quick intro: The simplicity of running an RNN in PyTorch" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "# We instantiate an RNN layer with the RNN API from nn module\n", 40 | "rnn_layer = nn.RNN(input_size=3, hidden_size=3, num_layers=1, batch_first=True)\n", 41 | "\n", 42 | "# Make some fake data\n", 43 | "fake_data = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [0, 1, 1]]\n", 44 | "fake_data = torch.FloatTensor(fake_data)\n", 45 | "print('fake data tensor size: ', fake_data.size())" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "metadata": {}, 51 | "source": [ 52 | "### 1) Iterate with your own loop (yesss)" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": null, 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "H_t = []\n", 62 | "h_t_1 = None\n", 63 | "for x_t in fake_data:\n", 64 | " # turn into 3-D shape [batch_size, seq_len, feat_dim=3]\n", 65 | " x_t = Variable(x_t).view(1, -1, 3)\n", 66 | " h_t, h_t_1 = rnn_layer(x_t, h_t_1)\n", 67 | " H_t.append(h_t.data[0, 0, :].numpy())\n", 68 | "H_t = np.array(H_t)\n", 69 | "print(H_t)" 70 | ] 71 | }, 72 | { 73 | "cell_type": "markdown", 74 | "metadata": {}, 75 | "source": [ 76 | "### 2) Structure your data batching, and leave it to the framework (yesss too)" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "metadata": {}, 83 | "outputs": [], 84 | "source": [ 85 | "h_t_1 = None\n", 86 | "x_t = Variable(fake_data).view(1, -1, 3)\n", 87 | "H_t, h_t_1 = rnn_layer(x_t, h_t_1)\n", 88 | "print(H_t.data.numpy())" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "# 2. Going Deeper: Character prediction RNN-LSTM\n", 96 | "\n", 97 | "#### We'll now proceed to read a text file from our local dir." 
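, "\n", "\n", "*(Added note: this notebook targets the 0.3-era PyTorch API, hence `Variable`, `volatile=True`, `loss.data[0]` and `clip_grad_norm`; on PyTorch 0.4+ you would drop `Variable`, wrap inference in `torch.no_grad()`, read scalar losses with `loss.item()` and call `clip_grad_norm_` instead.)*"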
98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "MIN_CHARS=5\n", 107 | "with open('aliceinwonderland.txt') as txt_f:\n", 108 | " # read the text file\n", 109 | " text = txt_f.read().lower()\n", 110 | "# get sentences on line break (yes, a bit vague, but ok for this)\n", 111 | "text_sents = list(filter(lambda x: len(x) > 0, text.split('\n')))\n", 112 | "print('text corpus length in chars:', len(text))\n", 113 | "print('total sentences: ', len(text_sents))\n", 114 | "\n", 115 | "max_len = max(len(s) for s in text_sents)\n", 116 | "min_len = min(len(s) for s in text_sents)\n", 117 | "print('max sentence len: ', max_len)\n", 118 | "print('min sentence len: ', min_len)\n", 119 | "\n", 120 | "print('Example 10 sentences: ')\n", 121 | "for i, sent in enumerate(text_sents[:10], start=1):\n", 122 | " print('{} >> {}'.format(i, sent))\n", 123 | "\n", 124 | "chars = sorted(list(set(text)))\n", 125 | "print('total chars:', len(chars))\n", 126 | "char2idx = dict((c, i) for i, c in enumerate(chars))\n", 127 | "idx2char = dict((i, c) for i, c in enumerate(chars))" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "# We'll print char2idx to see what the mapping looks like\n", 137 | "print(json.dumps(char2idx, indent=2))" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": null, 143 | "metadata": {}, 144 | "outputs": [], 145 | "source": [ 146 | "# STATIC GRAPH PROCEDURE to inject data\n", 147 | "# taken from Keras example https://github.com/fchollet/keras/blob/master/examples/lstm_text_generation.py\n", 148 | "# ----------------------------------------------------------------\n", 149 | "# cut the text in semi-redundant sequences of maxlen characters\n", 150 | "maxlen = 40\n", 151 | "step = 3\n", 152 | "sentences = []\n", 153 | "next_chars = []\n", 154 | "for i in range(0, len(text) - maxlen, step):\n", 155 | " sentences.append(text[i: i + maxlen])\n", 156 | " next_chars.append(text[i + maxlen])\n", 157 | "print('nb sequences:', len(sentences))\n", 158 | "\n", 159 | "print('Vectorization...')\n", 160 | "X = np.zeros((len(sentences), maxlen, len(chars)), dtype=np.bool)\n", 161 | "y = np.zeros((len(sentences), len(chars)), dtype=np.bool)\n", 162 | "for i, sentence in enumerate(sentences):\n", 163 | " for t, char in enumerate(sentence):\n", 164 | " X[i, t, char2idx[char]] = 1\n", 165 | " y[i, char2idx[next_chars[i]]] = 1\n", 166 | "print('Done vectorizing...')\n", 167 | "print('X data tensor shape: ', X.shape)\n", 168 | "print('y data tensor shape: ', y.shape)" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "**Note how we had to fix a maximum sequence length (40), building a tensor X of fixed-size input char chunks and a tensor y holding, for each chunk, the next char x[t] to predict**\n", 176 | "\n", 177 | "### Gimme some PyTorch, please\n", 178 | "\n", 179 | "Now we can unleash PyTorch's capabilities to define the char RNN very simply, and process sequences **with true variable length!**" 180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": null, 185 | "metadata": { 186 | "collapsed": true 187 | }, 188 | "outputs": [], 189 | "source": [ 190 | "# Make a sentence encoder helper function\n", 191 | "def encode_sent(sent):\n", 192 | " x_seq = []\n", 193 | " for ch in sent:\n", 194 | " x_seq.append(char2idx[ch])\n", 195 | " return x_seq"
196 | ] 197 | }, 198 | { 199 | "cell_type": "code", 200 | "execution_count": null, 201 | "metadata": {}, 202 | "outputs": [], 203 | "source": [ 204 | "X = []\n", 205 | "for i, sentence in enumerate(text_sents):\n", 206 | " # encode each sentence into its char integer code from char2idx dictionary\n", 207 | " X.append(encode_sent(sentence))\n", 208 | "print('Resulting encoded sequences: ', len(X))\n", 209 | "print('Example 3 encoded sequences: ')\n", 210 | "for n, sent in enumerate(X[:3], start=1):\n", 211 | " print(sent)" 212 | ] 213 | }, 214 | { 215 | "cell_type": "markdown", 216 | "metadata": {}, 217 | "source": [ 218 | "### Building our char-RNN" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "class charRNN(nn.Module):\n", 228 | " def __init__(self, char_vocab_size, emb_size=15,\n", 229 | " rnn_size=128, rnn_layers=1, rnn_dropout=0.):\n", 230 | " super(charRNN, self).__init__()\n", 231 | " # build character Embedding layer\n", 232 | " self.emb = nn.Embedding(char_vocab_size, emb_size)\n", 233 | " self.rnn_layers = rnn_layers\n", 234 | " self.rnn_size = rnn_size\n", 235 | " # LSTM layers\n", 236 | " self.lstm = nn.LSTM(input_size=emb_size,\n", 237 | " hidden_size=rnn_size,\n", 238 | " num_layers=rnn_layers,\n", 239 | " dropout=rnn_dropout,\n", 240 | " batch_first=True,\n", 241 | " bidirectional=False)\n", 242 | " # FC output layer into num classes (vocab size)\n", 243 | " self.fc = nn.Linear(rnn_size, char_vocab_size)\n", 244 | " \n", 245 | " def forward(self, seq, states=None):\n", 246 | " # input tensor is of shape [batch_size, seq_len]\n", 247 | " # it is a LongTensor containing an integer idx per char per seq\n", 248 | " assert len(seq.size()) == 2, seq.size()\n", 249 | " # project seq through embedding layer\n", 250 | " emb_ch = self.emb(seq)\n", 251 | " # emb_ch ~ [batch_size, seq_len, emb_size]\n", 252 | " H_t, states = self.lstm(emb_ch, states)\n", 253 | " # H_t ~ [batch_size, seq_len, rnn_size]\n", 254 | " # unroll tensor to 2-D so the FC layer applies to every timestep at once\n", 255 | " H_t_u = H_t.contiguous().view(-1, H_t.size(-1))\n", 256 | " # H_t_u ~ [batch_size x seq_len, rnn_size]\n", 257 | " y_t = F.log_softmax(self.fc(H_t_u), dim=-1)\n", 258 | " # return predicted log-probs and rnn states\n", 259 | " return y_t, states\n", 260 | "\n", 261 | " def init_hidden_zero(self, curr_bsz):\n", 262 | " return (Variable(torch.zeros(self.rnn_layers, curr_bsz, self.rnn_size)),\n", 263 | " Variable(torch.zeros(self.rnn_layers, curr_bsz, self.rnn_size)))\n", 264 | " \n", 265 | "# instantiate our LSTM model\n", 266 | "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n", 267 | "print(char_lstm)\n", 268 | "\n", 269 | "# HELPER FUNCTION WE'LL USE LATER (WE'LL SEE WHERE)\n", 270 | "def repackage_hidden(h):\n", 271 | " # https://github.com/pytorch/examples/blob/master/word_language_model/main.py\n", 272 | " \"\"\"Wraps hidden states in new Variables, to detach them from their history.\"\"\"\n", 273 | " if type(h) == Variable:\n", 274 | " return Variable(h.data)\n", 275 | " else:\n", 276 | " return tuple(repackage_hidden(v) for v in h)" 277 | ] 278 | },
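{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# (added sketch, not in the original notebook) dry-run a forward pass to sanity-check shapes;\n", "# everything used here (char_lstm, encode_sent, text_sents, Variable, torch) is defined in earlier cells\n", "probe = Variable(torch.LongTensor(encode_sent(text_sents[0])).view(1, -1))\n", "log_probs, (h_T, c_T) = char_lstm(probe)\n", "print('log-prob output size: ', log_probs.size()) # [seq_len, vocab_size]\n", "print('final hidden state size: ', h_T.size()) # [rnn_layers, batch=1, rnn_size]" ] },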
279 | { 280 | "cell_type": "code", 281 | "execution_count": null, 282 | "metadata": { 283 | "collapsed": true 284 | }, 285 | "outputs": [], 286 | "source": [ 287 | "# build a sentence decoder function to simplify our future calls to check predictions\n", 288 | "def decode_pred_sent(pred_sent):\n", 289 | " dec_sent = ''\n", 290 | " for idx in pred_sent:\n", 291 | " dec_sent += idx2char[idx]\n", 292 | " return dec_sent" 293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": null, 298 | "metadata": {}, 299 | "outputs": [], 300 | "source": [ 301 | "# Make a text prediction function\n", 302 | "def predict_text_from_seed(model, text_seed='alice was beginning ', num_preds=20):\n", 303 | " model.eval()\n", 304 | " # warm up the lstm with some seed text, then predict the next num_preds chars\n", 305 | " enc_seed = encode_sent(text_seed)\n", 306 | "\n", 307 | " # first run the warm-up pass to store the seed into the LSTM states\n", 308 | " states = None\n", 309 | " for code in enc_seed:\n", 310 | " # note the volatile flag: we just want inference, not backprop\n", 311 | " code = Variable(torch.LongTensor([code]).view(1, 1), volatile=True)\n", 312 | " pred_ch, states = model(code, states)\n", 313 | "\n", 314 | " pred_ch = torch.max(pred_ch, dim=-1)[1]\n", 315 | " # the |> symbol serves as a simple marker to show where the prediction starts\n", 316 | " resulting_pred = text_seed + '|>' + idx2char[pred_ch.data[0]]\n", 317 | " # Now iterate char by char in a feedback fashion\n", 318 | " for n in range(num_preds):\n", 319 | " pred_ch, states = model(pred_ch.view(1, 1), states)\n", 320 | " pred_ch = torch.max(pred_ch, dim=-1)[1]\n", 321 | " pred_idx = pred_ch.data[0]\n", 322 | " resulting_pred += idx2char[pred_idx]\n", 323 | " return resulting_pred\n", 324 | "\n", 325 | "# predict some chars w/ RANDOM weights\n", 326 | "print('resulting text: ', predict_text_from_seed(char_lstm, 'i found a door ', num_preds=100))" 327 | ] 328 | }, 329 | { 330 | "cell_type": "markdown", 331 | "metadata": {}, 332 | "source": [ 333 | "**Observe how we can iterate over the LSTM with external for loops, feeding the data back from time-step to time-step, in contrast to the static-graph TensorFlow methodology.**" 334 | ] 335 | },
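{ "cell_type": "markdown", "metadata": {}, "source": [ "*(Added sketch, not in the original notebook: `predict_text_from_seed` decodes greedily with `torch.max`, which often gets stuck repeating the same few chars. A common alternative samples the next char from the temperature-scaled output distribution; everything used below (`char_lstm`, `encode_sent`, `idx2char`, `np`, `torch`, `Variable`) is already defined in earlier cells, and `temp` below 1 makes samples more conservative.)*" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def sample_text_from_seed(model, text_seed='alice was beginning ', num_preds=20, temp=1.0):\n", "    model.eval()\n", "    # warm up the states with the seed, as in predict_text_from_seed above\n", "    states = None\n", "    for code in encode_sent(text_seed):\n", "        code = Variable(torch.LongTensor([code]).view(1, 1), volatile=True)\n", "        log_probs, states = model(code, states)\n", "    resulting_pred = text_seed + '|>'\n", "    for n in range(num_preds):\n", "        # the model returns log-probs: scale by 1/temp, exponentiate and renormalize\n", "        probs = np.exp(log_probs.data.numpy()[0] / temp).astype('float64')\n", "        probs /= probs.sum()\n", "        pred_idx = int(np.random.choice(len(probs), p=probs))\n", "        resulting_pred += idx2char[pred_idx]\n", "        # feed the sampled char back in\n", "        code = Variable(torch.LongTensor([pred_idx]).view(1, 1), volatile=True)\n", "        log_probs, states = model(code, states)\n", "    return resulting_pred\n", "\n", "# with random weights this prints gibberish; re-run after training below\n", "print(sample_text_from_seed(char_lstm, 'i found a door ', num_preds=50, temp=0.8))" ] },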
336 | { 337 | "cell_type": "markdown", 338 | "metadata": {}, 339 | "source": [ 340 | "### Training the model!" 341 | ] 342 | }, 343 | { 344 | "cell_type": "code", 345 | "execution_count": null, 346 | "metadata": {}, 347 | "outputs": [], 348 | "source": [ 349 | "def train_seqbyseq_epoch(X, curr_epoch, model, opt, log_freq, stateful=False):\n", 350 | " # A simple training loop sequence by sequence will serve our purposes\n", 351 | " # YES! THAT SIMPLE!!\n", 352 | " # specify we are in train mode (this will set Dropout/BN behaviors to train mode)\n", 353 | " model.train()\n", 354 | " states = model.init_hidden_zero(1)\n", 355 | " tr_losses = []\n", 356 | " for bidx, x in enumerate(X, start=1):\n", 357 | " # clean previous gradients in graph\n", 358 | " opt.zero_grad()\n", 359 | " # build input x and output y\n", 360 | " # output is just a shifted by 1 timestep version of x\n", 361 | " y = x[1:]\n", 362 | " x = x[:-1]\n", 363 | " # format the PyTorch Variables\n", 364 | " y = Variable(torch.LongTensor(y)).view(-1) # y ~ [seq_len]\n", 365 | " x = Variable(torch.LongTensor(x)).view(1, -1) # x ~ [1, seq_len]\n", 366 | " # (1) forward sequence\n", 367 | " if not stateful:\n", 368 | " pred, _ = model(x, states)\n", 369 | " else:\n", 370 | " pred, states = model(x, states)\n", 371 | " states = repackage_hidden(states)\n", 373 | " \n", 374 | " # (2) compute loss: Negative Log Likelihood of correct classes\n", 375 | " loss = F.nll_loss(pred, y)\n", 376 | " # (3) backprop gradients\n", 377 | " loss.backward()\n", 378 | " # (4) update parameters\n", 379 | " opt.step()\n", 380 | " if bidx % log_freq == 0 or bidx >= len(X):\n", 381 | " tr_losses.append(loss.data[0])\n", 382 | " print('Batch {:4d}/{:4d} (epoch {:3d}) loss {:.3f}'.format(bidx,\n", 383 | " len(X),\n", 384 | " curr_epoch,\n", 385 | " tr_losses[-1]))\n", 386 | " return tr_losses\n", 387 | " \n", 388 | "import timeit # time epochs\n", 389 | "\n", 390 | "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n", 391 | "NUM_EPOCHS=20\n", 392 | "LOG_FREQ=400\n", 393 | "# we can limit the samples in the dataset to speed up training (this is a toy example, remember)\n", 394 | "max_samples = 100\n", 395 | "tr_X = X[:max_samples]\n", 396 | "opt = optim.Adam(char_lstm.parameters(), lr=0.001)\n", 397 | "beg_t = timeit.default_timer()\n", 398 | "tr_losses = []\n", 399 | "for epoch in range(NUM_EPOCHS):\n", 400 | " # shuffle sentences\n", 401 | " shuffle(tr_X)\n", 402 | " tr_losses += train_seqbyseq_epoch(tr_X, epoch, char_lstm, opt, LOG_FREQ, stateful=False)\n", 403 | " end_t = timeit.default_timer()\n", 404 | " print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n", 405 | " beg_t = end_t\n", 406 | " # see predictions change\n", 407 | " print('Epoch {} result {}'.format(epoch, '-' * 30))\n", 408 | " print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))" 409 | ] 410 | }, 411 | { 412 | "cell_type": "code", 413 | "execution_count": null, 414 | "metadata": {}, 415 | "outputs": [], 416 | "source": [ 417 | "print(predict_text_from_seed(char_lstm, 'she found a door ', num_preds=100))\n", 418 | "print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=100))" 419 | ] 420 | }, 421 | { 422 | "cell_type": "code", 423 | "execution_count": null, 424 | "metadata": { 425 | "scrolled": true 426 | }, 427 | "outputs": [], 428 | "source": [ 429 | "plt.title('Seqbyseq training curve')\n", 430 | "plt.ylabel('NLLLoss')\n", 431 | "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n", 432 | "plt.plot(tr_losses)" 433 | ] 434 | },
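{ "cell_type": "markdown", "metadata": {}, "source": [ "*(Added note, not in the original notebook: each update above comes from a single sentence, i.e. batch size 1, so the gradient estimate has high variance and the logged loss jumps around; averaging the loss over a minibatch of sentences, as done next, reduces that variance and smooths the curve.)*" ] },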
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Well, look at that learning noise... :( Let's smooth the learning itself with better updates"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def train_batches_epoch(X, batch_size, curr_epoch, model, opt, log_freq):\n",
    "    # We still need batching in PyTorch to smooth out the SGD updates,\n",
    "    # BUT each batch can have a different length in the sequence dimension!!\n",
    "    # specify we are in train mode (this sets Dropout/BN behaviors to train mode)\n",
    "    model.train()\n",
    "\n",
    "    n_batches = int(np.ceil(len(X) / batch_size))\n",
    "    tr_losses = []\n",
    "    for bidx, b_beg in enumerate(range(0, len(X), batch_size), start=1):\n",
    "        left = min(len(X) - b_beg, batch_size)\n",
    "        states = model.init_hidden_zero(left)\n",
    "        # select a batch of input/output sequences\n",
    "        x_batch = X[b_beg:b_beg + left]\n",
    "        # get max_len and left-pad the shorter seqs (with index 0)\n",
    "        max_batch_len = max(len(s) for s in x_batch)\n",
    "        for s_idx in range(len(x_batch)):\n",
    "            x_batch[s_idx] = [0] * (max_batch_len - len(x_batch[s_idx])) + x_batch[s_idx]\n",
    "        x_batch = np.array(x_batch, dtype=np.int64)\n",
    "        y_batch = x_batch[:, 1:]\n",
    "        x_batch = x_batch[:, :-1]\n",
    "        # clean previous gradients in graph\n",
    "        opt.zero_grad()\n",
    "        # format the PyTorch Variables\n",
    "        y = Variable(torch.from_numpy(y_batch).contiguous()).view(-1)         # y ~ [batch_size * seq_len]\n",
    "        x = Variable(torch.from_numpy(x_batch).contiguous()).view(left, -1)   # x ~ [batch_size, seq_len]\n",
    "        # (1) forward sequence\n",
    "        pred, _ = model(x, states)\n",
    "\n",
    "        # (2) compute loss: Negative Log Likelihood of correct classes\n",
    "        loss = F.nll_loss(pred, y)\n",
    "        # (3) backprop gradients\n",
    "        loss.backward()\n",
    "        # (4) update parameters\n",
    "        opt.step()\n",
    "        if bidx % log_freq == 0 or bidx >= n_batches:\n",
    "            tr_losses.append(loss.data[0])\n",
    "            print('Batch {:4d}/{:4d} (epoch {:3d}) loss {:.3f}'.format(bidx, n_batches, curr_epoch, tr_losses[-1]))\n",
    "    return tr_losses\n",
    "\n",
    "\n",
    "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n",
    "NUM_EPOCHS = 20\n",
    "LOG_FREQ = 400\n",
    "# we can limit the samples in the dataset to speed up training (this is a toy example, remember)\n",
    "max_samples = 100\n",
    "tr_X = X[:max_samples]\n",
    "opt = optim.Adam(char_lstm.parameters(), lr=0.001)\n",
    "beg_t = timeit.default_timer()\n",
    "b_tr_losses = []\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    # shuffle sentences\n",
    "    shuffle(tr_X)\n",
    "    b_tr_losses += train_batches_epoch(tr_X, 10, epoch, char_lstm, opt, LOG_FREQ)\n",
    "    end_t = timeit.default_timer()\n",
    "    print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n",
    "    beg_t = end_t\n",
    "    # see how the predictions change\n",
    "    print('Epoch {} result {}'.format(epoch, '-' * 30))\n",
    "    print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.title('Batched training curve')\n",
    "plt.ylabel('NLLLoss')\n",
    "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n",
    "plt.plot(b_tr_losses, label='batched learning')\n",
    "plt.plot(tr_losses, label='sample-wise learning')\n",
    "plt.legend()"
   ]
  },
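  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A caveat worth noting: we pad with index `0`, which is also a real character id, so padded positions contribute to the loss as if they were data. A hedged sketch (an addition, also one of the proposals at the end of the notebook) of how those positions could be excluded inside `train_batches_epoch`, in place of the `F.nll_loss` line. It assumes a dedicated `PAD_IDX` and that `pred` holds log-probabilities, as `F.nll_loss` expects:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch only: mask padded positions out of the NLL (PAD_IDX is an assumption)\n",
    "PAD_IDX = 0\n",
    "per_pos_nll = -pred.gather(1, y.view(-1, 1)).squeeze(1)  # NLL at each position\n",
    "mask = (y != PAD_IDX).float()                            # 1 on real chars, 0 on pad\n",
    "masked_loss = (per_pos_nll * mask).sum() / mask.sum()    # mean over real positions"
   ]
  },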
"plt.title('Batched training curve')\n", 521 | "plt.ylabel('NLLLoss')\n", 522 | "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n", 523 | "plt.plot(b_tr_losses, label='batched learning')\n", 524 | "plt.plot(tr_losses, label='sample-wise learning')\n", 525 | "plt.legend()" 526 | ] 527 | }, 528 | { 529 | "cell_type": "markdown", 530 | "metadata": {}, 531 | "source": [ 532 | "### Last stage: limiting the max sequence length with statefulness" 533 | ] 534 | }, 535 | { 536 | "cell_type": "code", 537 | "execution_count": null, 538 | "metadata": { 539 | "collapsed": true 540 | }, 541 | "outputs": [], 542 | "source": [ 543 | "def train_stateful_maxlen_batches_epoch(X, batch_size, max_len, curr_epoch, model, opt, log_freq, clip=0.):\n", 544 | " # specify we are in train mode (this will set Dropout/BN behaviors to train mode)\n", 545 | " model.train()\n", 546 | " # build a super long sequence out of all samples concatenated\n", 547 | " X_long = []\n", 548 | " for x in X:\n", 549 | " X_long += x\n", 550 | " # trim to multiple of seqlen x batch_size\n", 551 | " X_long = X_long[:batch_size * max_len * (len(X_long) // (batch_size * max_len))]\n", 552 | " X_arr = np.array(X_long, dtype=np.int64).reshape((-1, 1))\n", 553 | " X_arr = X_arr.reshape((batch_size, -1, 1))\n", 554 | " X_arr = np.split(X_arr, X_arr.shape[1] // max_len, axis=1)\n", 555 | " X_arr = np.concatenate(X_arr, axis=0)\n", 556 | " # print('X_arr shape: ', X_arr.shape)\n", 557 | " n_batches = int(np.ceil(X_arr.shape[0] / batch_size))\n", 558 | " # print('Number of batches with batch_size {}: {}'.format(batch_size, n_batches))\n", 559 | " tr_losses = []\n", 560 | " states = model.init_hidden_zero(batch_size)\n", 561 | " for bidx, b_beg in enumerate(range(0, X_arr.shape[0], batch_size), start=1):\n", 562 | " # select batch of sequences of outputs and inputs\n", 563 | " x_batch = X_arr[b_beg:b_beg + batch_size]\n", 564 | " x_batch = np.array(x_batch, dtype=np.int64)\n", 565 | " y_batch = x_batch[:, 1:]\n", 566 | " x_batch = x_batch[:, :-1]\n", 567 | " # clean previous gradients in graph\n", 568 | " opt.zero_grad()\n", 569 | " # format the PyTorch Variables\n", 570 | " y = Variable(torch.from_numpy(y_batch).contiguous()).view(-1) # y ~ [batch_size x seq_len]\n", 571 | " x = Variable(torch.from_numpy(x_batch).contiguous()).view(batch_size, -1) # x ~ [batch_size, seq_len]\n", 572 | " # (1) forward sequence\n", 573 | " pred, states = model(x, states)\n", 574 | " states = repackage_hidden(states)\n", 575 | " # (2) compute loss: Negative Log Likelihood of correct classes\n", 576 | " loss = F.nll_loss(pred, y)\n", 577 | " # (3) backprop gradients\n", 578 | " loss.backward()\n", 579 | " if clip > 0:\n", 580 | " # `clip_grad_norm` helps prevent the exploding gradient problem in RNNs / LSTMs.\n", 581 | " torch.nn.utils.clip_grad_norm(model.parameters(), clip)\n", 582 | "\n", 583 | " # (4) update parameters\n", 584 | " opt.step()\n", 585 | " if bidx % log_freq == 0 or bidx >= n_batches:\n", 586 | " tr_losses.append(loss.data[0])\n", 587 | " print('Batch {:4d}/{:4d} (epoch {:3d}) loss {:.3f}'.format(bidx,\n", 588 | " n_batches,\n", 589 | " curr_epoch,\n", 590 | " tr_losses[-1]))\n", 591 | " return tr_losses" 592 | ] 593 | }, 594 | { 595 | "cell_type": "code", 596 | "execution_count": null, 597 | "metadata": { 598 | "collapsed": true 599 | }, 600 | "outputs": [], 601 | "source": [ 602 | "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n", 603 | "NUM_EPOCHS=20\n", 604 | "LOG_FREQ=400\n", 605 | "# we can limit the samples in the dataset 
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n",
    "NUM_EPOCHS = 20\n",
    "LOG_FREQ = 400\n",
    "# we can limit the samples in the dataset to speed up training (this is a toy example, remember)\n",
    "max_samples = 100\n",
    "tr_X = X[:max_samples]\n",
    "opt = optim.Adam(char_lstm.parameters(), lr=0.001)\n",
    "beg_t = timeit.default_timer()\n",
    "st_tr_losses = []\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    # shuffle sentences\n",
    "    shuffle(tr_X)\n",
    "    st_tr_losses += train_stateful_maxlen_batches_epoch(tr_X, 10, 35, epoch, char_lstm, opt, LOG_FREQ)\n",
    "    end_t = timeit.default_timer()\n",
    "    print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n",
    "    beg_t = end_t\n",
    "    # see how the predictions change\n",
    "    print('Epoch {} result {}'.format(epoch, '-' * 30))\n",
    "    print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.title('Stateful Batched training curve')\n",
    "plt.ylabel('NLLLoss')\n",
    "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n",
    "plt.plot(b_tr_losses, label='batched learning')\n",
    "plt.plot(st_tr_losses, label='stateful batched learning')\n",
    "plt.plot(tr_losses, label='sample-wise learning')\n",
    "plt.legend()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Now max_len = 15, shorter to backprop through fewer time-steps\n",
    "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n",
    "NUM_EPOCHS = 20\n",
    "LOG_FREQ = 400\n",
    "# we can limit the samples in the dataset to speed up training (this is a toy example, remember)\n",
    "max_samples = 100\n",
    "tr_X = X[:max_samples]\n",
    "opt = optim.RMSprop(char_lstm.parameters(), lr=0.01)\n",
    "beg_t = timeit.default_timer()\n",
    "st_sh_tr_losses = []\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    # shuffle sentences\n",
    "    shuffle(tr_X)\n",
    "    st_sh_tr_losses += train_stateful_maxlen_batches_epoch(tr_X, 10, 15, epoch, char_lstm, opt, LOG_FREQ)\n",
    "    end_t = timeit.default_timer()\n",
    "    print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n",
    "    beg_t = end_t\n",
    "    # see how the predictions change\n",
    "    print('Epoch {} result {}'.format(epoch, '-' * 30))\n",
    "    print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.title('Stateful Batched training curve')\n",
    "plt.ylabel('NLLLoss')\n",
    "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n",
    "plt.plot(b_tr_losses, label='batched learning')\n",
    "plt.plot(st_tr_losses, label='stateful-35 batched learning')\n",
    "plt.plot(st_sh_tr_losses, label='stateful-15 batched learning')\n",
    "plt.plot(tr_losses, label='sample-wise learning')\n",
    "plt.legend()"
   ]
  },
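  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Optional aside (an addition to the notebook): the NLL values above are in nats. Dividing by `ln(2)` gives bits-per-character, and `exp(NLL)` gives per-character perplexity, two common ways to read char-level language model losses."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# read the last logged loss as bits-per-char and perplexity\n",
    "last_nll = st_sh_tr_losses[-1]\n",
    "print('bits per char: {:.3f}'.format(last_nll / np.log(2)))\n",
    "print('perplexity   : {:.3f}'.format(np.exp(last_nll)))"
   ]
  },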
"tr_X = X[:max_samples]\n", 695 | "opt = optim.RMSprop(char_lstm.parameters(), lr=0.01)\n", 696 | "beg_t = timeit.default_timer()\n", 697 | "st_sh_sh_tr_losses = []\n", 698 | "for epoch in range(NUM_EPOCHS):\n", 699 | " # shuffle sentences\n", 700 | " shuffle(tr_X)\n", 701 | " st_sh_sh_tr_losses += train_stateful_maxlen_batches_epoch(tr_X, 10, 10, epoch, char_lstm, opt, 400)\n", 702 | " end_t = timeit.default_timer()\n", 703 | " print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n", 704 | " beg_t = end_t\n", 705 | " # see predictions change\n", 706 | " print('Epoch {} result {}'.format(epoch, '-' * 30))\n", 707 | " print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))" 708 | ] 709 | }, 710 | { 711 | "cell_type": "code", 712 | "execution_count": null, 713 | "metadata": {}, 714 | "outputs": [], 715 | "source": [ 716 | "plt.title('Stateful Batched training curve')\n", 717 | "plt.ylabel('NLLLoss')\n", 718 | "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n", 719 | "plt.plot(b_tr_losses, label='batched learning')\n", 720 | "plt.plot(st_tr_losses, label='stateful-35 batched learning')\n", 721 | "plt.plot(st_sh_tr_losses, label='stateful-15 batched learning')\n", 722 | "plt.plot(st_sh_sh_tr_losses, label='stateful-10 batched learning')\n", 723 | "plt.plot(tr_losses, label='sample-wise learning')\n", 724 | "plt.legend()" 725 | ] 726 | }, 727 | { 728 | "cell_type": "code", 729 | "execution_count": null, 730 | "metadata": {}, 731 | "outputs": [], 732 | "source": [ 733 | "# Now max_len = 15, shorter to backprop less tsteps\n", 734 | "char_lstm = charRNN(len(char2idx), rnn_size=100, rnn_dropout=0.)\n", 735 | "NUM_EPOCHS=20\n", 736 | "LOG_FREQ=400\n", 737 | "# we can limit the samples in the dataset to speed up training (this is a toy example, remember)\n", 738 | "max_samples = 100\n", 739 | "tr_X = X[:max_samples]\n", 740 | "opt = optim.RMSprop(char_lstm.parameters(), lr=0.01)\n", 741 | "beg_t = timeit.default_timer()\n", 742 | "st_cn_tr_losses = []\n", 743 | "for epoch in range(NUM_EPOCHS):\n", 744 | " # shuffle sentences\n", 745 | " shuffle(tr_X)\n", 746 | " st_cn_tr_losses += train_stateful_maxlen_batches_epoch(tr_X, 10, 35, epoch, char_lstm, opt, 400, 0.2)\n", 747 | " end_t = timeit.default_timer()\n", 748 | " print('Elapsed time for epoch {:3d}: {:.3f} s'.format(epoch, end_t - beg_t))\n", 749 | " beg_t = end_t\n", 750 | " # see predictions change\n", 751 | " print('Epoch {} result {}'.format(epoch, '-' * 30))\n", 752 | " print(predict_text_from_seed(char_lstm, text_sents[1], num_preds=30))" 753 | ] 754 | }, 755 | { 756 | "cell_type": "code", 757 | "execution_count": null, 758 | "metadata": {}, 759 | "outputs": [], 760 | "source": [ 761 | "print(predict_text_from_seed(char_lstm, 'she found a door ', num_preds=50))\n", 762 | "print(predict_text_from_seed(char_lstm, 'she went ', num_preds=50))\n", 763 | "print(predict_text_from_seed(char_lstm, 'when ', num_preds=50))\n", 764 | "print(predict_text_from_seed(char_lstm, 'a rabbit ', num_preds=50))\n", 765 | "print(predict_text_from_seed(char_lstm, 'a golden key ', num_preds=50))" 766 | ] 767 | }, 768 | { 769 | "cell_type": "code", 770 | "execution_count": null, 771 | "metadata": {}, 772 | "outputs": [], 773 | "source": [ 774 | "plt.title('Stateful Batched training curve')\n", 775 | "plt.ylabel('NLLLoss')\n", 776 | "plt.xlabel('Log tick of freq {}'.format(LOG_FREQ))\n", 777 | "plt.plot(b_tr_losses, label='batched learning')\n", 778 | "plt.plot(st_tr_losses, label='stateful-35 batched learning')\n", 779 | 
"plt.plot(st_cn_tr_losses, label='stateful-35 cnorm 0.2 batched learning')\n", 780 | "plt.plot(st_sh_tr_losses, label='stateful-15 batched learning')\n", 781 | "plt.plot(st_sh_sh_tr_losses, label='stateful-10 batched learning')\n", 782 | "plt.plot(tr_losses, label='sample-wise learning')\n", 783 | "plt.legend()" 784 | ] 785 | }, 786 | { 787 | "cell_type": "markdown", 788 | "metadata": {}, 789 | "source": [ 790 | "### Proposals to do \n", 791 | "\n", 792 | "* Change the dataset into another txt file\n", 793 | "* Introduce a sequence length filtering in model forward, to **exclude padding positions** in sequence when batching with padding\n", 794 | "* Implement a noisy initial hidden state with normal distribution ~ N(0, I)\n", 795 | "* Implement a **learnable initial hidden state** (not just zero vector)\n", 796 | "* Implement the option to use GRU layers instead of LSTM in the model (BEWARE with cell differences, like states)\n", 797 | "* Implement the option to use RNN layers instead of LSTM in the model (BEWARE with cell differences, like states)\n", 798 | "* Compare the **performance of RNN vs GRU vs LSTM**, training for 20 epochs with the desired amount of data\n", 799 | "* Implement **stateful batched trainer** method (making sample i-th from batch b-th continue in sample i-th from batch (b+1)-th" 800 | ] 801 | } 802 | ], 803 | "metadata": { 804 | "kernelspec": { 805 | "display_name": "Python 3", 806 | "language": "python", 807 | "name": "python3" 808 | }, 809 | "language_info": { 810 | "codemirror_mode": { 811 | "name": "ipython", 812 | "version": 3 813 | }, 814 | "file_extension": ".py", 815 | "mimetype": "text/x-python", 816 | "name": "python", 817 | "nbconvert_exporter": "python", 818 | "pygments_lexer": "ipython3", 819 | "version": "3.6.1" 820 | } 821 | }, 822 | "nbformat": 4, 823 | "nbformat_minor": 2 824 | } 825 | -------------------------------------------------------------------------------- /my_rnn_pytorched/dynamic_graph.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/santi-pdp/pytorch_tutorials/a38a8addc84159d89182f6f98dc6b387514cb523/my_rnn_pytorched/dynamic_graph.gif --------------------------------------------------------------------------------