├── Lectures
│   ├── Lecture1.pdf
│   ├── Lecture2.pdf
│   ├── Lecture3.pdf
│   ├── Lecture4.pdf
│   └── Lecture5.pdf
├── README.md
└── Tutorials
    ├── Tutorial1.ipynb
    ├── Tutorial2.ipynb
    ├── Tutorial3.ipynb
    ├── Tutorial4.ipynb
    └── Tutorial5.ipynb

/Lectures/Lecture1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eladhoffer/DeepLearningCourse/e158bbadf8097fdadc05cd9572299a46f428377d/Lectures/Lecture1.pdf
--------------------------------------------------------------------------------

/Lectures/Lecture2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eladhoffer/DeepLearningCourse/e158bbadf8097fdadc05cd9572299a46f428377d/Lectures/Lecture2.pdf
--------------------------------------------------------------------------------

/Lectures/Lecture3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eladhoffer/DeepLearningCourse/e158bbadf8097fdadc05cd9572299a46f428377d/Lectures/Lecture3.pdf
--------------------------------------------------------------------------------

/Lectures/Lecture4.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eladhoffer/DeepLearningCourse/e158bbadf8097fdadc05cd9572299a46f428377d/Lectures/Lecture4.pdf
--------------------------------------------------------------------------------

/Lectures/Lecture5.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eladhoffer/DeepLearningCourse/e158bbadf8097fdadc05cd9572299a46f428377d/Lectures/Lecture5.pdf
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
# DeepLearningCourse

[![Join the chat at https://gitter.im/eladhoffer/DeepLearningCourse](https://badges.gitter.im/eladhoffer/DeepLearningCourse.svg)](https://gitter.im/eladhoffer/DeepLearningCourse?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

A deep learning mini-course given at the Technion.
--------------------------------------------------------------------------------

/Tutorials/Tutorial5.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Deep Learning with Torch - Tutorial 5"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Learning language models with recurrent networks\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "First, we need to handle our data. We will use a text file and load it at character level.\n",
    "\n",
    "We also need to encode the strings into indexed symbols (our vocabulary)."
   ]
  },
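  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "To make the encoding concrete before we write the real loader, here is a toy sketch of the idea in plain Lua: every previously unseen symbol is assigned the next free index, so a string maps to a sequence of small integers."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- toy sketch of index-encoding a string (plain Lua, no torch needed)\n",
    "local vocab, nextIdx, encoded = {}, 1, {}\n",
    "for c in ('abcab'):gmatch('.') do\n",
    "    if not vocab[c] then\n",
    "        vocab[c] = nextIdx -- first time we see this symbol: give it a new index\n",
    "        nextIdx = nextIdx + 1\n",
    "    end\n",
    "    table.insert(encoded, vocab[c])\n",
    "end\n",
    "print(table.concat(encoded, ' ')) -- expected: 1 2 3 1 2"
   ]
  },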
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "\n",
    "function loadTextFileChars(filename, vocab)\n",
    "    local file = torch.DiskFile(filename, 'r')\n",
    "    file:seekEnd()\n",
    "    local length = file:position() - 1\n",
    "    file:seek(1)\n",
    "    local byteVec = torch.ByteTensor(length)\n",
    "    file:readByte(byteVec:storage())\n",
    "\n",
    "    local vocab = vocab or {}\n",
    "    local currentNum = 1\n",
    "    local data = byteVec:data()\n",
    "    for i=0, length-1 do\n",
    "        local encodedNum = vocab[data[i]]\n",
    "        if not encodedNum then\n",
    "            vocab[data[i]] = currentNum\n",
    "            encodedNum = currentNum\n",
    "            currentNum = currentNum + 1\n",
    "        end\n",
    "        data[i] = encodedNum -- encode in place: raw bytes become vocabulary indices\n",
    "    end\n",
    "    local decoder = {}\n",
    "    for val, num in pairs(vocab) do\n",
    "        decoder[num] = string.char(val)\n",
    "    end\n",
    "    return byteVec, vocab, decoder\n",
    "end\n",
    "\n",
    "\n",
    "function reshapeData(wordVec, seqLength, batchSize)\n",
    "    local length = wordVec:nElement()\n",
    "    local numBatches = torch.floor(length / (batchSize * seqLength))\n",
    "\n",
    "    local batchWordVec = wordVec.new():resize(numBatches, batchSize, seqLength)\n",
    "\n",
    "    -- each batch entry gets a contiguous slice of the text\n",
    "    for i=1, batchSize do\n",
    "        local startPos = torch.round((i - 1) * length / batchSize) + 1\n",
    "        local sliceLength = seqLength * numBatches\n",
    "        batchWordVec:select(2,i):copy(wordVec:narrow(1, startPos, sliceLength))\n",
    "    end\n",
    "    return batchWordVec\n",
    "end\n",
    "\n",
    "\n",
    "function decodeFunc(decoder)\n",
    "    local space = ''\n",
    "    local func = function(vec)\n",
    "        local output = ''\n",
    "        for i=1, vec:size(1) do\n",
    "            output = output .. space .. decoder[vec[i]]\n",
    "        end\n",
    "        return output\n",
    "    end\n",
    "    return func\n",
    "end\n",
    "\n",
    "\n",
    "function encodeFunc(vocab)\n",
    "    local func = function(str)\n",
    "        local length = #str\n",
    "        local encoded = torch.ByteTensor(length):zero()\n",
    "\n",
    "        for i=1, length do\n",
    "            -- vocab is keyed by byte values, so index with str:byte(i);\n",
    "            -- str[i] would not address a character in Lua\n",
    "            local encodedNum = vocab[str:byte(i)]\n",
    "            if not encodedNum then\n",
    "                encodedNum = -1\n",
    "            end\n",
    "            encoded[i] = encodedNum\n",
    "        end\n",
    "        return encoded\n",
    "    end\n",
    "\n",
    "    return func\n",
    "end"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "byteVec, vocab, decoder = loadTextFileChars('hebrewBible.txt')\n",
    "--byteVec, vocab, decoder = loadTextFileChars('tinyshakespeare.txt')\n",
    "vocabSize = #decoder\n",
    "encodeTensor = encodeFunc(vocab)\n",
    "decodeString = decodeFunc(decoder)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "39\t\n",
       " 3215738\n",
       "[torch.LongStorage of size 1]\n",
       "\n",
       "א,א בראשית, ברא אלוהים, את השמיים, ואת הארץ. א,ב והארץ, הייתה תוהו ובוהו, וחושך, על-פני תהום; ורוח אלוהים, מרחפת על-פני המים. א,ג ויאמר אלוהים, יהי אור; ויהי-אור. א,ד וירא אלוהים את-האור, כי-טוב; ויבדל אלוהים, בין האור ובין החושך. א,ה ויקרא אלוהים לאור יום, ולחושך קרא לילה; ויהי-ערב ויהי-בוקר, יום אחד. {פ}\n",
       "\n",
       "א,ו ויאמר אלוהים, יהי רקיע בתוך המים, ויהי מבדיל, בין מים למים. א,ז ויעש אלוהים, את-הרקיע, ויבדל בין המים אשר מתחת לרקיע, ובין המים אשר מעל לרקיע; ויהי-כן. א,ח ויקרא אלוהים לרקיע, שמיים; ויהי-ערב ויהי-בוקר, יום שני. {פ}\n",
       "\n",
       "א,ט ויאמר אלוהים, ייקוו המים מתחת השמיים אל-מ\t\n"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "print(vocabSize)\n",
    "print(byteVec:size())\n",
    "print(decodeString(byteVec:narrow(1,1,1000)))\n"
   ]
  },
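  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check, encoding a string and then decoding it should reproduce it, as long as every byte appears in the vocabulary (unseen bytes are marked with -1 by ```encodeFunc```)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- round-trip check: decode(encode(s)) should equal s for in-vocabulary bytes\n",
    "local s = 'בראשית ברא'\n",
    "print(decodeString(encodeTensor(s)))"
   ]
  },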
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       " 3062\n",
       " 50\n",
       " 21\n",
       "[torch.LongStorage of size 3]\n",
       "\n"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "batchSize = 50\n",
    "seqLength = 20\n",
    "trainData = reshapeData(byteVec, seqLength + 1, batchSize)\n",
    "print(trainData:size())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We will use the ```recurrent``` package for Torch:\n",
    "https://github.com/eladhoffer/recurrent.torch\n",
    "\n",
    "A popular alternative is the ```rnn``` package:\n",
    "https://github.com/Element-Research/rnn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "----RNN - Single----\t\n",
       "Output size: \t\n",
       " 16\n",
       " 7\n",
       "[torch.LongStorage of size 2]\n",
       "\n",
       "State size: \t\n",
       " 16\n",
       " 7\n",
       "[torch.LongStorage of size 2]\n",
       "\n",
       "----LSTM - Sequence----\t\n"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Output size: \t\n",
       " 16\n",
       " 20\n",
       " 7\n",
       "[torch.LongStorage of size 3]\n",
       "\n",
       "State size: \t\n",
       " 16\n",
       " 14\n",
       "[torch.LongStorage of size 2]\n",
       "\n"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "require 'recurrent'\n",
    "local inputSize = 5\n",
    "local outputSize = 7\n",
    "\n",
    "local rnn = nn.RNN(inputSize, outputSize)\n",
    "local lstm = nn.LSTM(inputSize, outputSize)\n",
    "local gru = nn.GRU(inputSize, outputSize)\n",
    "\n",
    "\n",
    "print('----RNN - Single----')\n",
    "-- To feed a single time step, we use 'single' mode\n",
    "local batch = 16\n",
    "local x = torch.rand(batch, inputSize)\n",
    "rnn:single()\n",
    "\n",
    "-- And now we can feed the input to get output. We can also peek at the state\n",
    "local y = rnn:forward(x)\n",
    "print('Output size: '); print(y:size())\n",
    "print('State size: '); print(rnn:getState():size())\n",
    "\n",
    "\n",
    "print('----LSTM - Sequence----')\n",
    "-- We mostly feed sequences through rnns\n",
    "local timeLength = 20\n",
    "x = torch.rand(batch, timeLength, inputSize)\n",
    "\n",
    "-- We enable this by putting the recurrent model into 'sequence' mode\n",
    "lstm:sequence()\n",
    "\n",
    "-- And now we can feed the sequence to get output. We can also peek at the state\n",
    "local y = lstm:forward(x)\n",
    "print('Output size: '); print(y:size())\n",
    "print('State size: '); print(lstm:getState():size())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Non-recurrent layers and criterions are wrapped in ```TemporalModule``` and ```TemporalCriterion``` containers."
   ]
  },
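  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For intuition, a small sketch of what the wrapper does: ```TemporalModule``` applies the wrapped module independently at every time step, so a wrapped ```nn.Linear``` maps a batch x time x features tensor to batch x time x outputs."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- apply a Linear at every time step of a batch x time x features tensor\n",
    "local perStep = nn.TemporalModule(nn.Linear(5, 7))\n",
    "local y = perStep:forward(torch.rand(16, 20, 5))\n",
    "print(y:size()) -- expected: 16 x 20 x 7"
   ]
  },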
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "#Parameters = \t535335\t\n"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "require 'recurrent'\n",
    "require 'cunn'\n",
    "\n",
    "rnnSize = 256\n",
    "embedder = nn.LookupTable(vocabSize, rnnSize)\n",
    "classifier = nn.Linear(rnnSize, vocabSize)\n",
    "rnn = nn.LSTM(rnnSize, rnnSize)\n",
    "\n",
    "model = nn.Sequential()\n",
    "model:add(embedder)\n",
    "model:add(rnn)\n",
    "model:add(nn.TemporalModule(classifier))\n",
    "model:add(nn.TemporalModule(nn.LogSoftMax()))\n",
    "\n",
    "model:cuda()\n",
    "model:sequence()\n",
    "embedder:share(classifier, 'weight', 'gradWeight') -- tie the embedding and output weights\n",
    "w, dE_dw = model:getParameters()\n",
    "print('#Parameters = ', w:nElement())\n",
    "\n",
    "criterion = nn.TemporalCriterion(nn.ClassNLLCriterion()):cuda()\n",
    "\n",
    "w:uniform(-0.08, 0.08)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Training the network"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "require 'optim'\n",
    "\n",
    "Vmax = 5\n",
    "optimState = {learningRate = 1e-3}\n",
    "\n",
    "\n",
    "function forwardNet(data, train)\n",
    "    local lossAcc = 0\n",
    "    local numBatches = data:size(1)\n",
    "    model:sequence()\n",
    "    model:forget()\n",
    "    model:zeroState()\n",
    "    if train then\n",
    "        -- set network into training mode\n",
    "        model:training()\n",
    "    else\n",
    "        model:evaluate()\n",
    "    end\n",
    "    local x = torch.CudaTensor(batchSize, seqLength)\n",
    "    local yt = torch.CudaTensor(batchSize, seqLength)\n",
    "    for i = 1, numBatches do\n",
    "        -- our input and target are the same sequence, shifted by one character\n",
    "        x:copy(data[i]:narrow(2, 1, seqLength))\n",
    "        yt:copy(data[i]:narrow(2, 2, seqLength))\n",
    "\n",
    "        local y = model:forward(x)\n",
    "        local err = criterion:forward(y, yt)\n",
    "\n",
    "        lossAcc = lossAcc + err / seqLength\n",
    "\n",
    "        if train then\n",
    "            local function feval()\n",
    "                model:zeroGradParameters() -- zero grads\n",
    "                local dE_dy = criterion:backward(y, yt)\n",
    "                model:backward(x, dE_dy) -- backpropagation\n",
    "                local norm = dE_dw:norm()\n",
    "                if norm > Vmax then -- gradient renormalization to avoid explosion\n",
    "                    local shrink = Vmax / norm\n",
    "                    dE_dw:mul(shrink)\n",
    "                end\n",
    "                return err, dE_dw\n",
    "            end\n",
    "\n",
    "            optim.adam(feval, w, optimState)\n",
    "        end\n",
    "    end\n",
    "\n",
    "    local avgLoss = lossAcc / numBatches\n",
    "\n",
    "    return avgLoss\n",
    "end\n"
   ]
  },
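  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Once the training loop below has run, the network can be checkpointed with the standard ```torch.save```/```torch.load``` pair; the filename here is arbitrary."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- checkpoint the model so a trained network can be reloaded later\n",
    "-- (most useful after the training loop below; the filename is arbitrary)\n",
    "torch.save('charRnnLM.t7', model)\n",
    "-- model = torch.load('charRnnLM.t7')"
   ]
  },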
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We'll also introduce a sampling function, to generate sequences from our network."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "\n",
    "function sample(num, temperature)\n",
    "    local num = num or 50\n",
    "    local temperature = temperature or 1\n",
    "    local function smp(preds)\n",
    "        if temperature == 0 then\n",
    "            local _, num = preds:max(2) -- greedy: always take the most likely symbol\n",
    "            return num:view(1)\n",
    "        else\n",
    "            -- preds come out of a SoftMax, so they are already probabilities;\n",
    "            -- raising them to 1/temperature is equivalent to a softmax over logits/temperature\n",
    "            local probs = preds:squeeze():pow(1 / temperature)\n",
    "            probs:div(probs:sum()) -- renormalize so probs sum to one\n",
    "            local num = torch.multinomial(probs:float(), 1):typeAs(preds)\n",
    "            return num\n",
    "        end\n",
    "    end\n",
    "\n",
    "    local sampleModel = nn.Sequential():add(embedder):add(rnn):add(classifier):add(nn.SoftMax():cuda())\n",
    "\n",
    "    sampleModel:evaluate()\n",
    "    sampleModel:single()\n",
    "\n",
    "    local wordNum = torch.Tensor(1):random(vocabSize):cuda()\n",
    "    local pred\n",
    "    local predText = {}\n",
    "\n",
    "    for i=1, num do\n",
    "        pred = sampleModel:forward(wordNum)\n",
    "        wordNum = smp(pred)\n",
    "        predText[i] = wordNum:squeeze()\n",
    "    end\n",
    "    return torch.Tensor(predText)\n",
    "end"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Training Loss: 1.2775420697977\t\n",
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "ר-הארץ; גיקר; והשם בין-גוליים תצחבה-אל-תשלחנו בארץ-אשר: המזבקת חמר מבצר, התאקרה אשר באלמים ואיש, מיה; לשמח-גמתיהם, לך הסהב וכדור, ומלך-הזאקרוח.\n",
       "עב,א גשלו, מפני-יהוד תעללים כי-עוזה, מפני יהובני המלכישלגא, והיה רבחנה לירחנה: כי-ראשי, נתבורו ידון אל-רגבל יעקול, והעיר ליהוה. ב,יא חזק\t\n",
       "Epoch 2:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Training Loss: 1.1135815120382\t\n",
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Training Loss: 1.0722586075166\t\n",
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       " דיבר-יהוה--נדים, מקומו--טמא, על-בני שלכים, ישראל ישראל, מאהב בבל; ויעזבם, מי לאלה לאמור: את כל-הוא וחושם מצוהרי, סביבות את-כל-גיד המלך משילות, אשר שבע צסון--מלך-פדר; והנפש את-כל-דרכו, מזמור--בחוץ הנשמר, במלך: בתית לו שקר הנביא הראשון, להם בנו-החצר הדבר, אשר יקרא, עוונם הגונינים. ב ד,טו \t\n",
       "Epoch 4:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Training Loss: 1.0446834487124\t\n",
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Training Loss: 1.0277193483771\t\n",
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "קני יהוה. נא,כב אז אל-הקשת כי-תוכיח על-הפקודים--את-שלושה נמוצדים הניחמם איש-אחאב, מלץ; וישאו בת-אמור, אבשמו ארץ גדול. מז,יז לנשא, עליו, וייפול ודויד. ס}\n",
       "\n",
       "כב,ח טופג על-חואני--ברכו שפרה כתף, חילו מלך אביו; ותהיו מכים זהב, {ס} בעימתי, וצבא שאול, מיניו לבבל, כחרב מלחם עלי כל-הכריס; ומבנות נ\t\n"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epochs = 5\n",
    "trainLoss = torch.Tensor(epochs)\n",
    "\n",
    "print('Sampled text:')\n",
    "print(decodeString(sample(500)))\n",
    "for e = 1, epochs do\n",
    "    print('Epoch ' .. e .. ':')\n",
    "    trainLoss[e] = forwardNet(trainData, true)\n",
    "\n",
    "    print('Training Loss: ' .. trainLoss[e])\n",
    "\n",
    "    print('Sampled text:')\n",
    "    print(decodeString(sample(500)))\n",
    "end"
   ]
  },
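  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The training loss is the average negative log-likelihood per character (in nats), so ```exp(loss)``` gives the per-character perplexity: a rough measure of how many characters the model is effectively choosing between at each step."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- per-character perplexity of the recorded training losses\n",
    "print(torch.exp(trainLoss))"
   ]
  },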
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Sampled text:\t\n"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       " נולוה. {ס}\n",
       "\n",
       "כג,א בשמן בדי-שדה, עליי אל-מימינה מותנו.\n",
       "קב,ג כי-עמד ראפו שמונה דמיקים תבוכו; ולבנך טובה, בושני יעשו נלשכם, תבקר היית; ולך מחטאו במושו; - אם-אמרים שם חמדו נשינו, חושש תרביבני; אנוכי יתאול, יחדיתי שתיה, פתח יהוה; וארון ישר, כי-הוא, הקהלתו.\n",
       "קמא,ב קין מצדקה תיקח יהוה-\t\n"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "print('Sampled text:')\n",
    "print(decodeString(sample(500)))"
   ]
  },
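  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The ```temperature``` argument of ```sample``` controls how adventurous the sampler is: ```0``` always picks the arg-max (greedy decoding), values below 1 sharpen the distribution, and values above 1 flatten it towards uniform."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "-- greedy decoding versus high-temperature sampling\n",
    "print(decodeString(sample(200, 0)))\n",
    "print(decodeString(sample(200, 2)))"
   ]
  },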
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "iTorch",
   "language": "lua",
   "name": "itorch"
  },
  "language_info": {
   "name": "lua",
   "version": "5.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
--------------------------------------------------------------------------------