├── README.md ├── bootstrap.vocab ├── compiler ├── android-compiler.py ├── assets │ ├── android-dsl-mapping.json │ ├── ios-dsl-mapping.json │ └── web-dsl-mapping.json ├── classes │ ├── Compiler.py │ ├── Node.py │ ├── Utils.py │ └── __init__.py ├── ios-compiler.py └── web-compiler.py ├── floyd.yml ├── models └── .keep └── pix2code.ipynb /README.md: -------------------------------------------------------------------------------- 1 | # Pix2Code 2 | 3 | Within three years deep learning will change front-end development. It will increase prototyping speed and lower the barrier for building software. The field took off last year when Tony Beltramelli introduced the [pix2code paper](https://arxiv.org/abs/1705.07962) and Airbnb launched [sketch2code](https://airbnb.design/sketching-interfaces/). Currently, the largest barrier to automating front-end development is computing power. However, we can use current deep learning algorithms, along with synthesized training data, to start exploring artificial front-end automation right now! 4 | 5 | ## Try it now 6 | 7 | [![Run on FloydHub](https://static.floydhub.com/button/button.svg)](https://floydhub.com/run?template=https://github.com/floydhub/pix2code-template) 8 | 9 | Click this button to open a Workspace on FloydHub that will train this model. 10 | 11 | ## Turning Design Mockups Into Code With Deep Learning 12 | 13 | In this notebook, we’ll build a neural network to code a basic a HTML and CSS website based on a picture of a design mockup. 14 | 15 | ![pix2code](https://blog.floydhub.com/content/images/2018/04/bootstrap_overview.gif) 16 | 17 | *Image from the [Blog](https://blog.floydhub.com/turning-design-mockups-into-code-with-deep-learning/)* 18 | 19 | 20 | We’ll use a dataset of generated bootstrap websites from the [pix2code paper](https://arxiv.org/abs/1705.07962). By using Twitter’s [bootstrap](https://getbootstrap.com/), we can combine HTML and CSS and decrease the size of the vocabulary. 
21 | 22 | Instead of training it on the bootstrap markup, we’ll use 17 simplified tokens that we then translate into HTML and CSS. The [dataset](https://github.com/tonybeltramelli/pix2code/tree/master/datasets) includes 1500 test screenshots and 250 validation images. For each screenshot there are on average 65 tokens, resulting in 96925 training examples. 23 | 24 | By tweaking the model in the pix2code paper, the model can predict the web components with 97% accuracy ([BLEU](https://en.wikipedia.org/wiki/BLEU) 4-ngram greedy search). 25 | 26 | We will: 27 | 28 | - Preprocess webpage images and the code related HTML for this mixed NLP-CV task 29 | - Build and train the `pix2code` model using Keras and Tensorflow 30 | - Evaluate our model on the test set -------------------------------------------------------------------------------- /bootstrap.vocab: -------------------------------------------------------------------------------- 1 | , { } small-title text quadruple row btn-inactive btn-red btn-green btn-orange double header btn-active single -------------------------------------------------------------------------------- /compiler/android-compiler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | __author__ = 'Tony Beltramelli - www.tonybeltramelli.com' 4 | 5 | import sys 6 | 7 | from os.path import basename 8 | from classes.Utils import * 9 | from classes.Compiler import * 10 | 11 | if __name__ == "__main__": 12 | argv = sys.argv[1:] 13 | length = len(argv) 14 | if length != 0: 15 | input_file = argv[0] 16 | else: 17 | print("Error: not enough argument supplied:") 18 | print("android-compiler.py ") 19 | exit(0) 20 | 21 | TEXT_PLACE_HOLDER = "[TEXT]" 22 | ID_PLACE_HOLDER = "[ID]" 23 | 24 | dsl_path = "assets/android-dsl-mapping.json" 25 | compiler = Compiler(dsl_path) 26 | 27 | 28 | def render_content_with_text(key, value): 29 | value = 
value.replace(TEXT_PLACE_HOLDER, Utils.get_random_text(length_text=5, space_number=0)) 30 | while value.find(ID_PLACE_HOLDER) != -1: 31 | value = value.replace(ID_PLACE_HOLDER, Utils.get_android_id(), 1) 32 | return value 33 | 34 | file_uid = basename(input_file)[:basename(input_file).find(".")] 35 | path = input_file[:input_file.find(file_uid)] 36 | 37 | input_file_path = "{}{}.gui".format(path, file_uid) 38 | output_file_path = "{}{}.xml".format(path, file_uid) 39 | 40 | compiler.compile(input_file_path, output_file_path, rendering_function=render_content_with_text) 41 | -------------------------------------------------------------------------------- /compiler/assets/android-dsl-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "opening-tag": "{", 3 | "closing-tag": "}", 4 | "body": "\n\n {}\n\n", 5 | "stack": "\n \n {}\n \n", 6 | "row": "\n{}\n", 7 | "label": "\n", 8 | "btn": "", 12 | "footer": "\n \n \n \n \n {}\n \n", 13 | "btn-search": "", 14 | "btn-contact": "", 15 | "btn-download": "", 16 | "btn-more": "" 17 | } 18 | -------------------------------------------------------------------------------- /compiler/assets/web-dsl-mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "opening-tag": "{", 3 | "closing-tag": "}", 4 | "body": "\n
\n \n \n \n\n\n Scaffold\n
\n \n
\n {}\n
\n

© Tony Beltramelli 2017

\n
\n
\n \n \n \n\n", 5 | "header": "
\n \n
\n", 6 | "btn-active": "
  • []
  • \n", 7 | "btn-inactive": "
  • []
  • \n", 8 | "row": "
    {}
    \n", 9 | "single": "
    \n{}\n
    \n", 10 | "double": "
    \n{}\n
    \n", 11 | "quadruple": "
    \n{}\n
    \n", 12 | "btn-green": "[]\n", 13 | "btn-orange": "[]\n", 14 | "btn-red": "[]", 15 | "big-title": "

    []

    ", 16 | "small-title": "

    []

    ", 17 | "text": "

    []

    \n" 18 | } 19 | -------------------------------------------------------------------------------- /compiler/classes/Compiler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | __author__ = 'Tony Beltramelli - www.tonybeltramelli.com' 3 | 4 | import json 5 | from .Node import * 6 | from .Utils import * 7 | 8 | def render_content_with_text(key, value): 9 | if FILL_WITH_RANDOM_TEXT: 10 | if key.find("btn") != -1: 11 | value = value.replace(TEXT_PLACE_HOLDER, Utils.get_random_text()) 12 | elif key.find("title") != -1: 13 | value = value.replace(TEXT_PLACE_HOLDER, Utils.get_random_text(length_text=5, space_number=0)) 14 | elif key.find("text") != -1: 15 | value = value.replace(TEXT_PLACE_HOLDER, 16 | Utils.get_random_text(length_text=56, space_number=7, with_upper_case=False)) 17 | return value 18 | 19 | 20 | class Compiler: 21 | def __init__(self, dsl_mapping_file_path): 22 | with open(dsl_mapping_file_path) as data_file: 23 | self.dsl_mapping = json.load(data_file) 24 | 25 | self.opening_tag = self.dsl_mapping["opening-tag"] 26 | self.closing_tag = self.dsl_mapping["closing-tag"] 27 | self.content_holder = self.opening_tag + self.closing_tag 28 | 29 | self.root = Node("body", None, self.content_holder) 30 | 31 | def compile(self, tokens, output_file_path): 32 | dsl_file = tokens 33 | 34 | #Parse fix 35 | dsl_file = dsl_file[1:-1] 36 | dsl_file = ' '.join(dsl_file) 37 | dsl_file = dsl_file.replace('{', '{8').replace('}', '8}8') 38 | dsl_file = dsl_file.replace(' ', '') 39 | dsl_file = dsl_file.split('8') 40 | dsl_file = list(filter(None, dsl_file)) 41 | #End Parse fix 42 | 43 | current_parent = self.root 44 | 45 | for token in dsl_file: 46 | token = token.replace(" ", "").replace("\n", "") 47 | 48 | if token.find(self.opening_tag) != -1: 49 | token = token.replace(self.opening_tag, "") 50 | 51 | element = Node(token, current_parent, self.content_holder) 52 | current_parent.add_child(element) 53 | 
current_parent = element 54 | elif token.find(self.closing_tag) != -1: 55 | current_parent = current_parent.parent 56 | else: 57 | tokens = token.split(",") 58 | for t in tokens: 59 | element = Node(t, current_parent, self.content_holder) 60 | current_parent.add_child(element) 61 | 62 | output_html = self.root.render(self.dsl_mapping, rendering_function=render_content_with_text) 63 | if output_html is None: 64 | return "Parsing Error" 65 | 66 | with open(output_file_path, 'w') as output_file: 67 | output_file.write(output_html) 68 | return output_html 69 | 70 | 71 | FILL_WITH_RANDOM_TEXT = True 72 | TEXT_PLACE_HOLDER = "[]" -------------------------------------------------------------------------------- /compiler/classes/Node.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | __author__ = 'Tony Beltramelli - www.tonybeltramelli.com' 4 | 5 | 6 | class Node: 7 | def __init__(self, key, parent_node, content_holder): 8 | self.key = key 9 | self.parent = parent_node 10 | self.children = [] 11 | self.content_holder = content_holder 12 | 13 | def add_child(self, child): 14 | self.children.append(child) 15 | 16 | def show(self): 17 | for child in self.children: 18 | child.show() 19 | 20 | def render(self, mapping, rendering_function=None): 21 | content = "" 22 | for child in self.children: 23 | placeholder = child.render(mapping, rendering_function) 24 | if placeholder is None: 25 | self = None 26 | return 27 | else: 28 | content += placeholder 29 | 30 | value = mapping.get(self.key, None) 31 | if value is None: 32 | self = None 33 | return None 34 | if rendering_function is not None: 35 | value = rendering_function(self.key, value) 36 | 37 | if len(self.children) != 0: 38 | value = value.replace(self.content_holder, content) 39 | 40 | return value 41 | -------------------------------------------------------------------------------- /compiler/classes/Utils.py: 
# compiler/classes/Utils.py -- random filler-text and identifier helpers.
__author__ = 'Tony Beltramelli - www.tonybeltramelli.com'

import random
import string


class Utils:
    @staticmethod
    def get_random_text(length_text=10, space_number=1, with_upper_case=True):
        """Return a random lowercase string of *length_text* characters.

        Up to *space_number* inner characters are replaced by spaces (fewer
        if a duplicate position is drawn, since the loop then stops rather
        than retry — preserved from the original to avoid an endless loop).
        With *with_upper_case*, the first character and the character that
        follows each inserted space are upper-cased.

        NOTE(review): requires length_text >= 5 whenever space_number > 0,
        because space positions are drawn from [2, length_text - 3].
        """
        results = [random.choice(string.ascii_lowercase) for _ in range(length_text)]
        if with_upper_case:
            results[0] = results[0].upper()

        current_spaces = []
        while len(current_spaces) < space_number:
            space_pos = random.randint(2, length_text - 3)
            if space_pos in current_spaces:
                # Collision with an already-used position: give up early.
                break
            results[space_pos] = " "
            if with_upper_case:
                # Bug fix: capitalise the character AFTER the space.  The
                # original wrote `results[space_pos - 1].upper()`, which
                # overwrote that character with a duplicate of the one
                # preceding the space.
                results[space_pos + 1] = results[space_pos + 1].upper()

            current_spaces.append(space_pos)

        return ''.join(results)

    @staticmethod
    def get_ios_id(length=10):
        """Return a random alphanumeric id of *length* chars with dashes at
        positions 3 and 6 (iOS storyboard object-id shape).
        """
        results = [random.choice(string.digits + string.ascii_letters)
                   for _ in range(length)]
        results[3] = "-"
        results[6] = "-"
        return ''.join(results)

    @staticmethod
    def get_android_id(length=10):
        """Return a random purely-alphabetic id of *length* characters."""
        return ''.join(random.choice(string.ascii_letters) for _ in range(length))
#!/usr/bin/env python
# compiler/ios-compiler.py -- compile a .gui DSL file into an iOS storyboard.
from __future__ import print_function
__author__ = 'Tony Beltramelli - www.tonybeltramelli.com'

import sys

from os.path import basename
from classes.Utils import *
from classes.Compiler import *

if __name__ == "__main__":
    argv = sys.argv[1:]
    if not argv:
        # Fix: usage errors go to stderr with a non-zero status
        # (the original printed to stdout and called exit(0)).
        print("Error: not enough argument supplied:", file=sys.stderr)
        print("ios-compiler.py <path to .gui file>", file=sys.stderr)
        sys.exit(1)
    input_file = argv[0]

    TEXT_PLACE_HOLDER = "[TEXT]"
    ID_PLACE_HOLDER = "[ID]"

    dsl_path = "assets/ios-dsl-mapping.json"
    compiler = Compiler(dsl_path)

    def render_content_with_text(key, value):
        # Fill every [TEXT] placeholder with the same short random word and
        # give each [ID] occurrence its own fresh storyboard object id.
        value = value.replace(TEXT_PLACE_HOLDER,
                              Utils.get_random_text(length_text=6, space_number=0))
        while value.find(ID_PLACE_HOLDER) != -1:
            value = value.replace(ID_PLACE_HOLDER, Utils.get_ios_id(), 1)
        return value

    # Derive <path>/<uid>.gui -> <path>/<uid>.storyboard from the input path.
    file_uid = basename(input_file)[:basename(input_file).find(".")]
    path = input_file[:input_file.find(file_uid)]

    input_file_path = "{}{}.gui".format(path, file_uid)
    output_file_path = "{}{}.storyboard".format(path, file_uid)

    compiler.compile(input_file_path, output_file_path,
                     rendering_function=render_content_with_text)
"assets/web-dsl-mapping.json" 25 | compiler = Compiler(dsl_path) 26 | 27 | 28 | def render_content_with_text(key, value): 29 | if FILL_WITH_RANDOM_TEXT: 30 | if key.find("btn") != -1: 31 | value = value.replace(TEXT_PLACE_HOLDER, Utils.get_random_text()) 32 | elif key.find("title") != -1: 33 | value = value.replace(TEXT_PLACE_HOLDER, Utils.get_random_text(length_text=5, space_number=0)) 34 | elif key.find("text") != -1: 35 | value = value.replace(TEXT_PLACE_HOLDER, 36 | Utils.get_random_text(length_text=56, space_number=7, with_upper_case=False)) 37 | return value 38 | 39 | file_uid = basename(input_file)[:basename(input_file).find(".")] 40 | path = input_file[:input_file.find(file_uid)] 41 | 42 | input_file_path = "{}{}.gui".format(path, file_uid) 43 | output_file_path = "{}{}.html".format(path, file_uid) 44 | 45 | compiler.compile(input_file_path, output_file_path, rendering_function=render_content_with_text) 46 | -------------------------------------------------------------------------------- /floyd.yml: -------------------------------------------------------------------------------- 1 | env: tensorflow-1.7 2 | machine: cpu 3 | data: 4 | - source: floydhub/datasets/pix2code/1 5 | destination: pix2code -------------------------------------------------------------------------------- /models/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/floydhub/pix2code-template/35026f1b25f971a089866a3f8885a1d43ca47b64/models/.keep -------------------------------------------------------------------------------- /pix2code.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Pix2Code\n", 8 | "\n", 9 | "Within three years deep learning will change front-end development. It will increase prototyping speed and lower the barrier for building software. 
The field took off last year when Tony Beltramelli introduced the [pix2code paper](https://arxiv.org/abs/1705.07962) and Airbnb launched [sketch2code](https://airbnb.design/sketching-interfaces/). Currently, the largest barrier to automating front-end development is computing power. However, we can use current deep learning algorithms, along with synthesized training data, to start exploring artificial front-end automation right now!\n", 10 | "\n", 11 | "## Turning Design Mockups Into Code With Deep Learning\n", 12 | "\n", 13 | "In this notebook, we’ll build a neural network to code a basic a HTML and CSS website based on a picture of a design mockup. \n", 14 | "\n", 15 | "![pix2code](https://blog.floydhub.com/content/images/2018/04/bootstrap_overview.gif)\n", 16 | "\n", 17 | "*Image from the [Blog](https://blog.floydhub.com/turning-design-mockups-into-code-with-deep-learning/)*\n", 18 | "\n", 19 | "\n", 20 | "We’ll use a dataset of generated bootstrap websites from the [pix2code paper](https://arxiv.org/abs/1705.07962). By using Twitter’s [bootstrap](https://getbootstrap.com/), we can combine HTML and CSS and decrease the size of the vocabulary.\n", 21 | "\n", 22 | "Instead of training it on the bootstrap markup, we’ll use 17 simplified tokens that we then translate into HTML and CSS. The [dataset](https://github.com/tonybeltramelli/pix2code/tree/master/datasets) includes 1500 test screenshots and 250 validation images. 
For each screenshot there are on average 65 tokens, resulting in 96925 training examples.\n", 23 | "\n", 24 | "By tweaking the model in the pix2code paper, the model can predict the web components with 97% accuracy ([BLEU](https://en.wikipedia.org/wiki/BLEU) 4-ngram greedy search).\n", 25 | "\n", 26 | "We will:\n", 27 | "\n", 28 | "- Preprocess webpage images and the code related HTML for this mixed NLP-CV task\n", 29 | "- Build and train the `pix2code` model using Keras and Tensorflow\n", 30 | "- Evaluate our model on the test set\n", 31 | "\n", 32 | "### Instructions\n", 33 | "- To execute a code cell, click on the cell and press `Shift + Enter` (shortcut for Run).\n", 34 | "- To learn more about Workspaces, check out the [Getting Started Notebook]().\n", 35 | "- **Tip**: *Feel free to try this Notebook with your own data and on your own super awesome pix2code or skecth2code task.*\n", 36 | "\n", 37 | "Now, let's get started! 🚀" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## Initial Setup\n", 45 | "Let's start by importing some packages" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 1, 51 | "metadata": {}, 52 | "outputs": [ 53 | { 54 | "name": "stderr", 55 | "output_type": "stream", 56 | "text": [ 57 | "Using TensorFlow backend.\n" 58 | ] 59 | } 60 | ], 61 | "source": [ 62 | "from os import listdir\n", 63 | "from numpy import array\n", 64 | "import numpy as np\n", 65 | "\n", 66 | "from keras.preprocessing.text import Tokenizer, one_hot\n", 67 | "from keras.preprocessing.sequence import pad_sequences\n", 68 | "from keras.models import Model, Sequential, model_from_json\n", 69 | "from keras.utils import to_categorical\n", 70 | "from keras.layers.core import Dense, Dropout, Flatten\n", 71 | "from keras.optimizers import RMSprop\n", 72 | "from keras.layers.convolutional import Conv2D\n", 73 | "from keras.callbacks import ModelCheckpoint\n", 74 | "from keras.layers import Embedding, 
TimeDistributed, RepeatVector, LSTM, concatenate , Input, Reshape, Dense\n", 75 | "from keras.preprocessing.image import array_to_img, img_to_array, load_img\n", 76 | "\n", 77 | "import tensorflow as tf" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 2, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "# Path to Dataset\n", 87 | "\n", 88 | "DS_PATH = '/floyd/input/pix2code/train/' # edit to your /path/to/train/ds\n", 89 | "DS_EVAL_PATH = '/floyd/input/pix2code/eval/' # edit to your /path/to/eval/ds" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "## Training Parameters\n", 97 | "We'll set the hyperparameters for training our model. If you understand what they mean, feel free to play around - otherwise, we recommend keeping the defaults for your first run 🙂\n", 98 | "\n", 99 | "**WARNING**\n", 100 | "\n", 101 | "The training is not feasible on a CPU machine." 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 3, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "# Hyperparams if GPU is available\n", 111 | "if tf.test.is_gpu_available():\n", 112 | " # GPU\n", 113 | " EPOCHS = 50 # Number of passes through entire dataset\n", 114 | " MAX_SEQUENCE = 150 # The max sequence to predict\n", 115 | " MAX_LEN = 48 # Max number of token for the input in the context prediction\n", 116 | "# Hyperparams for CPU training\n", 117 | "else:\n", 118 | " # CPU\n", 119 | " EPOCHS = 50\n", 120 | " MAX_SEQUENCE = 100\n", 121 | " MAX_LEN = 48\n", 122 | " print(\"WARNING: Switch on GPU for training!!\")" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "## Data preparation\n", 130 | "\n", 131 | "In the step below we will load the imagse and code for each sample." 
132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": 4, 137 | "metadata": {}, 138 | "outputs": [], 139 | "source": [ 140 | "import matplotlib.pyplot as plt\n", 141 | "\n", 142 | "def show_img(im, figsize=None, ax=None):\n", 143 | " if not ax: fig,ax = plt.subplots(figsize=figsize)\n", 144 | " ax.imshow(im)\n", 145 | " ax.get_xaxis().set_visible(False)\n", 146 | " ax.get_yaxis().set_visible(False)\n", 147 | " return ax\n", 148 | "\n", 149 | "# Read a file and return a string\n", 150 | "def load_doc(filename):\n", 151 | " file = open(filename, 'r')\n", 152 | " text = file.read()\n", 153 | " file.close()\n", 154 | " return text\n", 155 | "\n", 156 | "def load_data(data_dir):\n", 157 | " text = []\n", 158 | " images = []\n", 159 | " # Load all the files and order them\n", 160 | " all_filenames = listdir(data_dir)\n", 161 | " all_filenames.sort()\n", 162 | " for filename in (all_filenames):\n", 163 | " if filename[-3:] == \"npz\":\n", 164 | " # Load the images already prepared in arrays\n", 165 | " image = np.load(data_dir+filename)\n", 166 | " images.append(image['features'])\n", 167 | " else:\n", 168 | " # Load the boostrap tokens and rap them in a start and end tag\n", 169 | " syntax = ' ' + load_doc(data_dir+filename) + ' '\n", 170 | " # Seperate all the words with a single space\n", 171 | " syntax = ' '.join(syntax.split())\n", 172 | " # Add a space after each comma\n", 173 | " syntax = syntax.replace(',', ' ,')\n", 174 | " text.append(syntax)\n", 175 | " images = np.array(images, dtype=float)\n", 176 | " return images, text\n", 177 | "\n", 178 | "# Get images and text\n", 179 | "train_features, texts = load_data(DS_PATH)" 180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": 5, 185 | "metadata": {}, 186 | "outputs": [ 187 | { 188 | "name": "stdout", 189 | "output_type": "stream", 190 | "text": [ 191 | "Here's what an example looks like\n", 192 | "\n", 193 | "HTML bootstrap text: header { btn-inactive , btn-inactive , 
btn-inactive , btn-active } row { single { small-title , text , btn-red } } row { quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-orange } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } \n" 194 | ] 195 | }, 196 | { 197 | "data": { 198 | "text/plain": [ 199 | "Text(0.5,1,'HTML bootstrap image')" 200 | ] 201 | }, 202 | "execution_count": 5, 203 | "metadata": {}, 204 | "output_type": "execute_result" 205 | }, 206 | { 207 | "data": { 208 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAO4AAAD7CAYAAABt9agKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJztnXd8FcXeh59JL4TeEeRa7gUV9d4TwZcrm0INTUA6qBARuSiIevUiTUSxIqLYwAJKAEFFBFF6iRVFEWyIIhJ6C2mbctq8f+zJyUlIz4ZkwzyfzyZ7ZmdnfzOz352+K6SUKBQKa+FX1QYoFIqyo4SrUFgQJVyFwoIo4SoUFkQJV6GwIEq4CoUFUcK9AAghtgshxlS1HRcaIcSnQojbq9qOmkiNE64Q4i8hRJcCbqOEEJ979jN8NrcQIsvn9wghxEwhhBRC3FsgjHs97jM9v6OFEEcuWMQKQQixWAjxeCn9zhRCJFS2Tb5IKeOklG9fyGteLNQ44ZaElLJW7gYkAX183JZ6vO0Hbitw6u0e9xqJMLjo7gerojKqcL4FwoQQVwN4/od43MvL5UKIb4QQaUKIj4QQ9XMPCCH6CiF+FkKkeKrVbX2OtfW4pXj89PW4jwVGAA95agtrPe7/E0IcFUKkCyF+E0J0FkL0AKYAQzx+93j8bhdCzBZCfAFkApcJIUYLIX71nP+nEOIuH1uihRBHhBBThBBnPLWbEUVF2LeJ4Kn1fCGEeN4Tlz+FEB097oeFEKd8q9VCiF5CiN2e9DqcW9PxOX6bEOKQEOKsEGK6b01LCOEnhJgshDjgOb7SN71rAkq4RbOEvFL3ds/vinAbEA80A5zAiwBCiL8Dy4FJQCPgE2CtECJICBEIrAU2Ao2BCcBSIcQ/pJQLgaXAM57aQh8hxD+Ae4AbpJQRQHfgLynleuAJYIXH73U+dt0KjAUigEPAKaA3UBsYDTwvhPiXj/+mQEOghSddFnquWxo6AHuBBsAy4F3gBuAKYCTwkhCilsev7kmzukAv4D9CiH6eNLsKeAXjwdUMqOOxJ5cJQD8gCmgOnANeLqWNlqCmCne156meIoRIwcjkspIADPOIZ6jnd0VYIqX8SUqpA9OBwUIIf2AIsE5KuUlK6QDmAKFAR+BGoBbwlJTSLqXcCnwMDCviGi4gGLhKCBEopfxLSnmgBLsWSyl/llI6pZQOKeU6KeUBabAD46HRqcA506WUOZ7j64DBpUyDg1LKRVJKF7ACaAnM8oS1EbBjiBgp5XYp5Y9SSreUci/Gwy3KE85AYK2U8nMppR2YAfhOuh8HTJVSHpFS5gAzgYFCiIBS2lntqanC7SelrJu7AePLGoCUMgn4A
6Ok+l1KebiCNvmefwgIxCi5mnt+517X7fHbwnPssMfN91zf0sXX5j8wSu6ZwCkhxLtCiOZlsAshRJwQ4mshRLLnodfTY2cu5zwPH197SrpGLid99rM8Nhd0q+Wxo4MQYpsQ4rQQIhVDjLl2NPe1W0qZCZz1CedS4EOfB/evGA+1JqW0s9pTU4VrFu8AD3j+V5SWPvutAAdwBjiGcaMBRieRx+9Rz7GWBTqNWnmOQf5SxnCQcpmU8iZPmBJ4uii/Bd2FEMHABxilfhPPQ+8TQPj4ryeECC9gz7Eiwq4Iy4A1QEspZR3gNR87jgOX+NgdilH9zuUwEOf78JZShkgpj1JDUMItnhVAN2BlUR6EECEFNlGE15FCiKuEEGHALOB9T5VxJdDL04kUiPGgyAG+BHZidBo9JIQIFEJEA30w2oZglGCX+djyDyFErEeA2RglmNvHb+sSeo6DMKrapwGnECLOE/+CPOppg3fCaA+/V0yY5SUCSJZSZgsh2gPDfY69D/TxdG4FYdQwfNP9NWC2EOJSACFEIyHEzZVgY5WhhFsMUsosKeVmKWVWEV5aYIjDd7u8CL9LgMXACYwe6omea/yG0TEzH6ME7oMxRGX3tN/6AHGeY68At0kp93nCfBOjPZsihFiNIbqnPH5PYHRoPezxmyuus0KI74uIb7rHrpUYHTrDMUo9X054jh3D6Bwb52OPmYwHZgkh0jHasN6Hp5TyZ4wOqHcxSt8MjE61HI+XFzx2b/Sc/zVGx1iNQaiF9IrS4inxE6SUl5Tk90Li6YlOAa6UUh6sansuBKrEVVgSIUQfIUSYp709B/gR+KtqrbpwKOEqrMrNGNX1Y8CVwFB5EVUfVVVZobAgqsRVKCyIEq5CYUHKNAWsYcOGsnXr1pVkikKh+Ouvvzhz5kxRcwG8lEm4rVu3ZteuXeW3SqFQFEtkZGSp/KmqskJhQUxZLaGfSTIjGMIbtvLuJyVVLMx69eoRERFhWni+tGplnp2VHZ6iZlJh4W5++S6cx74zwxYAeszeRe/evTlx4kSFw/Kt1g8YMKDC4RUMt7TVmrKEaYadqjlT86lwVdlM0eZihmgVipqMauMqFBZECVehsCCmC7d+m/wvR+z+6FbAaLvaxq2j7mVFvlusSB7vnv+/H3DLlXnH748pW1ix7c53H90m73judcrC7VcWf3xql+KPF0ZBOx7vDl3aFu5XcXFh+jt4ajdwccPjO9kwrQM9Zu9COlMB2Dgzim4zd9DojvuASXw1/QYier9C0yvasmtu8co79Hvevr8/PNoFdh+Ax7vBd/th7jafm1zCtI1FhzVtg8dO4MHukJ4FzyTC5ZcC++D5jXBWGuFJIM0OP5yErfvA5S463Ld/h8e651/NfTAJ3vzV2J+92Qjzs51gs8ETW4qNMgDPf2X8H/k3SDho2H5fG2hsg42/wj3XwsyvYVY32H0aPtxdcpiKmoHpJW6bnvexYVovoh/52uNi3MrS8/fbJQmsn3oDTUd+wDUd2pcoWoBdLngwEvacgkc7w0yP+KZvgk3H8/yt3Ax/pZfOzoe6g9MN59zQMBCcGfmPT9sA0zdAnQCIuqR40ebyx25Ye8A4d9qGPNF2uTovzHQXfHsIxsWWHN54T6d1iyvy3H48CCeT4EwmpIYZblkusDUuOTxFzaFS2ridJq9i56L32PfxHHIl227AVEDwt5s0OkzawJ8fv8Z38/qUGFYt4G9hsPEnQMATO/KOPdYNMnLg7ptg427Y64I3vio+vAFtoW0z+PCLPLdJhYjoHxEwrh08txU++6lEM+lYF94+BTv/gN6Xww3N8o5Fe17tdnUd6NQOoi6Dtz4vPrwQ4LGt8K/LIcKTS2NvhOhoaNsGggPhBU9auNUCr4uOMi3ri4yMlAXHCNdPLWwsszHGm0SKxxZzDd9ty6+KHrN3mTY+6mtrZYy5Vocw24TCvgIv1lHjuNYlMjKSXbt2lThXuZJ6lUsWLXCeaBVlp6BoFRcHa
jhIobAgFRZu6GWaGXbko0WLQt/3rVAoPFS4jatQKMyjitu4CoWiMlHCVSgsiBKuQmFBlHAVCguihKtQWBAlXIXCgpR7dVBmZqaZdigUFzVhYWFl8q9KXIXCgijhKhQWRAlXobAgSrgKhQVRwlUoLIgSrkJhQZRwFQoLooSrUFgQJVyFwoKYJtyRI0cycuRIs4JTKBTFYNoL0RMSEgBw5GQRGBxKZkYaYbVqk5njIDTQH+GnCneFwixME+5dd92Fy+7iUHZtWjZpxsxpY2lVC04f2Mmt46aQmJho1qUUiose04rBdu3acb3tenKy7NRrVp/PPEI9efQIQzs1NesyCoWCCrwsrqjVQU5HDpOffZU5UyYB4Eb1gCkUJZG7OqjKXhYXEBjMnCmTGDpkcOVcQKFQVJ6u3l2xsrKCViguelSBqFBYECVchcKCKOEqFBbkggk3pmuPYo9nHfui2OO5bD9uZ1BcyR/DLkhOyuEij+lJm8ocnkJRlZgm3KPJGTz+6lbW7PiDBx5dTp8HV6BpGrdpGss/30fuB653rnod/dB6XE4H/1tkfFo+M/VU7ofrmdynB9nZ2QBomoamaTiddk7mOHE67AAEhQYAmRzIgb9SMjz+bgFgxec/M2n2PDRNA9zsOKhz/5PzWfDgCMjczyE7rN35J5qmsXt+PzKz7d44pGfloGl3EH/f5HzXz0w9iR3Jzj9Oo2kaj00cQE5WBp/sOcToKc9y9ugeso5/4TknBk3TsGfrZiWtQnEepgj39yxoUb8Wk/8TS526Iez780vef3IQXa+vz1/AsJvakKvMDgPG8PwD8/APCKTt5S0BCKvTGCTccd+jfJkKISEhANw5sAPPv/QyAQFBNAkOYF/Scc8VBdOGGiV467q18APGTnkSgNp1wr12fTBnLNNnfcR/BvbgrmeXop85xLyPf6VPh8vYsWMHiz7J4vn5C3hxwQdkARGhwYAbh9N4yNxzYyOPfU34+OdMOlzRyBOym+DQWvS87lKus39BgxbX5T6XABcABw8dMSNpFYpCMUW4V4ZC984acZpG1HWXsO7t+Sybex8ICANenHk/MYPiAejeZzDT3l+PpmkEp/4JgKZFAf50b36IdZvXecMVl91Cu9YN0LRYtNg4xo++lSABCPBvf6/nXI0gIOO7NwGIa9eakMC6ACz/JYLERcN5esYDICAgKIznB7Tl1kG9eeCxV3lm6StsWPseE+8azIYFUxgUp7Fg2VzIMpIlrOdD3msMuDqcwb27ASHe2gHApDkfEh3TF3/8AbimQx/8wupz5513mpG0CkWhmD5zKheHWxLoZ9zh3bp2ZuOmLeW3shAeG9mD6QnrAYjr3p1PN2yocJixMdFs3bbd+3tk/H9469n7CWpwZaH+fz2cRtuWtencpQtbNm+u8PUVFy9lnTlVacJVKBSlp8qnPCoUispHCVehsCCmrcdtvSC53Oe6JSSNq2+WKQpFjccU4fb5II3IlhFmBKVQKEqBKcK1u9y4naXv5FIoFBXDtDauwyVxuCQDmp7z7vu6iXDOc/fdFApF6TFNuE63sS09XBenG/529GfsDkg9+CNLD9clM0Uypq0DpxtuqXfC6z93UygUpce0zil7AfElXXI13wyvR1rqPwmvHcGNS8/x4o+BzG/vJrlWa+xbs826tEJx0WFiiSu9m8Oeg9NtVH9/PppOuxd+JceejdMt+XfbBjy47Cfa/C0o3zkKhaL0mFbi+lZ3B10VwXv7crjqzWQgBCJCcEpAQqf304hteznbf88x69IKxUWHacLNdOSVmm//WHQ1OPOMg6JXxioUitJginCFQFV3FYoLiCnCXdsvnCveKv/CcSndGAsAFQpFaTBFuIGBgRy8s3a5zxeixMUQCoXCB9PauH7qo14KxQVDqU2hsCBKuAqFBVHCVSgsiBKuQmFByt05lfuOHIVCceFRJa5CYUGUcBUKC6KEq1BYECVchcKCKOEqFBZECVehsCDlHg6qSZ8g8
R3aqinxKjhcVxPjVVPiBGUfXlUlrg9ZWVlVbYKiBFQeGSjhKhQWRAlXobAgSrgKhQVRwlUoLIgSrkJhQZRwFQoLooSrUFgQJVyFwoKYKtxJs+eZGVyVo2kamqZx2lnVlpjL8ys2omkaAB+89WQVW1MZGF/SGDRoUBXbUXmYXuLm3hB5/7t4BWA15j8ylPXbEvH3/M6LU3/6Dh1ZdYZVkA9ffpzExETGjhrCC4s/5eud6wAsmUeFsf3XYxz5cRMnT55E0zT+zHDXmLjlYrpwExMTST/8HYmJidzRQ2PT2gSvu9WY8Oi79IjRcAFxk973xEEyd0IXUo6frGrzyk3s9fUAWLh4BWOHduKn5KZoPUdaMo98yfF8ea5j/VSG3/0YADsSExnVM5pPt1k7bgUxTbiapvH9hlVomkZESxuapvHim3Pp2mew97iV+XTeQE8cBPfPX4FfgHW/ldQi+j5vfix89zO2LnmahUsXn1dbshpdY6PRNI2gJld73aI0jdtmJRAXY804FYWQsvQ3YGRkpNy1axdQtpUZmqZV66d57sqMrKwsCkuPVYk/MkBr5/09fGBPjpzKsESccikuv+KGj+XTZQsr2yRTEEIQGhoK1MzVQZGRkezatavEb/JcEOFWd3ITrSbGKZeaGLeaGKfSClcNB/mgPj5W/VF5ZKCE60NZah+KqkHlkYESrg/qaV79UXlkoITrg3qaV39UHhmYJtwczytFMrPt+dyly130SRJ8j3qHISRs/umwsZPr1e0GKcnKcZ7nPzMjjZGPLCR/aIC7mGsXQpFPc7cblyOTjAyjM6Sfz3BJZkZGoaek6zq4UpjzwZbcKJ1Hamo6AE57dj7bPzmcgz3bSE8pJXaHEedXpg72iVL5b2C703Xe+TmZmUbYErKycvIdS0tL9+5vf2Q0AFlZ+nnhup05SLcLtyOv02hSX61grlSI0pW4RkQyM40ZVBnpqd4jdmeeNfFxRj66HHakdOc7Vt0xRbipQEBwKJqmESZOAaBp9wEwOiYad+ZJojp3o6+mMf4RY1pkYsIMXv/sF5K+W8Phr17ibI6RaBkHVoMAf39/QICUbPouiXQnaFG3EuzvxHl2L119xBNWqzYAzuw0AGbN+4hpd9+KFh1dpngU9jTvpmlo0UMQCGrVCsOZcRQ/4M9dH4EzibBatejRczrPrdrqifcg7r//ISLCwwHwE8a8q5RDW/ly8Txiuvdn3NPLSPl9HXXqRDD51e9ZeyCDafeMRNM05q38FYBdhwyxCJlDUKA/qXaJf6DEzy93KmYUQ8Y8Vab45eLOPMLhX7biSXJ+Op6JX1gYb23eD+5sQkODALjXk8a1a0egaTczftbrAGQCoaHhnsAygBzmbj/OqLmb6X3XIyTtfI8p44cwZtIU/PzMrdYVV+LGRmmMGD0XTRuFpvUmLDQIcFAroo6RZtE98ben8OO6Jd5zVv56lgdfXka/YaPxy0nmwKblJlpbeZiSpmkO8PeDRgDBl4A7Hd8nenbyEeaNvYyVCTOZ++gk7G6JNnIWAK2uuYGm1w2jf9dxAPg7jae9W8L4KU+BEKSkp1A7yA9Iwi8gBHvqMVzBzQpYIdmzYwUAAujUrEmZ41Hwaf7y1gOeWJxE+sTHDVzaLiqvkAxw8ttf5ziw/iGWLp3Lru++9bHKOK9eq07885bbcGVl0r93FNKVV3O4rHE47a/4R75r165dy9jxCwEEGVkOMs4YpXt4m8E8vPgTVrwxucxxBAiJaAlA13sTvG6BwPXtmoIIwEhBeCHfOPU5rmvbFgDdd+62j5Bq1YsgLDyUFv/sTvKZk5w6vI+ULECaN9m7uBL34dc/yrUECAORd3vXBggNwz+sPmfOJec7LyI0mMBAPwLCG5hmZ2VjinBbBsKbr7zMsu2JvLbwdfCLID4+zns87BIb1w9+mZBWsYQAy955m8WLFwOw4bPDBIY14KUJVzLtiXks/SqL7z9azD9a1KX935vy+7bFDIq+lk/ee
4v4iY+y4M23CbusB1s3rSA+Ph6Al154iVtiIjmYmSfmK8dMp04Z41HwaR6e9BnvrN1E/N1TWLJ0JfNffg2AkfHx+PmHQlBDEhYtYv2aJ7m73//x2YmraNisBWveXcj8+a+ACOE/vYxS66PtuwmNqE9i4gauEUep1+ZmXpo/n4Hd/kbTsECaNwknccd2/u+aRtiaBNM4IjCfLYEpvxI18A4WL17MpwvvIe6yWt40LCsrVq8nJCiCqZ1DvG4LX3uZpG1rWPxOAgveXMKGz77yhv/iiy+SmJhI/+vDad2lP40CYMErLwHw2sKlQDB9rqlHb1trhsRpHNVDeW3ldtZ8sIoh8fEsfjuhECvKR3Elbte/12PwqD7Ex/cmPn4Ib731DuDHK68uYHViIvXqhPH6omXEjLwXgH63xfN/LWsTE9mOIQP6s3zVp4TVCzfN1spETcCg5JlTVqSmTsBQM6cMVK+yDzVFtDUZlUcGlSbcHl1jjR2ZU6y/LctfqSwTykxJPZZH9bL1Oh7Y9G5FzKk8nKfL5P25IaWfoF/aNdkHVt/n3de08d79ya9+X+x5ZRnHzbdYwlm+1VyPTehXJv+LvzlVruuUFfNWB/UZQe9YjYlTH2Po2IcAN7OmPQDA7DG9GN9N46Ov/+C3lLyOin49unj3u464F62rxrC4GOKGjkXTNLYve4JnJk1k6qwnGBw/3SxTi6Tg03z2zId57uM9xHlugMcmGzfbE/8dztEMF91GTUbrrGE/+wcOCZrWjwX/uwMXoGm9QEhmPvEUmqbRNTaaMUMG4sC4ofYk3Mnjs6ZVepyKRvLr9gUA9NI0xgy/GYDHn3yK7gOGc+ug3saxwfF8dAru6NqZsUMGEhVlrK1+fMZkMp3QuXsf9iycyKmDX3P3A0Z8NC3KexVNG+Dd/2TRc+Sc20d0tz4AOHXPTe7pKX/q8ZkAxEbnnX+e1cWUuJqmsW/bYp5++jGPSyj9u2tMGDOcfuPe4NaHjR5jNxAdE+OzGqo7UVEaf6x/gRnPLmXiqP5kHP3BG+6x/V8x++ln0DSN++8cQnJOBk63cb3fv3wbgNvvncLkF9bx1n8HknFgNSvnPcb/ZjzO8xMHMrwyVltJKUu92Ww2mYuu697t4/0p0mazyc69h3ndOnW8wdjPSJa973hI6hlpUtd1aWvfRb7QM1r+sW2x1HVdrnnjWfnrOV1OGDNI2qKN82+8wSZ/Xf2wNyybzSbvnfxyvmuauRUWp9zt2NlU2btXnDxzcIf843R6PpsG9+shbV3HyfYdO8lH3/5Evjdak7quy34jHpW6rsu9q9/w+s3dko/8LLtEa/LLBcM9YaWbHh/fOBUWN5st0ojbuQyp67qMttmkruvy5N6lUtd1me7x98M5w/anb7NJm22s1HVdvjtpkNy7doHUdV2On/+uN35nPOfcOWW2vP+9fVLXdZl68ne56dVHZHryMa+/G202ue1girTFxPjYc7s8kmHsT5jzmWHLL5/Ig+lny5RfnTreINc+OyhfHj2xZK28SYuSGbou+9ls0naDTT705DtS13U5OMomdf20/HT3ETn9o5+lruuye25a7P9Cjl32rbR1/LfPNYz00tNPSttNY+XpfR9Jm80md+9aI3VdlzfYbHL+1oPy5N6lctHsyVLXddn3joelnp5a6vzyaKxELZpS4sa0CKRtVE+yj+/P577khZkArHjxEfCMZy5/9m7eOelkyH/n0zfOKHFHdtHYnZQF6fu5rUtnogePYeRjG9mw6hXenPIw9z30Xz7f9LYZppaZya/u4PiJU3QfPJkAn9RKTEzkeE4oJH/Llk9XM3FgFE/tzeGNJ8bx5sL/Frqmtc1ljcmu15pz7sYArEx4karoZkjc+j4uewYRQQKti1EVnDCqP7Uu78cHa9d6LboySNI+qj8rf4EGAbuZOS6ePrMXc3ms8faPGbf2IbZHPzZt3UHWod3MfnFRvnkdsT2H0vG2h4juapSwdz/zHoFNh4KAR
WOu5PcvVzDiv4+BgJuj8tJL0zR63jqdgdHdyhw3KWD1qtxx2gA+eWkmdw7rjQCWbttI4vZEpk3sh6ZFIyRoWg+0v9djcpfWAMybfQ/TRg4D4MgrE+nSNoxlc+5i7bq1zJr1GPcPM9aX39fyCD1GzOKFB3vw97ad6dw5niviJrL4wYH0iZ8LSJ56ZQnLX5wGfuX+tl6RqF5lVK+ylVC9ygaqV9mHmiLamozKIwMlXMVFR8a5I1VtQoVRwlXUGFwy/7TUP46lAQ7cziyS9v/Mj/v+JNvhJjOsEUfOZbHvT+sK2LRWc6vXkkv2VAqSxtU3JRwziLzxcypaM/tu503mGGMi9625j30n9pkS1hDbEEbZRpkSVkXp1bUz6zcbiz2iNY3AkDDa9x7KkxNH8fqWPTx6R29wnEQENaO+cz8NWzaqYovLjynC7f9hGh1aRZgRFB2WpLLz1rLOMjYHIUS+NtTll7eqEjsqm4w6GVxS5xJTwtqp72QUo0wJqzQUNwHD5XJilLVGRdKR4+TJiSOM8/wFLkcWMu1PaNSMHreMIXHLUqBe5RtdCZgi3EyHmwB/czoNdEfVrYks2PFht9ewTxh4cNgd5oWFeWGVhuI6pzb5vDs59x3YuSudZo01hngIb0IjrPmeb19Mqyo73UaCOtOTCYgoXXXXnqkTFFZ9V2O4fF4CYHdmExQQgsORTGBgfVxS4l/g6W/PSSYouL7v/VItcfvEKzstm5DaIef5sWc7CQryB7+8iDiz7AR41upag2qcCRXEtM4ph8vYYts0weGCnPQz6GdPYM/WcTjB4bKTceoIWannyDx7AocLdo5uQua5c2R5fjtcUJE3O5iNy+X2bhtXtiE7O40dW6LJ1E/w8vRGuJzZOBy618/WLdFk6sdZ8nwjr1t1xO1ye7erW7ZDP51Fximd7EwnjhwXbpebpT1n0K39izhSs8k4mUHGyQyiWvXE7XKjn9LzhaG48JgnXLexzegYyvpeIXxz1xV0/7/WjOralK9vq8fXtzYhrH4Ddtzaiu8mtCG3RvzALZfyxYS23vOr8ilZsP3kK1ykZMWajgDUqt0YpGTr5vZIGYzNbvjp2u0H6tRpgss/2DLC/e30r9Rr+G/qNqzLqj6TmPT3Th4xSuxOiQgNoH7jurRq0YzB7e8k0OmmXqO6VSZc9bI4A9OE63RLnG5J+4RU6tYPYdTSI6zbk4bbpwSdYDM6AnJSsjxVaydPrjuV7/yqLG8Ltp9yxff0nY0AN/GPHwBAz3AipRvhzMTlcvPs1utxudwE+LtIS3cS4Cv4aojb7c63ZeR8TaZdB+FPgEPidrvxl5JbmjfxHM8kNSsFCTQMiyAjR893/oVETcAwMK2Nm1uC/nJHfbomJPNc3xZcW9uP+Ycd6FkuwkP8GXp1KOdS7AQIf8O/fyh7RoXg8Dm/KinYq+z0vDxs6kvHWTesPe9PDvW6S/zoM+oYk568hmcS7TidbtIyYOcX7Xn9wb3ec6sjLqfLu7/utnX0eqcXAEPWPMMjnZ/AtX8ry5IOkVO3GU/2Xc5Dqwbj8tSE/kpNznfOhaa4Enfw8N/Pm+pZHmbNaECrVue3+6sTpsxV7r4yhSPZweYY5Mzmp/gL20Vf1Cct/h31Q2Hey8QXO66vcBjlobi5yv2WlW2NaUmsHr7a1PBKoqj8GjD4oCnhX9pS5/nnrjElrNJS1rnKJpa45lRhqrLPsmCJW12ruhXlQldvzaT4cVyT4mVww0nXAAAS5UlEQVSBZrQpbdy1/cKwO92mbN+PrLrhoYK1D6fTWeGtOuLIcuByuEzZ/B3+JV/QRIqrIeb2K3S4vSWbPr0Gl8vN1Hn/wOVyc2VMw3ydjcbmKsTNGg81U0rcoKAg/hhlTj9XQID5axdLS8ESd+cXthrZGbLujnWmlbrG+68vHKUpce0uB1Gdf2DHln+y7Tc79VxuXG43WzdeR9rBFEKbBNOlzz4+Xn89EYFGeHYAN3Tturs6jUgWiWkqq
UrBmUVBkV7om/JC4efnh5+fNdeXlFTiAswa1ojOC/ZyU/T3zFpwDS8915IZn2RzU/R3ALhdAj9/SUSg8Lpt3W4DkRtG9a8rW19tJlKwxFVUP4orcXOnqGo37cR4KbqDyaN3A3BVXPP8U1hd0L7jTu/Pm3z2lXAthhJt9ac0JW5h/PhxWZbwVf+alhKuD/7+/rhcrpI9KqqM4povpo2dCyVcSxEcHFyj3mNUEwkOLnq+gNPpMmVK5G0jWlQ4jMqm3MI1Y4ZKdUTFy7p8tu0a7HZ7yR5LICLCnLXllYkqcRU1hpCQEEJCqvdURbOw5piAQnGRo4SrUFgQJVyFwoIo4SoUFkQJV2FZihsaqumoXmWFZfH3978ohrkKQ5W4CoUFUcJVKCyIEq5CYUGUcBUKC6KEq1BYkHL3KteUVTQBAQEEBeW9oq6mxKumfpHeN152u73avterrJS1d/yiL3F9Mz4rK6sKLVGUBt88qimiLQ8XvXAVCiuihOuDenVN9UflkYESrg/qg1LVH5VHBkq4CoUFUcJVKCyIEq4Pqv1U/VF5ZKCE64NqP1V/VB4ZKOH6oJ7m1R+VRwZKuAqFBTFFuN+czAFA0zQzgqsyClbDiorPj8vnFXu8ujN16lSmTp1aqP2zJpr70WuzKa6qrGkaSIePi/Q5NrISrbrwmCLc9k2COfHtQhITEwEjATUtlgcGxvD67RpPzN+EpmmsWrXKjMtdMNYmvMbBLS8D+UXqxsUTn/5ZVWZVmNmzZzN79mzvb9+4Wb4FKQIBSPl9i9fpuSEaULOms5pWVR78QAIAb04bA8CCjz7h2aVLwQlhTS4B/BgwYIBZl6sUCms/3f7oCg6luz0PJeO7Qu9t+4rvly66wNZVHhNmL/DuV/e+n/K3cU+bakdVY3ob9+3E/dwwYBpt64XgF9zcu/4oMXG75aqWfoGSdVt2cGmEn8d2f7Qe/Rg9fgiXh35f1eaZxqBObdm7dRnRmsbG3clVbU6F0TQNvwCf1Tb+IFp1rTqDKgFRlidYZGSk3LVrF1BzlolB3pKqrKysGtNrWVOX9QkhCA0NBWpOnCAvvyIjI9m1a1eJ9R7Vq+xDTRFtTUblkYESrg9qcL/6o/LIQAnXB/U0r/6oPDJQwlUoLEiVCzf35SNHP3+l0OM5F86UElm59Qd+2PpukccL6zV3V/CaXXvdUmq/Fem1//qko8hjx77fWu5wLzRa936MWbCjTOcMnTjPu5/wytQS/a98elh+B9e5wm0ZMKZMdpQF04SrRUWjaVH89dVbxMbEEBU9g7lLNpKTdgwtKopuPcaz7H8TAYiJjeXePhoxMTEAdO7WAyEEsbGxxMQvJTo6CoB9Z+x01TRenXwvz42o/KGkgu2n2NhYTqfb0bQ8W8GPAykuUg7vQdOi0DSN6Oho7zkxnn0tKppBmkZsdAzLfjjhmZSi8f0nC7xhGUKT/HY6Cy3+ca/womNi0HpOYcN7zxPgTGF5YhLRURoHtiw3zhvxHPH3TUbTNPZseRWAmfF56aNpGpGRkeSkHi80nlFRht/Z8SPQtCjIOo4QoGlRLHptGvdrGs+v/o244SuIjoryju1qWrzXxv2ZbjRtML9sWU5sbOx5D437X/3Qk27RfHPGTnKmi+ioKE84A7z+Y2NjS5M1XoqdOdV3Hn72VJx+hh9Xjm6kZa8xREUZaRUbG8NJu0TTNH5ePgXd7uLYD6vQNI0sVzo/pdXx5remRaNpGjOio7xxjNI0wI+YmGgjLaNjAEF0VBTR0TFkuCVdozXsZ36GM/uJiYlh4kcHvPHNvQ/y7qdyIqUs9Waz2WQuuq7n234/dk4u23lY2mw2+e9O3WR6hi77dLTJf3e5Weq6LhcNteXzP3LaXKnrukzVU+Rvm+fL3zfOOS9Mm62DPK3rcs6EMXJWb9t5x83aioqTzWaTNptx3X/fMl0uWrtTfrF2kdx7PM17fO9Hb8iU00lS11NkxxsM//O2HpS6rsu7p74kT6Yb/
j5LOiOXPfGwtN3UU+q6Lm/S7pA2m00umhYnO97YXj720EjZo/8geTzduPacCXf42BEndV2Xe1e/Yfzu94QcMnaC1HVdfrlmjvwp6Yzs5LHVZrPJL5POSGk38qKwuNlsNrnv21VS13U5dlgfqeu63PpXijyTki7nzZggvz+uS5utizfuf3y+Vm7ef1IufPwBI4y0o/K/Npvc+uyE89Js78eLZHrqWXnnkGjZZchYqeu6TEs+Km22AdJmay8PpOkycc1r3rDNzK+xWiep67q8fd6nctadcXnxjR7mTStd16WtQ39vOmj/7iBttv4y+eg+r/+lM+6Suq7Ls6npctver2V6ykmp67pctS/ZuJdn9JO6rsttP/7oTY+U3Gu172XkQ+cJ0mazyaXrd3nDPfLTVtn/v8/LTnEDzot/Lp48K1GLF2Qc9+HX1vLkuD4l+jt15gyNGzYsdbhmkTuGVlKc/vP4y7w67e5yXWPP2Sx+WzKHwZOmF+nnlpGj+SCh6BlZBzYt5/KuedW0zYvn0KnfUPw4R2Dddvn8mj+O6wACS+1b0zQCgS2eabCFHU9MTOTM2bM0bNCgTJaUJr+6DBnP5hX5m197trzKdZ3/4/29bNNOhnftUKZrVxZlHcdVEzAovXCtRE2dgAE1O7/UBIxyoMYIqz8qjwxME+7RY0fz/T5y+HAZzs4r9Y8cPVqMv8qlvGOEjowzJltSyciie5Ch+J5wVznSyJGdXuZziqLkPDr/uNuRDcDx4yfyuZ84fuQ8v7//ef59a6b9ZmGacCfentf2OpaUxMgRIwA4fPgg4AZp3A5nTx0DwI0bPJkg3XlvpB89wgjn8PG8HlGHK/+tlJHl9N5dqelFV5dyzzp5JgUo/5vvZ859n6494vA93e1y4nQb9gfWymuXZ+sp3v2UdL1c16t0XIaNp5IzOXEmJd+haa8upcctRa9dzfBo/lRymjd982nJJ68OJSWRnZ7KL18kgNPJkUNleZiXj8JyOPvUPg4fOsToex7M56Nhw+YAHD18yOu2c92r6Lrd+1vTNMP+aoZpwj2bAwPHz8QNNG/Vyruus3nzS9D6TmL6si/56Xgm4Y2bs2BYX6K1aI6m2dF6jcOZfY7/3XsHADmefG/ZrBngIm5sAv5+yfyy/QO0wbMYMXoutUIDSHhtKpoWQ51ww/8ryz9j+3E7etImtiw3OiXiuxpd700a1sWedrTMQw+5+PsFAoI7e8cQFdOXZxZ9xB85/gSk7uGHj41x3QnDe5DtdBMSXpcufYYyZs6n1I0IL19iVjInfviciZpG4/phNG1Yl/uHdyP3Mefv74efMG7YAzlwPDNPiHHdOwOw96Ml2M/uA+DjxdM4u/s1ADStJ1qMMYTy0L2juLRVK5buSCLLDgQEcMmlLdGiypcHpaF7l2j8slP4/KnRecMuPe7hwP5fad6iGQhJ7nI1LTqamztH4wZatLwUTYuhz8w14C8IDw9CP7GXkf27GAGL6jdby7TOqYyMdMJCQ0Dm4BdQi/S0NCRw+lQyjZo2pnatMNxuF35+/tidbrrERnsX3uc4XAQH+rNyziji7nqRiIja/PlnEpdd1gqQpKRnUzciFAlkZjnIyM5k0/JnGDl+NkeOH6d5gxD8gurhcrs5k5JKg1phBAQFs/LpEQz+31KOHDnCJZdcQnpaGhG1a58Xr6JWB+lpaQQF+uMfEo6ekcnsaeN5ct4ihICM1BRcfkHU8YjTLSV+QnD6RBKNmrbCac/mWHIGrZpe+F5y3zjl4ptfDqeLwAB/AJKOnqBuRBhpKclc0qo1binJ1DOpVcuIl8tpR8/MJjw8FF3P8qbfieNnaNbMiJueZSc81PhwWlpaGgABAf6kpp6jSZNmSJeT7Bw74bUiKhyv4lYHSSAr205YSN5H3J5btZV7b9Y4eiSJ+g0aI91uImrXRgC9NI21OxL547d9/L1NG1xOO06Xi4xMJw3qGbZmZucQ7A/+gcEVtr04LNOrvHfvXq699
tpyn3/y6J80aXFZsX4yko9Rq37zEsMqrpfyVGoOjesYmZZ87CD1m//tPD8//rSPy668gvDgcn/80HTK06ucdFanVYPqWUvwpaj8crslfn757/nDp9No2ej8hzUALjvSP6havPWjrMKtsjutIqIFShQtUCrR+iKEOK/zI1e0QKGiBWh3TZsyXae6YgXRFterXFC0QNGiBagmoi0PajjIB7XypPqj8shACVehsCBKuD6owf3qj8ojA1PauOPfH4+9ob1kj6Xgjeg3TAnHDG5ZV/E4fdArqGRPF5jBw5PKfa6f3xneTfiXidaYR0XilUvfXoKRI1qaYE3lYlrnlNtV0ZWnVU/B9pPT+lEqFFcF8qqQ/p8LSnFt3IrEKxe3tEYlVAm3GJRwzyew+ox4nYcZwrVK69E0K10uV6FbQu+EIo8VtlUlBdtPdrf0busH1c73e0OB3/m3vHOrIy6X27s57ee8+/i4L3+5JR+vbovL5ea2kSG8uayNScKoGMW1cXNt/3h1W95Z1TZfPAvb1qw+349VME24bpfbuz3yr+F8cMsHuF1u+icM4r1e8/hgwAocdhe10v14q+97zL2+Hy6H3esvd6tKCqsq527a8lQ+H1Kbuxpn8aAWRseEc6zoX4cvRtTD6ZZ8MaIejes5+GJEPZ67IQin27/alti+N+rWzZ3YsOZKXC43L79/DUN76GzZcC233HmQoCB/ln/YjpHDryAsxL9a3NglVZVdLjcz3j/HkF4/sGXDtSx/silbNlxLeCs/Nq29ivuvN/wNnnUFPXt+z/3z2vDUG1cp4bpdbqZ//Q4j1s/A7XKzbtRHHPVrwU85sGbYas7ZndQJ9GPinv0s6P8+Ny+/udoIt+DT3OGS3u2LEXWJXZ7KbV2b0S7bzZfD6+JO0XnnqyQcnntpeY9G/GtxMuO/zMHhcuJwVdcSN6+G0z3mO2J6/ILL5c/YvrvJuvRv3NBxIy6Xi06dv6dRmADPIpCqrhFBSSWuEacrW4ZSv2kInWK+AySdYr7jpeeuZmD3PcQ9fS0ul4uRl7vZuvFaJCCR1aLGVxZMa7E4HfnXZaSc+g6AXu/0yu8xJM8tfkX1/pZQlk+U2r11Dn+XpO0buS8Gy/2IVC2aODJpm68zvHoKNpecnLyI5b2Mz0kOkDDnEII6Xj8dOu4E4NOt7fOdVx3JtW/B/37yukm3m5wcJwN7fIMLuNETn5i+P1eFiaZhmnCt9LQqLY4CbVSHAApptx7xDy3UvbpSnipht6ivK8EScyksXj3v/iu/nwtkS2WjepV9KNh+clg/SoVipbZcQSp7OMgqmCLcsOAwXFnWf5YVXGRQXduoFaUiN3h4WNVOKCmpV7miBAX5VziMC4Epwn2659P0frN3hcMJCqpes4xc9mwqrt2wkr1cYEbf1oDX3zr/tS2lYeFr15tsjXmsXX0Fcb1/rFAYgwc2M8maykW95RH1mU0roT6zaWCNaSIKhSIfSrgKhQVRwlUoLIgSrg/VrXNMcT4qjwyUcH3w97fGUMDFjMojg3IPBxXstawpqHhZh5oYp9KiSlyFwoIo4SoUFkQJV6GwIEq4CoUFUcJVKCyIEq5CYUGUcBUKC6KEq1BYECVchcKCKOEqFBZECVehsCBKuAqFBVHCVSgsiBKuRZjw0qdkZRuf/dzwySds+mRdof7WrV8PwLSx3YsM65wLXhkYCcCgSc/Rv5sGQIfISO6dMIEbIiPNNF1RCZR5WV9NeZma1XiqtyAkOBD99Fd0i4sD3Pye6uSxB16mRfo2rou+jiHjZhJ53fX0uncu/5Tw/prVbFiSwJR5zzNy2K3kZGYw95HbuSbuHqQ08nLR43cRHn4/R7/9FIB5L74I0q7yuZqjSlyL8OWHm0jNcRMY1jqf+9uvT2TT2UYMGTcTgMZNm2CrkwLALX1uZuHK91i1JAE87w3c/O1pIO9TIseyjFvAL9DnVhDqLRPVnTK/nvWbb76pRHMU5aF9+/aofKkZtG/fX
r2e9WJBifbiQ7VxFQoLUibhSimVcBWKSqS0+lIlrkJhQVQbV6GwIGUucWviB6wVCqtR5jau233xfDxYobjQVFobVwlXoah6yjQBQwhxGjhUeeYoFBc9l0opG5XkqUzCVSgU1QPVq6xQWBAlXIXCgijhKhQWRAlXobAgSrgKhQVRwlUoLIgSrkJhQZRwFQoLooSrUFiQ/wevyhyitT9yHAAAAABJRU5ErkJggg==\n", 209 | "text/plain": [ 210 | "
    " 211 | ] 212 | }, 213 | "metadata": {}, 214 | "output_type": "display_data" 215 | } 216 | ], 217 | "source": [ 218 | "# Visualization\n", 219 | "print(\"Here's what an example looks like\\n\")\n", 220 | "print(\"HTML bootstrap text:\", texts[0])\n", 221 | "ax = show_img(train_features[1], figsize=(4,4))\n", 222 | "ax.set_title('HTML bootstrap image')" 223 | ] 224 | }, 225 | { 226 | "cell_type": "markdown", 227 | "metadata": {}, 228 | "source": [ 229 | "*Note*: `btn-orange` represents the *light blue button* and `btn-red` the *blue button*." 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "execution_count": 6, 235 | "metadata": {}, 236 | "outputs": [ 237 | { 238 | "data": { 239 | "text/plain": [ 240 | "((1500, 256, 256, 3), 1500)" 241 | ] 242 | }, 243 | "execution_count": 6, 244 | "metadata": {}, 245 | "output_type": "execute_result" 246 | } 247 | ], 248 | "source": [ 249 | "# Dataset info as Sanity check\n", 250 | "train_features.shape, len(texts)" 251 | ] 252 | }, 253 | { 254 | "cell_type": "markdown", 255 | "metadata": {}, 256 | "source": [ 257 | "The Tokens per sentence plot (see below) is useful for setting the `MAX_LEN` and `MAX_SEQUENCE` training hyperparameters." 
258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": 7, 263 | "metadata": {}, 264 | "outputs": [ 265 | { 266 | "data": { 267 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAHJNJREFUeJzt3XmYVdWZ7/HvTwFng0hJlEHQYPrafTtoUElrp41DNGZAE+PQRtE2F7Ud4tDGqTt6c9t+jHGIRq9ep4iJQ5yiGJwVo4migCKCSkTBCwQEE0XUxIi+/cdaFXbKXVWnsE7tU1W/z/Psp/Zee3rPrqrznrXW3usoIjAzM2tpjaoDMDOzxuQEYWZmpZwgzMyslBOEmZmVcoIwM7NSThBmZlbKCcIalqQ9Jc2tOg6z3soJwupK0tuF6UNJfywsH1R1fD2RpCmSvlV1HNb99ak6AOvZImL95nlJ84FvR8SD1UVUDUl9ImJl1XGYdYRrEFYpSetIulTSYkkLJf1QUt9Wtj1Z0kxJn8zL++TlNyU9JmnrwrZLJJ0gaZak5ZKul9Qvr/ukpHvzfr+X9HAr51tbUkg6RtJ8ScsknS1JhW2OkDRH0h8kTZI0uMW+R0l6GZhVcvz1JN2U931T0pOSNsrrBki6Lr+OBZLOlLRGXnekpIckXZz3e1nSbnnd+cB2wFW5lnZ+Lv87SQ9LekPSC5L2LsRxk6QfSbpP0gpJv5G0eWH9Zwr7LpF0Ui5fU9J/SHpF0uv5Gvev6Rdv3UNEePLUJRMwH9itRdm5wGPAQGAQMBU4I6/bE5ib5/8LeBIYkJfHAIuBzwJrAuOB3wJ98volwG/yMZuAucChed2FwEWkGnQ/4POtxLs2EMB9QH9gBPAK8K28fn/gBWAroC/wn8DkFvtOyvuuU3L87wC3AuvkWLYD1svr7gF+DKwLbAo8A4zL644E3gcOya/9BGB+4bhTmmPMyxvma3VQ3n474A/Ap/L6m4ClwLb5ddwKXJvXbQQsA44B1srH2i6vOyX/7jbLr/da4CdV/5156sT/2aoD8NR7plYSxCJgl8LyWODFPL8n8DJwKTAZ2KCw3U+aE0mh7FVghzy/BNi3sO5i4Ed5/lzgFmCLduJtfpPfuVB2IjApz08GDiqs65vfuAcV9v2HNo7/r8CvgL9rUb458A7Qt1B2GHBPnj8SmFVYNyCfq39ebpkgxgEPtDjHBOCUPH8TcElh3deBGYXzPtFK/POAHQvLI4B3AVX9t+apcyb3QVhlclPNJ0lv7M1eBQYXljchvUl9NSJWFMo3B/aTdHKhrF+LfZcU5t8l1VIAzga+D0yW9D7wfyPigjZCXdAivs0KMVwu6dLC+pXAEGB5yb4tXU16/bdKWh+4DviPfNy1gWWF1qw1SLWg1l4bwPrAmyXn2Rz4vKTiuj7AG20cr7nvaCgpSf+V/LsbCtwtqTji5xrAxsDrJXFYN+MEYZWJiJC0hPQG1vwmNIxUq2j2GumT9g2SvhIRU3P5AtIn+fNX47zLSc0735H0GVKieDIiftPKLsU3yWHA7woxnBwRt7XcQdLazadrI473gO8B35O0BakpazbwOPA2sFHkj+Yd1HKfBcD9EfHV1TjWAuBLHzlB+t0tAr4eEdNX47jWDbiT2qp2I3CmpI0lbQKcAfysuEFE3A/8C3CXpG1y8RXAsZJGK1lf0tckrdveCfN2W+RPwcuBD4AP29jlFEmfkDSc1Bb/81x+OfDvkj6dj7uRpG/U+LqRtJukrXPn81uk2seHETGP1Ex0rqQNJK0haaSknWo89GvAFoXlO4BtJO0vqa+
kfpLGSNqqhmPdAXwqd7b3k7ShpO3yusuBcyQNza9nE0mrk4SsQTlBWNW+BzxP+uQ8g9SxfG7LjSJiEnAUcI+kv8+f9o8D/h+pWeW3wD/Txif2gv9B6j9YATwKnBcRT7Sx/STgWWAaqe/iZzmmG4FLgNslvZXj372G8zcbDNyZ45gF3M2q5HMgqXP7RVKH8s9JfRu1uBA4JN91dG5EvAHsQWqqW0yqAf0nqc+kTXnf3YEDSB3Zc4DmRHUu8CDwsKQVpJrPtjXGaN2AVq8Ga9bz5WaiPwJDI2Jh1fGYdTXXIMzMrJQThJmZlXITk5mZlXINwszMSnXr5yAGDhwYw4cPrzoMM7NuZfr06a9HRFN723XrBDF8+HCmTZtWdRhmZt2KpFfb38pNTGZm1gonCDMzK+UEYWZmpZwgzMyslBOEmZmVcoIwM7NSThBmZlbKCcLMzEo5QZiZWalu/SS1WXc0/NRJpeXzz/lyF0di1jYnCLMexgnIOoubmMzMrJQThJmZlXKCMDOzUk4QZmZWyp3UZt1Ua53RZp3FNQgzMyvlBGFmZqWcIMzMrJQThJmZlXKCMDOzUk4QZmZWygnCzMxKOUGYmVkpJwgzMyvlBGFmZqXqliAkDZU0WdLzkmZL+k4uP0vSIkkz8rRXYZ/TJM2VNEfSHvWKzczM2lfPsZhWAidFxNOSNgCmS3ogr7swIs4rbixpa+AA4G+BzYAHJW0VER/UMUYzM2tF3RJERCwGFuf5FZJeAAa3sctY4KaIeA+YJ2kusD3wRL1iNOsOPCifVaVL+iAkDQe2AZ7MRcdIminpGkkb5bLBwILCbgtpO6GYmVkd1T1BSFofuA04PiLeAi4DtgRGkWoY53fweOMlTZM0bdmyZZ0er5mZJXVNEJL6kpLD9RFxO0BEvBYRH0TEh8CVpGYkgEXA0MLuQ3LZX4mIKyJidESMbmpqqmf4Zma9Wj3vYhJwNfBCRFxQKN+0sNk+wKw8PxE4QNJakkYAI4Gn6hWfmZm1rZ53Me0IHAw8J2lGLjsdOFDSKCCA+cARABExW9LNwPOkO6CO9h1MZmbVqeddTL8GVLLq7jb2ORs4u14xmZlZ7fwktZmZlXKCMDOzUk4QZmZWygnCzMxKOUGYmVkpJwgzMyvlBGFmZqWcIMzMrJQThJmZlXKCMDOzUk4QZmZWygnCzMxKOUGYmVkpJwgzMyvlBGFmZqWcIMzMrJQThJmZlXKCMDOzUk4QZmZWygnCzMxKOUGYmVkpJwgzMyvlBGFmZqWcIMzMrJQThJmZlXKCMDOzUk4QZmZWygnCzMxKOUGYmVmpuiUISUMlTZb0vKTZkr6TywdIekDSS/nnRrlcki6WNFfSTEnb1is2MzNrXz1rECuBkyJia2AMcLSkrYFTgYciYiTwUF4G+BIwMk/jgcvqGJuZmbWjbgkiIhZHxNN5fgXwAjAYGAtMyJtNAPbO82OB6yKZAvSXtGm94jMzs7Z1SR+EpOHANsCTwKCIWJxXLQEG5fnBwILCbgtzWctjjZc0TdK0ZcuW1S1mM7Peru4JQtL6wG3A8RHxVnFdRAQQHTleRFwREaMjYnRTU1MnRmpmZkV1TRCS+pKSw/URcXsufq256Sj/XJrLFwFDC7sPyWVmZlaBet7FJOBq4IWIuKCwaiIwLs+PA+4slB+S72YaAywvNEWZmVkX61PHY+8IHAw8J2lGLjsdOAe4WdLhwKvAfnnd3cBewFzgXeCwOsZmZmbtqFuCiIhfA2pl9a4l2wdwdL3iMTOzjvGT1GZmVsoJwszMSjlBmJlZKScIMzMr5QRhZmalnCDMzKyUE4SZmZVygjAzs1JOEGZmVqrdBCFpS0lr5fmdJR0nqX/9QzMzsyrVUoO4DfhA0qeAK0gjrt5Q16jMzKxytSSIDyNiJbAP8OOIOBnwN72ZmfVwtSSI9yUdSBqa+5e5rG/9QjIzs0ZQS4I4DPgccHZEzJM0AvhpfcMyM7OqtTvcd0Q8L+kUYFh
engf8oN6BmZlZtWq5i+mrwAzg3rw8StLEegdmZmbVqqWJ6Sxge+BNgIiYAWxRx5jMzKwB1NRJHRHLW5R9WI9gzMyscdTylaOzJf0zsKakkcBxwOP1DcvMzKpWSw3iWOBvgfeAG4G3gOPrGZSZmVWvlruY3gXOyJOZmfUSrSYISXcB0dr6iPhaXSIyM7OG0FYN4rwui8LMzBpOqwkiIn7VPC+pH/A3pBrFnIj4cxfEZmZmFWq3D0LSl4HLgZcBASMkHRER99Q7ODMzq04tt7meD3whIuZC+n4IYBLgBGFm1oPVcpvriubkkL0CrKhTPGZm1iBqqUFMk3Q3cDOpD+KbwFRJXweIiNvrGJ+ZmVWklhrE2sBrwD8BOwPLgHWArwJfaW0nSddIWippVqHsLEmLJM3I016FdadJmitpjqQ9VvP1mJlZJ6nlQbnDVvPY1wKXANe1KL8wIv7qFlpJWwMHkJ7Y3gx4UNJWEfHBap7bzMw+plruYhpBGm5jeHH79h6Ui4hHJQ2vMY6xwE0R8R4wT9Jc0giyT9S4v5mZdbJa+iDuAK4G7qJzRnE9RtIhwDTgpIh4AxgMTClsszCXfYSk8cB4gGHDhnVCOGZmVqaWPog/RcTFETE5In7VPK3m+S4DtgRGAYtJt9B2SERcERGjI2J0U1PTaoZhZmbtqaUGcZGkM4H7SSO6AhART3f0ZBHxWvO8pCuBX+bFRcDQwqZDcpmZmVWklgTxP4GDgV1Y1cQUeblDJG0aEYvz4j5A8x1OE4EbJF1A6qQeCTzV0eObmVnnqSVBfBPYoqPjL0m6kXRb7EBJC4EzgZ0ljSIlmPnAEQARMVvSzcDzwErgaN/BZGZWrVoSxCygP7C0IweOiANLiq9uY/uzgbM7cg4zM6ufWhJEf+BFSVP56z4Ifx+EmVkPVkuCOLPuUZiZWcOp5Unq1b2l1czMurF2n4OQNEbSVElvS/qzpA8kvdUVwZmZWXVqeVDuEuBA4CXSIH3fBi6tZ1BmZla9WhIE+fsg1oyIDyLiJ8Ce9Q3LzMyqVksn9bv5O6lnSDqXNERGTYnFzMy6r1re6A/O2x0DvEMaEuMb9QzKzMyqV8tdTK/m2T9JuhgY2uIrSM3MrAeq5S6mRyRtKGkA8DRwZR4zyczMerBampg+ERFvAV8HrouIHYDd6huWmZlVrZYE0UfSpsB+rBqe28zMerhaEsT3gfuAuRExVdIWpGcizMysB6ulk/oW4JbC8iv4LiYzsx7PzzOYmVkpJwgzMyvlBGFmZqVqeQ7i3wvza9U3HDMzaxStJghJp0j6HLBvofiJ+odkZmaNoK27mF4EvglsIemxvLyxpE9HxJwuic7MzCrTVhPTm8DpwFxgZ+CiXH6qpMfrHJeZmVWsrRrEHsD3gC2BC4CZwDsRcVhXBGZmZtVqtQYREadHxK7AfOCnwJpAk6RfS7qri+IzM7OK1PKFQfdFxDRgmqSjImInSQPrHZiZmVWr3dtcI+K7hcVDc9nr9QrIzMwaQ4celIuIZ+sViJmZNRY/SW1mZqWcIMzMrFTdEoSkayQtlTSrUDZA0gOSXso/N8rlknSxpLmSZkratl5xmZlZbepZg7gW2LNF2anAQxExEngoLwN8CRiZp/HAZXWMy8zMalC3BBERjwJ/aFE8FpiQ5ycAexfKr4tkCtA/f82pmZlVpKv7IAZFxOI8vwQYlOcHAwsK2y3MZR8habykaZKmLVu2rH6Rmpn1cpV1UkdEALEa+10REaMjYnRTU1MdIjMzM+j6BPFac9NR/rk0ly8Chha2G5LLzMysIl2dICYC4/L8OODOQvkh+W6mMcDyQlOUmZlVoJaxmFaLpBtJw4QPlLQQOBM4B7hZ0uHAq8B+efO7gb1IQ4u/C3jEWDOzitUtQUTEga2s2rVk2wCOrlcsZmbWcX6S2szMSjlBmJlZKScIMzMr5QRhZmalnCDMzKyUE4SZmZVygjAzs1JOEGZmVsoJwszMSjlBmJlZKSc
IMzMr5QRhZmalnCDMzKyUE4SZmZVygjAzs1JOEGZmVsoJwszMSjlBmJlZKScIMzMr5QRhZmalnCDMzKyUE4SZmZVygjAzs1JOEGZmVsoJwszMSjlBmJlZKScIMzMr5QRhZmal+lRxUknzgRXAB8DKiBgtaQDwc2A4MB/YLyLeqCI+MzOrtgbxhYgYFRGj8/KpwEMRMRJ4KC+bmVlFGqmJaSwwIc9PAPauMBYzs16vqgQRwP2Spksan8sGRcTiPL8EGFRNaGZmBhX1QQA7RcQiSZsAD0h6sbgyIkJSlO2YE8p4gGHDhtU/UjOzXqqSGkRELMo/lwK/ALYHXpO0KUD+ubSVfa+IiNERMbqpqamrQjYz63W6PEFIWk/SBs3zwBeBWcBEYFzebBxwZ1fHZmZmq1TRxDQI+IWk5vPfEBH3SpoK3CzpcOBVYL8KYjMzs6zLE0REvAJ8pqT898CuXR2PmZmVa6TbXM3MrIE4QZiZWSknCDMzK+UEYWZmpZwgzMysVFVPUls3N/zUSaXl88/5chdHYmb14hqEmZmVcoIwM7NSThBmZlbKCcLMzEo5QZiZWSknCDMzK+UEYWZmpfwchHUJPzdh1v24BmFmZqVcg7BeyTUas/Y5QVilWnujbk13egPv6GszazRuYjIzs1JOEGZmVspNTHXU0XZut4ubWSNxgijRXd6oe3L7vZlVr9cmCHcgtq83XqPu8uHArCv02gRRpd74xtuT+fdpPZU7qc3MrJRrEL1IT/ik6yag6vl30Hs4QViP1hOSYr11pzf87hRrT+AE0QH+42xcTgSdz9fUnCCsU/lNxazncCe1mZmVargahKQ9gYuANYGrIuKcikMy6xGqrN25ebZ7aqgEIWlN4FJgd2AhMFXSxIh4vtrIrLdz01lj66xRBboikXXW31JXJNeGShDA9sDciHgFQNJNwFigVycIvzmZWRUUEVXH8BeS9gX2jIhv5+WDgR0i4pjCNuOB8Xnx08CcLg/0owYCr1cdRA0cZ+fpDjGC4+xM3SFGqC3OzSOiqb0DNVoNol0RcQVwRdVxFEmaFhGjq46jPY6z83SHGMFxdqbuECN0bpyNdhfTImBoYXlILjMzsy7WaAliKjBS0ghJ/YADgIkVx2Rm1is1VBNTRKyUdAxwH+k212siYnbFYdWioZq82uA4O093iBEcZ2fqDjFCJ8bZUJ3UZmbWOBqticnMzBqEE4SZmZVyglgNktaU9IykX+blEZKelDRX0s9zB3vVMfaXdKukFyW9IOlzkgZIekDSS/nnRg0Q5wmSZkuaJelGSWs3wvWUdI2kpZJmFcpKr5+Si3O8MyVtW3GcP8y/95mSfiGpf2HdaTnOOZL2qCrGwrqTJIWkgXm5oa5lLj82X8/Zks4tlHf5tWwtTkmjJE2RNEPSNEnb5/KPdz0jwlMHJ+BE4Abgl3n5ZuCAPH85cFQDxDgB+Hae7wf0B84FTs1lpwI/qDjGwcA8YJ3CdTy0Ea4n8HlgW2BWoaz0+gF7AfcAAsYAT1Yc5xeBPnn+B4U4twaeBdYCRgAvA2tWEWMuH0q6IeVVYGCDXssvAA8Ca+XlTaq8lm3EeT/wpcI1fKQzrqdrEB0kaQjwZeCqvCxgF+DWvMkEYO9qokskfYL0R3Q1QET8OSLeJA1bMiFvVnmcWR9gHUl9gHWBxTTA9YyIR4E/tChu7fqNBa6LZArQX9KmVcUZEfdHxMq8OIX0PFFznDdFxHsRMQ+YSxrepstjzC4EvgsU75RpqGsJHAWcExHv5W2WFuLs8mvZRpwBbJjnPwH8rhDnal9PJ4iO+xHpj/rDvLwx8GbhH3Ih6ZNxlUYAy4Cf5KawqyStBwyKiMV5myXAoMoiBCJiEXAe8P9JiWE5MJ3Gu57NWrt+g4EFhe0aKeZ/IX2ChAaKU9JYYFFEPNtiVcPEmG0F/GNu8vyVpO1yeaPFeTzwQ0kLSP9Tp+XyjxWnE0QHSPoKsDQiplcdSzv6kKqgl0XENsA
7pCaRv4hU/6z0Hufchj+WlNA2A9YD9qwyplo1wvVrj6QzgJXA9VXHUiRpXeB04HtVx1KDPsAAUvPMycDNudWg0RwFnBARQ4ETyK0HH5cTRMfsCHxN0nzgJlJTyEWkalvzQ4eNMDzIQmBhRDyZl28lJYzXmquX+efSVvbvKrsB8yJiWUS8D9xOusaNdj2btXb9Gm6IGEmHAl8BDsrJDBonzi1JHwqezf9LQ4CnJX2Sxomx2ULg9txE8xSp5WAgjRfnONL/D8AtrGru+lhxOkF0QEScFhFDImI4aRiQhyPiIGAysG/ebBxwZ0UhAhARS4AFkj6di3YlDZk+kRQfNECcpKalMZLWzZ/KmuNsqOtZ0Nr1mwgcku8YGQMsLzRFdTmlL936LvC1iHi3sGoicICktSSNAEYCT3V1fBHxXERsEhHD8//SQmDb/HfbUNcSuIPUUY2krUg3fLxOg1zLgt8B/5TndwFeyvMf73p2Ra97T5yAnVl1F9MWpD+OuaTsvVYDxDcKmAbMJP2Rb0TqL3ko//E8CAxogDj/N/AiMAv4KemukMqvJ3AjqV/kfdIb2OGtXT/SHSKXku5keQ4YXXGcc0ntzjPydHlh+zNynHPId71UEWOL9fNZdRdTo13LfsDP8t/n08AuVV7LNuLcidR/9yzwJPDZzrieHmrDzMxKuYnJzMxKOUGYmVkpJwgzMyvlBGFmZqWcIMzMrJQThFVK0tt1OKYkPSxpw/a3/ljneURS3b/EXtJxSiPyXt+ifJSkvWrY/yxJ/9YJcTRJuvfjHse6DycI64n2Ap6NiLeqDqQ1hSfFa/GvwO6RHsosGkV6rV0iIpYBiyXt2FXntGo5QVjDyZ9Ub5M0NU875vKz8lj4j0h6RdJxrRziIPJTzpKG50/fV+bx/O+XtE5e95cagKSBedgHJB0q6Q6l73yYL+kYSSfmgQ+nSBpQONfBeQz+WYUx+NfLcT6V9xlbOO5ESQ+THrhr+bpPzMeZJen4XHY56cHBeySdUNi2H/B9YP98/v2Vvq/iDqVx/6dI+vuSc/wvSfdIWkfSlpLulTRd0mOS/iZvc63Sdwg8nq/zvoVD3JGvr/UGXfX0nydPZRPwdknZDcBOeX4Y8EKePwt4nPS09UDg90Dfkv1fBTbI88NJA9aNyss3A9/K84+QnyzNx5uf5w8lPY28AdBEGmX2yLzuQuD4wv5X5vnPk8fnB/6rcI7+wG9JAxEeSnry9SNPsAOfJT3puh6wPjAb2Cavm09+0rjFPocClxSWfwycmed3AWYUrtu/AceQEmfzdxs8BIzM8zuQho4BuJb0BPsapO89mFs4x2Dguar/bjx1zdSRaq5ZV9kN2LowaOaGktbP85Mijc3/nqSlpCG3F7bYf0BErCgsz4uIGXl+OilptGdyPsYKScuBu3L5c0Dxk/mNkMbol7Sh0re3fZE0qGNzu//apEQH8EBElH03wk7ALyLiHQBJtwP/CDxTQ6zFY3wjx/OwpI0L/TCHkIbf2Dsi3s/X8x+AWwrXea3Cse6IiA+B5yUVh4VfShp513oBJwhrRGsAYyLiT8XC/Eb2XqHoA8r/hldKWiO/wZXts07zdqxqZl27xTGK+3xYWP6wxTlbjlUTpPFvvhERc1rEvwNp6PUqPEfqsxhC+ha/NUjfuzGqle2Lr784vPXawB/rEqE1HPdBWCO6Hzi2eUFSa29irZlDardvz3xS0w6sGj22o/YHkLQTaaTM5aSv0Tw2j1CLpG1qOM5jwN5KI9uuB+yTy9qygtQMVjzGQfmcOwOvx6qO+meAI4CJkjbL5fMkfTNvL0mfqSHOrUgD11kv4ARhVVtX0sLCdCJwHDA6d7Y+DxzZwWNOIo22257zgKMkPUPqg1gdf8r7X04aVRPg/wB9gZmSZuflNkXE06S2/6dIo3FeFRHtNS9NJjXFzZC0P6mv4bOSZgLnsGpo8uZz/JrUFzFJ0kBSMjlc0rOkPo+x7b9cvkC6vtYLeDRX63G
UvsznuojYvepYehpJjwJjI+KNqmOx+nMNwnqcSF+IcmW9H5TrbSQ1ARc4OfQerkGYmVkp1yDMzKyUE4SZmZVygjAzs1JOEGZmVsoJwszMSv03vm81NMZs/QIAAAAASUVORK5CYII=\n", 268 | "text/plain": [ 269 | "
    " 270 | ] 271 | }, 272 | "metadata": {}, 273 | "output_type": "display_data" 274 | } 275 | ], 276 | "source": [ 277 | "import re\n", 278 | "import string\n", 279 | "\n", 280 | "# Custom Tokenizer\n", 281 | "re_tok = re.compile(f'([{string.punctuation}“”¨«»®´·º½¾¿¡§£₤‘’])')\n", 282 | "def tokenize(s): return re_tok.sub(r' \\1 ', s).split()\n", 283 | "\n", 284 | "# Plot sentence by lenght\n", 285 | "plt.hist([len(tokenize(s)) for s in texts], bins=50)\n", 286 | "plt.title('Tokens per sentence')\n", 287 | "plt.xlabel('Len (number of token)')\n", 288 | "plt.ylabel('# samples')\n", 289 | "plt.show()" 290 | ] 291 | }, 292 | { 293 | "cell_type": "markdown", 294 | "metadata": {}, 295 | "source": [ 296 | "## Data preprocessing\n", 297 | "\n", 298 | "For the input data, we will use sentences, starting with the first word and then adding each word one by one. The output data is always one word.\n", 299 | "Sentences follow the same logic as words. They also need the same input length. Instead of being capped by the vocabulary they are bound by maximum sentence length. If it’s shorter than the maximum length, you fill it up with empty words, a word with just zeros (a.k.a. Padding).\n", 300 | "\n", 301 | "![sentence](https://blog.floydhub.com/content/images/2018/04/one_hot_sentence.png)\n", 302 | "\n", 303 | "*The image above show one-hot encoding representation for each token, but we will use this representation only for the predictions. Image from the [Blog](https://blog.floydhub.com/turning-design-mockups-into-code-with-deep-learning/).*\n", 304 | "\n", 305 | "As you see, words are printed from right to left. This forces each word to change position for each training round. This allows the model to learn the sequence instead of memorizing the position of each word." 
306 | ] 307 | }, 308 | { 309 | "cell_type": "code", 310 | "execution_count": 8, 311 | "metadata": {}, 312 | "outputs": [], 313 | "source": [ 314 | "# Initialize the function to create the vocabulary \n", 315 | "tokenizer = Tokenizer(filters='', split=\" \", lower=False)\n", 316 | "# Create the vocabulary \n", 317 | "tokenizer.fit_on_texts([load_doc('bootstrap.vocab')])\n", 318 | "\n", 319 | "# Add one spot for the empty word in the vocabulary \n", 320 | "VOCAB_SIZE = len(tokenizer.word_index) + 1\n", 321 | "\n", 322 | "def preprocess_data(texts, features, max_sequence):\n", 323 | " X, y, image_data = list(), list(), list()\n", 324 | " sequences = tokenizer.texts_to_sequences(texts)\n", 325 | " for img_no, seq in enumerate(sequences):\n", 326 | " for i in range(1, len(seq)):\n", 327 | " # Add the sentence until the current count(i) and add the current count to the output\n", 328 | " in_seq, out_seq = seq[:i], seq[i]\n", 329 | " # Pad all the input token sentences to max_sequence\n", 330 | " in_seq = pad_sequences([in_seq], maxlen=max_sequence)[0]\n", 331 | " # Turn the output into one-hot encoding\n", 332 | " out_seq = to_categorical([out_seq], num_classes=VOCAB_SIZE)[0]\n", 333 | " # Add the corresponding image to the boostrap token file\n", 334 | " image_data.append(features[img_no])\n", 335 | " # Cap the input sentence to MAX_LEN tokens and add it\n", 336 | " X.append(in_seq[-MAX_LEN:])\n", 337 | " y.append(out_seq)\n", 338 | " return np.array(image_data), np.array(X), np.array(y)" 339 | ] 340 | }, 341 | { 342 | "cell_type": "markdown", 343 | "metadata": {}, 344 | "source": [ 345 | "As mentioned above we are using only 17 tokens (+1 which is for the `PAD`, this ensure that the text will have the same lenght) for encoding the HTML text. We included the `` and `` tag. These tags are cues for when the network starts its predictions and when to stop." 
346 | ] 347 | }, 348 | { 349 | "cell_type": "code", 350 | "execution_count": 9, 351 | "metadata": {}, 352 | "outputs": [ 353 | { 354 | "data": { 355 | "text/plain": [ 356 | "{',': 1,\n", 357 | " '{': 2,\n", 358 | " '}': 3,\n", 359 | " 'small-title': 4,\n", 360 | " 'text': 5,\n", 361 | " 'quadruple': 6,\n", 362 | " 'row': 7,\n", 363 | " 'btn-inactive': 8,\n", 364 | " 'btn-red': 9,\n", 365 | " 'btn-green': 10,\n", 366 | " 'btn-orange': 11,\n", 367 | " 'double': 12,\n", 368 | " '': 13,\n", 369 | " 'header': 14,\n", 370 | " 'btn-active': 15,\n", 371 | " '': 16,\n", 372 | " 'single': 17}" 373 | ] 374 | }, 375 | "execution_count": 9, 376 | "metadata": {}, 377 | "output_type": "execute_result" 378 | } 379 | ], 380 | "source": [ 381 | "# Show Vocabulary\n", 382 | "tokenizer.word_index " 383 | ] 384 | }, 385 | { 386 | "cell_type": "code", 387 | "execution_count": 10, 388 | "metadata": {}, 389 | "outputs": [], 390 | "source": [ 391 | "# Data generator, intended to be used in a call to model.fit_generator()\n", 392 | "def data_generator(descriptions, features, n_step, max_sequence):\n", 393 | " # loop until we finish training\n", 394 | " while 1:\n", 395 | " # loop over photo identifiers in the dataset\n", 396 | " for i in range(0, len(descriptions), n_step):\n", 397 | " Ximages, XSeq, y = list(), list(),list()\n", 398 | " for j in range(i, min(len(descriptions), i+n_step)):\n", 399 | " image = features[j]\n", 400 | " # retrieve text input\n", 401 | " desc = descriptions[j]\n", 402 | " # Generate input-output pairs\n", 403 | " in_img, in_seq, out_word = preprocess_data([desc], [image], max_sequence)\n", 404 | " for k in range(len(in_img)):\n", 405 | " Ximages.append(in_img[k])\n", 406 | " XSeq.append(in_seq[k])\n", 407 | " y.append(out_word[k])\n", 408 | " # yield this batch of samples to the model\n", 409 | " yield [[array(Ximages), array(XSeq)], array(y)]" 410 | ] 411 | }, 412 | { 413 | "cell_type": "markdown", 414 | "metadata": {}, 415 | "source": [ 416 | "## Model\n", 417 
| "\n", 418 | "The model is based on Beltramelli‘s [pix2code paper](https://arxiv.org/abs/1705.07962) and Jason Brownlee’s [image caption tutorials](https://machinelearningmastery.com/blog/page/2/).\n", 419 | "\n", 420 | "![full](https://blog.floydhub.com/content/images/2018/04/model_more_detail_alone.png)\n", 421 | "\n", 422 | "*Image from the [Blog](https://blog.floydhub.com/turning-design-mockups-into-code-with-deep-learning/)*\n", 423 | "\n", 424 | "We are learning a function which given an image, predicts one token, then uses the prediction[s] and the image as context for the next precitions until reaching the `` token. \n", 425 | "\n", 426 | "In the image below you can see an example where each row is one prediction. To the left are the images represented in their three color channels: red, green and blue and the previous words. Outside of the brackets, are the predictions one by one, ending with a red square to mark the end.\n", 427 | "\n", 428 | "![formal](https://blog.floydhub.com/content/images/2018/04/model_function.png)\n", 429 | "\n", 430 | "*Image from the [Blog](https://blog.floydhub.com/turning-design-mockups-into-code-with-deep-learning/)*" 431 | ] 432 | }, 433 | { 434 | "cell_type": "code", 435 | "execution_count": 11, 436 | "metadata": {}, 437 | "outputs": [ 438 | { 439 | "name": "stdout", 440 | "output_type": "stream", 441 | "text": [ 442 | "_________________________________________________________________\n", 443 | "Layer (type) Output Shape Param # \n", 444 | "=================================================================\n", 445 | "conv2d_1 (Conv2D) (None, 254, 254, 16) 448 \n", 446 | "_________________________________________________________________\n", 447 | "conv2d_2 (Conv2D) (None, 127, 127, 16) 2320 \n", 448 | "_________________________________________________________________\n", 449 | "conv2d_3 (Conv2D) (None, 127, 127, 32) 4640 \n", 450 | "_________________________________________________________________\n", 451 | "conv2d_4 (Conv2D) 
(None, 64, 64, 32) 9248 \n", 452 | "_________________________________________________________________\n", 453 | "conv2d_5 (Conv2D) (None, 64, 64, 64) 18496 \n", 454 | "_________________________________________________________________\n", 455 | "conv2d_6 (Conv2D) (None, 32, 32, 64) 36928 \n", 456 | "_________________________________________________________________\n", 457 | "conv2d_7 (Conv2D) (None, 32, 32, 128) 73856 \n", 458 | "_________________________________________________________________\n", 459 | "flatten_1 (Flatten) (None, 131072) 0 \n", 460 | "_________________________________________________________________\n", 461 | "dense_1 (Dense) (None, 1024) 134218752 \n", 462 | "_________________________________________________________________\n", 463 | "dropout_1 (Dropout) (None, 1024) 0 \n", 464 | "_________________________________________________________________\n", 465 | "dense_2 (Dense) (None, 1024) 1049600 \n", 466 | "_________________________________________________________________\n", 467 | "dropout_2 (Dropout) (None, 1024) 0 \n", 468 | "_________________________________________________________________\n", 469 | "repeat_vector_1 (RepeatVecto (None, 48, 1024) 0 \n", 470 | "=================================================================\n", 471 | "Total params: 135,414,288\n", 472 | "Trainable params: 135,414,288\n", 473 | "Non-trainable params: 0\n", 474 | "_________________________________________________________________\n", 475 | "__________________________________________________________________________________________________\n", 476 | "Layer (type) Output Shape Param # Connected to \n", 477 | "==================================================================================================\n", 478 | "input_2 (InputLayer) (None, 48) 0 \n", 479 | "__________________________________________________________________________________________________\n", 480 | "embedding_1 (Embedding) (None, 48, 50) 900 input_2[0][0] \n", 481 | 
"__________________________________________________________________________________________________\n", 482 | "input_1 (InputLayer) (None, 256, 256, 3) 0 \n", 483 | "__________________________________________________________________________________________________\n", 484 | "lstm_1 (LSTM) (None, 48, 128) 91648 embedding_1[0][0] \n", 485 | "__________________________________________________________________________________________________\n", 486 | "sequential_1 (Sequential) (None, 48, 1024) 135414288 input_1[0][0] \n", 487 | "__________________________________________________________________________________________________\n", 488 | "lstm_2 (LSTM) (None, 48, 128) 131584 lstm_1[0][0] \n", 489 | "__________________________________________________________________________________________________\n", 490 | "concatenate_1 (Concatenate) (None, 48, 1152) 0 sequential_1[1][0] \n", 491 | " lstm_2[0][0] \n", 492 | "__________________________________________________________________________________________________\n", 493 | "lstm_3 (LSTM) (None, 48, 512) 3409920 concatenate_1[0][0] \n", 494 | "__________________________________________________________________________________________________\n", 495 | "lstm_4 (LSTM) (None, 512) 2099200 lstm_3[0][0] \n", 496 | "__________________________________________________________________________________________________\n", 497 | "dense_3 (Dense) (None, 18) 9234 lstm_4[0][0] \n", 498 | "==================================================================================================\n", 499 | "Total params: 141,156,774\n", 500 | "Trainable params: 141,156,774\n", 501 | "Non-trainable params: 0\n", 502 | "__________________________________________________________________________________________________\n" 503 | ] 504 | } 505 | ], 506 | "source": [ 507 | "#Create the Image-encoder\n", 508 | "image_model = Sequential()\n", 509 | "image_model.add(Conv2D(16, (3, 3), padding='valid', activation='relu', input_shape=(256, 256, 3,)))\n", 510 | 
"image_model.add(Conv2D(16, (3,3), activation='relu', padding='same', strides=2))\n", 511 | "image_model.add(Conv2D(32, (3,3), activation='relu', padding='same'))\n", 512 | "image_model.add(Conv2D(32, (3,3), activation='relu', padding='same', strides=2))\n", 513 | "image_model.add(Conv2D(64, (3,3), activation='relu', padding='same'))\n", 514 | "image_model.add(Conv2D(64, (3,3), activation='relu', padding='same', strides=2))\n", 515 | "image_model.add(Conv2D(128, (3,3), activation='relu', padding='same'))\n", 516 | "\n", 517 | "image_model.add(Flatten())\n", 518 | "image_model.add(Dense(1024, activation='relu'))\n", 519 | "image_model.add(Dropout(0.3))\n", 520 | "image_model.add(Dense(1024, activation='relu'))\n", 521 | "image_model.add(Dropout(0.3))\n", 522 | "\n", 523 | "image_model.add(RepeatVector(MAX_LEN))\n", 524 | "\n", 525 | "visual_input = Input(shape=(256, 256, 3,))\n", 526 | "encoded_image = image_model(visual_input)\n", 527 | "\n", 528 | "#Create the Text-encoder\n", 529 | "language_input = Input(shape=(MAX_LEN,))\n", 530 | "language_model = Embedding(VOCAB_SIZE, 50, input_length=MAX_LEN, mask_zero=True)(language_input)\n", 531 | "language_model = LSTM(128, return_sequences=True)(language_model)\n", 532 | "language_model = LSTM(128, return_sequences=True)(language_model)\n", 533 | "\n", 534 | "#Create the decoder\n", 535 | "decoder = concatenate([encoded_image, language_model])\n", 536 | "decoder = LSTM(512, return_sequences=True)(decoder)\n", 537 | "decoder = LSTM(512, return_sequences=False)(decoder)\n", 538 | "decoder = Dense(VOCAB_SIZE, activation='softmax')(decoder)\n", 539 | "\n", 540 | "# Compile the model\n", 541 | "model = Model(inputs=[visual_input, language_input], outputs=decoder)\n", 542 | "image_model.summary()\n", 543 | "model.summary()" 544 | ] 545 | }, 546 | { 547 | "cell_type": "code", 548 | "execution_count": 12, 549 | "metadata": {}, 550 | "outputs": [], 551 | "source": [ 552 | "#Save the model for every 2nd epoch\n", 553 | 
"filepath=\"models/org-weights-epoch-{epoch:04d}--loss-{loss:.4f}.hdf5\"\n", 554 | "checkpoint = ModelCheckpoint(filepath, verbose=1, save_weights_only=True, period=2)\n", 555 | "callbacks_list = [checkpoint]\n", 556 | "\n", 557 | "# Optimizer\n", 558 | "optimizer = RMSprop(lr=0.0001, clipvalue=1.0)\n", 559 | "model.compile(loss='categorical_crossentropy', optimizer=optimizer)" 560 | ] 561 | }, 562 | { 563 | "cell_type": "markdown", 564 | "metadata": {}, 565 | "source": [ 566 | "## Train\n", 567 | "If you left the default hyperparameters in the Notebook untouched, your training should take approximately:\n", 568 | "\n", 569 | "- On GPU machine: 17-18 hours for 50 epochs.\n", 570 | "- On CPU: **not working because the model doesn't fit in memory**\n", 571 | "\n", 572 | "You can use CPU machine for the model evaluation." 573 | ] 574 | }, 575 | { 576 | "cell_type": "code", 577 | "execution_count": null, 578 | "metadata": {}, 579 | "outputs": [], 580 | "source": [ 581 | "model.fit_generator(data_generator(texts, train_features, 1, MAX_SEQUENCE), \n", 582 | " steps_per_epoch=1500, \n", 583 | " epochs=EPOCHS, \n", 584 | " callbacks=callbacks_list, \n", 585 | " verbose=2)" 586 | ] 587 | }, 588 | { 589 | "cell_type": "markdown", 590 | "metadata": {}, 591 | "source": [ 592 | "## Evaluate\n", 593 | "\n", 594 | "It's time to test the trained model. Otherwise you can load Emil's pretrained model from the dataset as shown below." 
595 | ] 596 | }, 597 | { 598 | "cell_type": "markdown", 599 | "metadata": {}, 600 | "source": [ 601 | "```python\n", 602 | "if model: del model\n", 603 | "\n", 604 | "# Load model and weights \n", 605 | "json_file = open('/floyd/input/pix2code/model.json', 'r')\n", 606 | "loaded_model_json = json_file.read()\n", 607 | "json_file.close()\n", 608 | "model = model_from_json(loaded_model_json)\n", 609 | "\n", 610 | "# Load weights into new model\n", 611 | "model.load_weights(\"/floyd/input/pix2code/weights.h5\")\n", 612 | "print(\"Loaded model from disk\")\n", 613 | "```" 614 | ] 615 | }, 616 | { 617 | "cell_type": "markdown", 618 | "metadata": {}, 619 | "source": [ 620 | "### Load the data for Evaluation" 621 | ] 622 | }, 623 | { 624 | "cell_type": "code", 625 | "execution_count": 15, 626 | "metadata": {}, 627 | "outputs": [], 628 | "source": [ 629 | "# map an integer to a word\n", 630 | "def word_for_id(integer, tokenizer):\n", 631 | " for word, index in tokenizer.word_index.items():\n", 632 | " if index == integer:\n", 633 | " return word\n", 634 | " return None\n", 635 | "\n", 636 | "# generate a description for an image\n", 637 | "def generate_desc(model, tokenizer, photo, max_length):\n", 638 | " photo = np.array([photo])\n", 639 | " # seed the generation process\n", 640 | " in_text = ' '\n", 641 | " # iterate over the whole length of the sequence\n", 642 | " print('\\nPrediction---->\\n\\n ', end='')\n", 643 | " for i in range(150):\n", 644 | " # integer encode input sequence\n", 645 | " sequence = tokenizer.texts_to_sequences([in_text])[0]\n", 646 | " # pad input\n", 647 | " sequence = pad_sequences([sequence], maxlen=max_length)\n", 648 | " # predict next word\n", 649 | " yhat = model.predict([photo, sequence], verbose=0)\n", 650 | " # convert probability to integer\n", 651 | " yhat = np.argmax(yhat)\n", 652 | " # map integer to word\n", 653 | " word = word_for_id(yhat, tokenizer)\n", 654 | " # stop if we cannot map the word\n", 655 | " if word is None:\n", 
656 | " break\n", 657 | " # append as input for generating the next word\n", 658 | " in_text += word + ' '\n", 659 | " # stop if we predict the end of the sequence\n", 660 | " print(word + ' ', end='')\n", 661 | " if word == '':\n", 662 | " break\n", 663 | " return in_text" 664 | ] 665 | }, 666 | { 667 | "cell_type": "code", 668 | "execution_count": 16, 669 | "metadata": {}, 670 | "outputs": [], 671 | "source": [ 672 | "train_features, texts = load_data(DS_EVAL_PATH)" 673 | ] 674 | }, 675 | { 676 | "cell_type": "markdown", 677 | "metadata": {}, 678 | "source": [ 679 | "### Test accuracy\n", 680 | "\n", 681 | "It’s tricky to find a fair way to measure the accuracy. Say you compare word by word. If your prediction is one word out of sync, you might have 0% accuracy. If you remove one word which syncs the prediction, you might end up with 99/100.\n", 682 | "\n", 683 | "Emil used the BLEU score, the best practice in machine translation and image captioning models. It breaks the sentence into four n-grams, from 1-4 word sequences. In the below prediction “cat” is supposed to be “code”.\n", 684 | "\n", 685 | "![blue](https://blog.floydhub.com/content/images/2018/04/bleu_score.png)\n", 686 | "\n", 687 | "To get the final score you multiply each score with 25%, (4/5) * 0.25 + (2/4) * 0.25 + (1/3) * 0.25 + (0/2) * 0.25 = 0.2 + 0.125 + 0.083 + 0 = 0.408 . The sum is then multiplied with a sentence length penalty. Since the length is correct in our example, it becomes our final score.\n", 688 | "\n", 689 | "You could increase the number of n-grams to make it harder. A four n-gram model is the model that best corresponds to human translations. I’d recommend running a few examples with the below code and reading the [wiki page](https://en.wikipedia.org/wiki/BLEU)." 
690 | ] 691 | }, 692 | { 693 | "cell_type": "code", 694 | "execution_count": 17, 695 | "metadata": {}, 696 | "outputs": [ 697 | { 698 | "name": "stdout", 699 | "output_type": "stream", 700 | "text": [ 701 | "\n", 702 | "Prediction---->\n", 703 | "\n", 704 | " header { btn-active , btn-inactive , btn-inactive , btn-inactive } row { single { small-title , text , btn-orange } } row { double { small-title , text , btn-orange } double { small-title , text , btn-orange } } \n", 705 | "\n", 706 | "Real---->\n", 707 | "\n", 708 | " header { btn-active , btn-inactive , btn-inactive , btn-inactive } row { single { small-title , text , btn-orange } } row { double { small-title , text , btn-orange } double { small-title , text , btn-orange } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } \n", 709 | "\n", 710 | "Prediction---->\n", 711 | "\n", 712 | " header { btn-active , btn-inactive , btn-inactive } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-red } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } row { single { small-title , text , btn-green } } \n", 713 | "\n", 714 | "Real---->\n", 715 | "\n", 716 | " header { btn-active , btn-inactive , btn-inactive } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-red } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } row { single { small-title , text , btn-green } } \n", 717 | "\n", 718 | "Prediction---->\n", 719 | "\n", 720 | " header { btn-inactive , btn-active , btn-inactive , btn-inactive , btn-inactive } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } 
quadruple { small-title , text , btn-orange } } row { single { small-title , text , btn-green } } \n", 721 | "\n", 722 | "Real---->\n", 723 | "\n", 724 | " header { btn-inactive , btn-active , btn-inactive , btn-inactive , btn-inactive } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } } row { single { small-title , text , btn-green } } \n", 725 | "\n", 726 | "Prediction---->\n", 727 | "\n", 728 | " header { btn-inactive , btn-inactive , btn-active } row { double { small-title , text , btn-green } double { small-title , text , btn-orange } } \n", 729 | "\n", 730 | "Real---->\n", 731 | "\n", 732 | " header { btn-inactive , btn-inactive , btn-active } row { double { small-title , text , btn-green } double { small-title , text , btn-orange } } \n", 733 | "\n", 734 | "Prediction---->\n", 735 | "\n", 736 | " header { btn-active , btn-inactive } row { single { small-title , text , btn-orange } } row { quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-green } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } \n", 737 | "\n", 738 | "Real---->\n", 739 | "\n", 740 | " header { btn-active , btn-inactive } row { single { small-title , text , btn-orange } } row { quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-green } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } \n", 741 | "\n", 742 | "Prediction---->\n", 743 | "\n", 744 | " header { btn-active , btn-inactive } row { single { small-title , text , btn-red } } row { quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } 
quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } } row { double { small-title , text , btn-green } double { small-title , text , btn-red } } \n", 745 | "\n", 746 | "Real---->\n", 747 | "\n", 748 | " header { btn-active , btn-inactive } row { single { small-title , text , btn-red } } row { quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } } row { double { small-title , text , btn-green } double { small-title , text , btn-red } } \n", 749 | "\n", 750 | "Prediction---->\n", 751 | "\n", 752 | " header { btn-inactive , btn-inactive , btn-active } row { quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } } row { quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-green } } \n", 753 | "\n", 754 | "Real---->\n", 755 | "\n", 756 | " header { btn-inactive , btn-inactive , btn-active } row { quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } } row { quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-red } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-red } } row { double { small-title , text , btn-red } double { small-title , text , btn-orange } } \n", 757 | "\n", 758 | "Prediction---->\n", 759 | "\n", 760 | " header { btn-inactive , btn-inactive , btn-active } row { single { small-title , text , btn-orange } } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-green } quadruple { small-title 
, text , btn-red } } row { double { small-title , text , btn-red } double { small-title , text , btn-green } } \n", 761 | "\n", 762 | "Real---->\n", 763 | "\n", 764 | " header { btn-inactive , btn-inactive , btn-active } row { single { small-title , text , btn-orange } } row { quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-orange } quadruple { small-title , text , btn-green } quadruple { small-title , text , btn-red } } row { double { small-title , text , btn-red } double { small-title , text , btn-green } } \n", 765 | "\n", 766 | "Prediction---->\n", 767 | "\n", 768 | " header { btn-inactive , btn-inactive , btn-active } row { double { small-title , text , btn-red } double { small-title , text , btn-red } } row { single { small-title , text , btn-orange } } row { double { small-title , text , btn-green } double { small-title , text , btn-green } } \n", 769 | "\n", 770 | "Real---->\n", 771 | "\n", 772 | " header { btn-inactive , btn-inactive , btn-active } row { double { small-title , text , btn-red } double { small-title , text , btn-red } } row { single { small-title , text , btn-orange } } row { double { small-title , text , btn-green } double { small-title , text , btn-green } } \n", 773 | "\n", 774 | "Prediction---->\n", 775 | "\n", 776 | " header { btn-inactive , btn-active } row { double { small-title , text , btn-orange } double { small-title , text , btn-red } } row { double { small-title , text , btn-orange } double { small-title , text , btn-red } } \n", 777 | "\n", 778 | "Real---->\n", 779 | "\n", 780 | " header { btn-inactive , btn-active } row { double { small-title , text , btn-orange } double { small-title , text , btn-red } } row { double { small-title , text , btn-orange } double { small-title , text , btn-green } } \n", 781 | "BLUE score: 0.9238882925948598\n" 782 | ] 783 | } 784 | ], 785 | "source": [ 786 | "from nltk.translate.bleu_score import corpus_bleu\n", 787 | "\n", 788 | "# Evaluate the skill of the 
model\n", 789 | "def evaluate_model(model, descriptions, photos, tokenizer, max_length):\n", 790 | " actual, predicted = list(), list()\n", 791 | " # step over the whole set\n", 792 | " for i in range(len(descriptions)):\n", 793 | " yhat = generate_desc(model, tokenizer, photos[i], max_length)\n", 794 | " # store actual and predicted\n", 795 | " print('\\n\\nReal---->\\n\\n' + texts[i])\n", 796 | " actual.append([texts[i].split()])\n", 797 | " predicted.append(yhat.split())\n", 798 | " # calculate BLEU score\n", 799 | " bleu = corpus_bleu(actual, predicted)\n", 800 | " return bleu, actual, predicted\n", 801 | "\n", 802 | "# Eval on the first 10 samples\n", 803 | "bleu, actual, predicted = evaluate_model(model, texts[:10], train_features[:10], tokenizer, MAX_LEN)\n", 804 | "print(\"BLUE score: \", bleu)" 805 | ] 806 | }, 807 | { 808 | "cell_type": "markdown", 809 | "metadata": {}, 810 | "source": [ 811 | "### Show results" 812 | ] 813 | }, 814 | { 815 | "cell_type": "code", 816 | "execution_count": 18, 817 | "metadata": {}, 818 | "outputs": [], 819 | "source": [ 820 | "from compiler.classes.Compiler import *\n", 821 | "\n", 822 | "#Compile the tokens into HTML and css\n", 823 | "dsl_path = \"compiler/assets/web-dsl-mapping.json\"\n", 824 | "compiler = Compiler(dsl_path)\n", 825 | "compiled_website = compiler.compile(actual[0][0], 'index.html')" 826 | ] 827 | }, 828 | { 829 | "cell_type": "code", 830 | "execution_count": 19, 831 | "metadata": {}, 832 | "outputs": [ 833 | { 834 | "name": "stdout", 835 | "output_type": "stream", 836 | "text": [ 837 | "HERE'S THE GENERATED HTML\n", 838 | " ---------------------------------------------------------------------------------------------------- \n", 839 | " \n", 840 | "
    \n", 841 | " \n", 842 | " \n", 843 | " \n", 844 | "\n", 845 | "\n", 848 | " Scaffold\n", 849 | "
    \n", 850 | " \n", 851 | "
    \n", 852 | "
    \n", 853 | " \n", 862 | "
    \n", 863 | "
    \n", 864 | "

    Zzprt

    rmn hgtvoxahrplodpu pvi omaw eviaeall hlbzfcrta mghmjibg

    \n", 865 | "Vxec Ctwlr\n", 866 | "\n", 867 | "
    \n", 868 | "
    \n", 869 | "
    \n", 870 | "

    Nmfag

    ivservvb jq cuibrnamkdeubnudxzw tvxxsgzsn jvf ltnsqa rd

    \n", 871 | "Jibpp Plta\n", 872 | "\n", 873 | "
    \n", 874 | "
    \n", 875 | "

    Ttppf

    tqh uyrqgx wxa vvnhufunpdrrttcndat iqknuzkdgfo k pdmkof

    \n", 876 | "Bx Xqsplse\n", 877 | "\n", 878 | "
    \n", 879 | "
    \n", 880 | "
    \n", 881 | "

    Skcym

    mnxt wogebkqik lchcjc vcfznqtwnvt qzywdvny bzhgep zlogue

    \n", 882 | "Aadp Pqktb\n", 883 | "
    \n", 884 | "
    \n", 885 | "

    Cqaic

    vekszebrbibic qshpdykwcsz ogdejc bmyceqizboimtiyo i ee

    \n", 886 | "Oz Zxhkdfg\n", 887 | "\n", 888 | "
    \n", 889 | "
    \n", 890 | "\n", 891 | "
    \n", 892 | "

    © Tony Beltramelli 2017

    \n", 893 | "
    \n", 894 | "
    \n", 895 | " \n", 896 | " \n", 897 | " \n", 898 | "\n", 899 | "\n" 900 | ] 901 | } 902 | ], 903 | "source": [ 904 | "print(\"HERE'S THE GENERATED HTML\\n\", \"-\"*100, \"\\n\", compiled_website)" 905 | ] 906 | }, 907 | { 908 | "cell_type": "code", 909 | "execution_count": 20, 910 | "metadata": {}, 911 | "outputs": [ 912 | { 913 | "data": { 914 | "text/html": [ 915 | "\n", 916 | "
    \n", 917 | " \n", 918 | " \n", 919 | " \n", 920 | "\n", 921 | "\n", 924 | " Scaffold\n", 925 | "
    \n", 926 | " \n", 927 | "
    \n", 928 | "
    \n", 929 | " \n", 938 | "
    \n", 939 | "
    \n", 940 | "

    Zzprt

    rmn hgtvoxahrplodpu pvi omaw eviaeall hlbzfcrta mghmjibg

    \n", 941 | "Vxec Ctwlr\n", 942 | "\n", 943 | "
    \n", 944 | "
    \n", 945 | "
    \n", 946 | "

    Nmfag

    ivservvb jq cuibrnamkdeubnudxzw tvxxsgzsn jvf ltnsqa rd

    \n", 947 | "Jibpp Plta\n", 948 | "\n", 949 | "
    \n", 950 | "
    \n", 951 | "

    Ttppf

    tqh uyrqgx wxa vvnhufunpdrrttcndat iqknuzkdgfo k pdmkof

    \n", 952 | "Bx Xqsplse\n", 953 | "\n", 954 | "
    \n", 955 | "
    \n", 956 | "
    \n", 957 | "

    Skcym

    mnxt wogebkqik lchcjc vcfznqtwnvt qzywdvny bzhgep zlogue

    \n", 958 | "Aadp Pqktb\n", 959 | "
    \n", 960 | "
    \n", 961 | "

    Cqaic

    vekszebrbibic qshpdykwcsz ogdejc bmyceqizboimtiyo i ee

    \n", 962 | "Oz Zxhkdfg\n", 963 | "\n", 964 | "
    \n", 965 | "
    \n", 966 | "\n", 967 | "
    \n", 968 | "

    © Tony Beltramelli 2017

    \n", 969 | "
    \n", 970 | "
    \n", 971 | " \n", 972 | " \n", 973 | " \n", 974 | "\n" 975 | ], 976 | "text/plain": [ 977 | "" 978 | ] 979 | }, 980 | "metadata": {}, 981 | "output_type": "display_data" 982 | } 983 | ], 984 | "source": [ 985 | "# SHOWTIME\n", 986 | "from IPython.core.display import display, HTML\n", 987 | "\n", 988 | "display(HTML(compiled_website))" 989 | ] 990 | }, 991 | { 992 | "cell_type": "markdown", 993 | "metadata": {}, 994 | "source": [ 995 | "##### That's all folks - don't forget to shutdown your workspace once you're done 🙂" 996 | ] 997 | } 998 | ], 999 | "metadata": { 1000 | "kernelspec": { 1001 | "display_name": "Python 2", 1002 | "language": "python", 1003 | "name": "python2" 1004 | }, 1005 | "language_info": { 1006 | "codemirror_mode": { 1007 | "name": "ipython", 1008 | "version": 2 1009 | }, 1010 | "file_extension": ".py", 1011 | "mimetype": "text/x-python", 1012 | "name": "python", 1013 | "nbconvert_exporter": "python", 1014 | "pygments_lexer": "ipython2", 1015 | "version": "2.7.10" 1016 | } 1017 | }, 1018 | "nbformat": 4, 1019 | "nbformat_minor": 2 1020 | } 1021 | --------------------------------------------------------------------------------