├── app.yaml ├── digit.mp4 ├── digit_demo.gif ├── checkpoint └── mnist.pt ├── static ├── screenshot.PNG ├── style.css └── jquery.min.js ├── requirements.txt ├── torch_serve ├── mnist_jit.pt ├── model_store │ └── mnistmodel.mar ├── model.py ├── server.py └── index_to_name.json ├── config.py ├── test_app.py ├── utils.py ├── README.md ├── app.py ├── templates └── default.html └── train.py /app.yaml: -------------------------------------------------------------------------------- 1 | entrypoint: "gunicorn app:app --config=config.py" 2 | env: flex 3 | runtime: python37 -------------------------------------------------------------------------------- /digit.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/digit.mp4 -------------------------------------------------------------------------------- /digit_demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/digit_demo.gif -------------------------------------------------------------------------------- /checkpoint/mnist.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/checkpoint/mnist.pt -------------------------------------------------------------------------------- /static/screenshot.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/static/screenshot.PNG -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | torch 2 | numpy==1.16.5 3 | flask==1.1.1 4 | gunicorn 5 | matplotlib==3.3.1 6 | pillow==6.2.0 7 | flake8 8 | pip 9 | pylint -------------------------------------------------------------------------------- /torch_serve/mnist_jit.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/torch_serve/mnist_jit.pt -------------------------------------------------------------------------------- /torch_serve/model_store/mnistmodel.mar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Prajwal10031999/Handwritten-Digit-Recognition-CNN-Flask-App-/HEAD/torch_serve/model_store/mnistmodel.mar -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | from os import environ as env 2 | import multiprocessing 3 | 4 | HOST = "127.0.0.1" 5 | PORT = int(env.get("PORT", 5000)) 6 | DEBUG_MODE = int(env.get("DEBUG_MODE", 0)) 7 | 8 | # Gunicorn config 9 | bind = ":" + str(PORT) 10 | workers = multiprocessing.cpu_count() * 2 + 1 11 | threads = 2 * multiprocessing.cpu_count() -------------------------------------------------------------------------------- /test_app.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from app import app 3 | 4 | 5 | class Tests(unittest.TestCase): 6 | '''Basic tests for the application''' 7 | 8 | def setUp(self): 
9 | '''Create a test client for the app''' 10 | self.app = app.test_client() 11 | 12 | def test_200(self): 13 | '''test_200: a request for / shall return 200 OK''' 14 | res = self.app.get('/') 15 | assert res.status == '200 OK' 16 | 17 | def test_404(self): 18 | '''test_404: a request for null shall return 404 NOT FOUND''' 19 | res = self.app.get('/null') 20 | assert res.status == '404 NOT FOUND' 21 | 22 | 23 | if __name__ == "__main__": 24 | unittest.main() 25 | -------------------------------------------------------------------------------- /static/style.css: -------------------------------------------------------------------------------- 1 | /* Ripple effect */ 2 | .ripple { 3 | background-position: center; 4 | transition: background 0.8s; 5 | } 6 | /* .ripple:hover { 7 | 8 | } */ 9 | .ripple:active { 10 | background: #25282b radial-gradient(circle, transparent 1%, #47a7f5 1%) center/15000%; 11 | /* background-color: #6eb9f7; */ 12 | background-size: 100%; 13 | transition: background 0s; 14 | } 15 | 16 | /* Button style */ 17 | button { 18 | border: none; 19 | border-radius: 2px; 20 | padding: 12px 18px; 21 | font-size: 16px; 22 | text-transform: uppercase; 23 | cursor: pointer; 24 | color: white; 25 | background-color: #2196f3; 26 | box-shadow: 0 0 4px #999; 27 | outline: none; 28 | } 29 | 30 | /* Span style */ 31 | #text { 32 | color: #4DAF74; 33 | } 34 | 35 | /* Canvas style */ 36 | #can { 37 | margin-top: 10; 38 | border:5px solid; 39 | } 40 | 41 | /* canvasimage style */ 42 | #canvasimg{ 43 | position:absolute; 44 | display:none; 45 | } 46 | 47 | body{ 48 | font-family: sans-serif; 49 | } 50 | 51 | #side{ 52 | float: left; 53 | padding-left:15%; 54 | } -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import time 2 | import torch 3 | import random 4 | import functools 5 | import numpy as np 6 | from typing import Any, Callable, TypeVar, cast 7 | 8 | 9 | def random_seed(seed_value: int) -> None: 10 | """ 11 | Random Seeds Numpy, Random and Torch libraries 12 | 13 | Args: 14 | seed_value (int): Number for seeding 15 | """ 16 | np.random.seed(seed_value) # cpu vars 17 | torch.manual_seed(seed_value) # cpu vars 18 | random.seed(seed_value) # Python 19 | if torch.cuda.is_available(): 20 | torch.cuda.manual_seed(seed_value) 21 | torch.cuda.manual_seed_all(seed_value) # gpu vars 22 | torch.backends.cudnn.deterministic = True # needed 23 | torch.backends.cudnn.benchmark = False 24 | 25 | 26 | F = TypeVar('F', bound=Callable[..., Any]) 27 | 28 | 29 | def timer(func: F) -> F: 30 | """ Print the runtime of the decorated function """ 31 | @functools.wraps(func) 32 | def wrapper_timer(*args, **kwargs): 33 | start_time = time.perf_counter() 34 | value = func(*args, **kwargs) 35 | _ = time.perf_counter() - start_time 36 | hours, _ = divmod(_, 3600) 37 | minutes, seconds = divmod(_, 60) 38 | 39 | print(f'Execution time of function {func.__name__!r}: {hours:.0f} hrs {minutes:.0f} mins {seconds:.3f} secs') 40 | return value 41 | return cast(F, wrapper_timer) 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Handwritten-Digit-Recognition-CNN-Flask-App- 2 | A Convolutional Neural Network model created using PyTorch library over the MNIST dataset to recognize handwritten digits .
3 | 4 | The dataset can be downloaded from here:
5 | https://drive.google.com/drive/folders/1z4iFh1gJiRS3BpdzhYwf9tZGbh__CDNg?usp=sharing
6 | Download the dataset and store it in a new folder named "data" in the main directory. 7 |
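Before running `train.py`, it may help to confirm the download landed where the code expects. A minimal sketch (not part of the repo) that assumes the Drive folder provides the Kaggle-style `train.csv`/`test.csv` that `train.py` reads from `data/`:

```python
# Sanity-check sketch: confirm the CSVs are where train.py will look for them
# and have the layout train.py assumes (a 'label' column plus 28*28 pixel columns).
import pandas as pd

train_df = pd.read_csv("data/train.csv")   # train.py builds this path from data_path='data/'
test_df = pd.read_csv("data/test.csv")

assert "label" in train_df.columns         # train.py uses target='label'
assert train_df.shape[1] == 785            # 1 label column + 784 pixel columns
assert test_df.shape[1] == 784             # the test set has no label column
print(train_df.shape, test_df.shape)
```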
8 | 9 | MNIST is an acronym for the Modified National Institute of Standards and Technology dataset. 10 | 11 | It is a dataset of 60,000 small square 28×28 pixel grayscale images of handwritten single digits between 0 and 9. 12 | 13 | The task is to classify a given image of a handwritten digit into one of 10 classes representing the integer values 0 to 9, inclusive. 14 | 15 | It is a widely used and deeply understood dataset and, for the most part, is “solved.” Top-performing models are deep learning convolutional neural networks that achieve a classification accuracy above 99%, with an error rate between 0.4% and 0.2% on the held-out test dataset.
16 |
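To make the 28×28 layout concrete, here is an illustrative sketch (not from the repo) of how one flattened row of pixel values becomes the normalized `(1, 1, 28, 28)` float tensor the CNN consumes, mirroring the reshape-and-divide-by-255 preprocessing in `train.py` and `app.py`:

```python
# Illustrative only: one flattened 784-value row -> the tensor shape the model expects.
import numpy as np
import torch

row = np.random.randint(0, 256, size=784)        # stand-in for one row of pixel columns
img = row.reshape(1, 28, 28) / 255               # same normalization used in train.py
x = torch.as_tensor(img, dtype=torch.float).unsqueeze(0)  # add batch dim -> (1, 1, 28, 28)
print(x.shape)                                   # torch.Size([1, 1, 28, 28])
```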
17 | 18 | The handwritten digit recognition is the capability of computer 19 | applications to recognize the human handwritten digits. It is a 20 | hard task for the machine because handwritten digits are not 21 | perfect and can be made with many different shapes and sizes. 22 | The handwritten digit recognition system is a way to tackle this 23 | problem which uses the image of a digit and recognizes the 24 | digit present in the image. 25 | 26 | ![](digit_demo.gif) 27 | -------------------------------------------------------------------------------- /torch_serve/model.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | 5 | class MnistModel(nn.Module): 6 | """ 7 | Custom CNN Model for Mnist 8 | """ 9 | 10 | def __init__(self, classes: int) -> None: 11 | super(MnistModel, self).__init__() 12 | 13 | self.classes = classes 14 | 15 | # initialize the layers in the first (CONV => RELU) * 2 => POOL + DROP 16 | # (N,1,28,28) -> (N,16,24,24) 17 | self.conv1A = nn.Conv2d( 18 | in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=0) 19 | # (N,16,24,24) -> (N,32,20,20) 20 | self.conv1B = nn.Conv2d( 21 | in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=0) 22 | # (N,32,20,20) -> (N,32,10,10) 23 | self.pool1 = nn.MaxPool2d(kernel_size=2) 24 | self.act = nn.ReLU() 25 | self.do = nn.Dropout(0.25) 26 | 27 | # initialize the layers in the second (CONV => RELU) * 2 => POOL + DROP 28 | # (N,32,10,10) -> (N,64,8,8) 29 | self.conv2A = nn.Conv2d( 30 | in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=0) 31 | # (N,64,8,8) -> (N,128,6,6) 32 | self.conv2B = nn.Conv2d( 33 | in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=0) 34 | # (N,128,6,6) -> (N,128,3,3) 35 | self.pool2 = nn.MaxPool2d(kernel_size=2) 36 | 37 | # initialize the layers in our fully-connected layer set 38 | # (N,128,3,3) -> (N,32) 39 | self.dense3 = nn.Linear(128*3*3, 32) 40 | 41 | # initialize the layers in the softmax classifier layer set 42 | # (N, classes) 43 | self.dense4 = nn.Linear(32, self.classes) 44 | 45 | def forward(self, x: torch.Tensor) -> torch.Tensor: 46 | 47 | # build the first (CONV => RELU) * 2 => POOL layer set 48 | x = self.conv1A(x) 49 | x = self.act(x) 50 | x = self.conv1B(x) 51 | x = self.act(x) 52 | x = self.pool1(x) 53 | x = self.do(x) 54 | 55 | # build the second (CONV => RELU) * 2 => POOL layer set 56 | x = self.conv2A(x) 57 | x = self.act(x) 58 | x = self.conv2B(x) 59 | x = self.act(x) 60 | x = self.pool2(x) 61 | x = self.do(x) 62 | 63 | # build our FC layer set 64 | x = x.view(x.size(0), -1) 65 | x = self.dense3(x) 66 | x = self.act(x) 67 | x = self.do(x) 68 | 69 | # build the softmax classifier 70 | x = nn.functional.log_softmax(self.dense4(x), dim=1) 71 | 72 | return x 73 | -------------------------------------------------------------------------------- /torch_serve/server.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | 5 | class MnistModel(nn.Module): 6 | """ 7 | Custom CNN Model for Mnist 8 | """ 9 | 10 | def __init__(self, classes: int) -> None: 11 | super(MnistModel, self).__init__() 12 | 13 | self.classes = classes 14 | 15 | # initialize the layers in the first (CONV => RELU) * 2 => POOL + DROP 16 | # (N,1,28,28) -> (N,16,24,24) 17 | self.conv1A = nn.Conv2d( 18 | in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=0) 19 | # (N,16,24,24) -> (N,32,20,20) 20 | self.conv1B 
= nn.Conv2d( 21 | in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=0) 22 | # (N,32,20,20) -> (N,32,10,10) 23 | self.pool1 = nn.MaxPool2d(kernel_size=2) 24 | self.act = nn.ReLU() 25 | self.do = nn.Dropout(0.25) 26 | 27 | # initialize the layers in the second (CONV => RELU) * 2 => POOL + DROP 28 | # (N,32,10,10) -> (N,64,8,8) 29 | self.conv2A = nn.Conv2d( 30 | in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=0) 31 | # (N,64,8,8) -> (N,128,6,6) 32 | self.conv2B = nn.Conv2d( 33 | in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=0) 34 | # (N,128,6,6) -> (N,128,3,3) 35 | self.pool2 = nn.MaxPool2d(kernel_size=2) 36 | 37 | # initialize the layers in our fully-connected layer set 38 | # (N,128,3,3) -> (N,32) 39 | self.dense3 = nn.Linear(128*3*3, 32) 40 | 41 | # initialize the layers in the softmax classifier layer set 42 | # (N, classes) 43 | self.dense4 = nn.Linear(32, self.classes) 44 | 45 | def forward(self, x: torch.Tensor) -> torch.Tensor: 46 | 47 | # build the first (CONV => RELU) * 2 => POOL layer set 48 | x = self.conv1A(x) 49 | x = self.act(x) 50 | x = self.conv1B(x) 51 | x = self.act(x) 52 | x = self.pool1(x) 53 | x = self.do(x) 54 | 55 | # build the second (CONV => RELU) * 2 => POOL layer set 56 | x = self.conv2A(x) 57 | x = self.act(x) 58 | x = self.conv2B(x) 59 | x = self.act(x) 60 | x = self.pool2(x) 61 | x = self.do(x) 62 | 63 | # build our FC layer set 64 | x = x.view(x.size(0), -1) 65 | x = self.dense3(x) 66 | x = self.act(x) 67 | x = self.do(x) 68 | 69 | # build the softmax classifier 70 | x = nn.functional.log_softmax(self.dense4(x), dim=1) 71 | 72 | return x 73 | 74 | 75 | model = MnistModel(classes=10) 76 | model.load_state_dict(torch.load("checkpoint/mnist.pt", 77 | map_location=torch.device('cpu'))) 78 | model.eval() 79 | 80 | 81 | s = torch.jit.script(model) 82 | torch.jit.save(s, "checkpoint/mnist_jit.pt") 83 | 84 | """ 85 | torch-model-archiver --model-name mnistmodel 86 | --version 1.0 87 | --serialized-file mnist_jit.pt 88 | --model-file model.py 89 | --handler image_classifier 90 | --extra-files index_to_name.json 91 | """ 92 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import base64 3 | import config 4 | import matplotlib 5 | import numpy as np 6 | from PIL import Image 7 | from io import BytesIO 8 | from train import MnistModel 9 | import matplotlib.pyplot as plt 10 | from flask import Flask, request, render_template, jsonify 11 | matplotlib.use('Agg') 12 | 13 | 14 | MODEL = None 15 | DEVICE = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') 16 | 17 | app = Flask(__name__) 18 | 19 | 20 | class SaveOutput: 21 | def __init__(self): 22 | self.outputs = [] 23 | 24 | def __call__(self, module, module_in, module_out): 25 | self.outputs.append(module_out) 26 | 27 | def clear(self): 28 | self.outputs = [] 29 | 30 | 31 | def register_hook(): 32 | save_output = SaveOutput() 33 | hook_handles = [] 34 | 35 | for layer in MODEL.modules(): 36 | if isinstance(layer, torch.nn.modules.conv.Conv2d): 37 | handle = layer.register_forward_hook(save_output) 38 | hook_handles.append(handle) 39 | return save_output 40 | 41 | 42 | def module_output_to_numpy(tensor): 43 | return tensor.detach().to('cpu').numpy() 44 | 45 | 46 | def autolabel(rects, ax): 47 | """Attach a text label above each bar in *rects*, displaying its height.""" 48 | for rect in rects: 49 | height = 
rect.get_height() 50 | ax.annotate('{0:.2f}'.format(height), 51 | xy=(rect.get_x() + rect.get_width() / 2, height), 52 | xytext=(0, 3), # 3 points vertical offset 53 | textcoords="offset points", 54 | ha='center', va='bottom') 55 | 56 | 57 | def prob_img(probs): 58 | fig, ax = plt.subplots() 59 | rects = ax.bar(range(len(probs)), probs) 60 | ax.set_xticks(range(len(probs)), (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)) 61 | ax.set_ylim(0, 110) 62 | ax.set_title('Probability % of Digit by Model') 63 | autolabel(rects, ax) 64 | probimg = BytesIO() 65 | fig.savefig(probimg, format='png') 66 | probencoded = base64.b64encode(probimg.getvalue()).decode('utf-8') 67 | return probencoded 68 | 69 | 70 | def interpretability_img(save_output): 71 | images = module_output_to_numpy(save_output.outputs[0]) 72 | with plt.style.context("seaborn-white"): 73 | fig, _ = plt.subplots(figsize=(20, 20)) 74 | plt.suptitle("Interpretability by Model", fontsize=50) 75 | for idx in range(16): 76 | plt.subplot(4, 4, idx+1) 77 | plt.imshow(images[0, idx]) 78 | plt.setp(plt.gcf().get_axes(), xticks=[], yticks=[]) 79 | interpretimg = BytesIO() 80 | fig.savefig(interpretimg, format='png') 81 | interpretencoded = base64.b64encode( 82 | interpretimg.getvalue()).decode('utf-8') 83 | return interpretencoded 84 | 85 | 86 | def mnist_prediction(img): 87 | save_output = register_hook() 88 | img = img.to(DEVICE, dtype=torch.float) 89 | outputs = MODEL(x=img) 90 | 91 | probs = torch.exp(outputs.data)[0] * 100 92 | probencoded = prob_img(probs) 93 | interpretencoded = interpretability_img(save_output) 94 | 95 | _, output = torch.max(outputs.data, 1) 96 | pred = module_output_to_numpy(output) 97 | return pred[0], probencoded, interpretencoded 98 | 99 | 100 | @app.route("/process", methods=["GET", "POST"]) 101 | def process(): 102 | data_url = str(request.get_data()) 103 | offset = data_url.index(',')+1 104 | img_bytes = base64.b64decode(data_url[offset:]) 105 | img = Image.open(BytesIO(img_bytes)) 106 | img = img.convert('L') 107 | img = img.resize((28, 28)) 108 | # img.save(r'templates\image.png') 109 | img = np.array(img) 110 | img = img.reshape((1, 28, 28)) 111 | img = torch.tensor(img, dtype=torch.float).unsqueeze(0) 112 | 113 | data, probencoded, interpretencoded = mnist_prediction(img) 114 | 115 | response = { 116 | 'data': str(data), 117 | 'probencoded': str(probencoded), 118 | 'interpretencoded': str(interpretencoded), 119 | } 120 | return jsonify(response) 121 | 122 | 123 | @app.route("/", methods=["GET", "POST"]) 124 | def start(): 125 | return render_template("default.html") 126 | 127 | 128 | if __name__ == "__main__": 129 | MODEL = MnistModel(classes=10) 130 | MODEL.load_state_dict(torch.load( 131 | 'checkpoint/mnist.pt', map_location=DEVICE)) 132 | MODEL.to(DEVICE) 133 | MODEL.eval() 134 | app.run(host=config.HOST, port=config.PORT, debug=config.DEBUG_MODE) 135 | -------------------------------------------------------------------------------- /templates/default.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 113 | 114 | 115 |
[templates/default.html: the HTML/JavaScript markup was lost during extraction. Recoverable text from the page: the heading "Handwritten Digit Recognition using PyTorch CNN" and the canvas prompt "Draw a Digit in the center of the Box..".]
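For reference, the drawing page above submits the canvas contents to the Flask `/process` route as a `data:image/png;base64,...` string, and `process()` in `app.py` decodes everything after the first comma. A hedged client sketch of the same exchange; it assumes the `requests` package (not listed in `requirements.txt`), a hypothetical local image file `digit.png`, and the server running on the `config.py` defaults:

```python
# Client sketch (not part of the repo): POST a base64-encoded PNG to /process the same
# way the drawing page does, then read the JSON response produced by app.py.
import base64
import requests  # assumption: installed separately; not in requirements.txt

with open("digit.png", "rb") as f:  # hypothetical digit image; app.py resizes it to 28x28
    data_url = "data:image/png;base64," + base64.b64encode(f.read()).decode("utf-8")

resp = requests.post("http://127.0.0.1:5000/process", data=data_url)
payload = resp.json()
print("predicted digit:", payload["data"])
# 'probencoded' and 'interpretencoded' hold base64 PNGs of the probability bar chart
# and the first conv layer's feature maps generated in app.py.
print(sorted(payload.keys()))
```

The `/` route only renders the page; all prediction work happens in `/process`, which is why the response carries the rendered charts as base64 strings inside the JSON.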
133 | 134 | 135 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn, optim 3 | from torch.utils import data 4 | 5 | from utils import * 6 | import pandas as pd 7 | import numpy as np 8 | from os import makedirs 9 | from typing import Union 10 | import matplotlib.pyplot as plt 11 | from dataclasses import dataclass 12 | 13 | import warnings 14 | warnings.filterwarnings('ignore') 15 | 16 | 17 | class MnistModel(nn.Module): 18 | """ 19 | Custom CNN Model for Mnist 20 | """ 21 | 22 | def __init__(self, classes: int) -> None: 23 | super(MnistModel, self).__init__() 24 | 25 | self.classes = classes 26 | 27 | # initialize the layers in the first (CONV => RELU) * 2 => POOL + DROP 28 | # (N,1,28,28) -> (N,16,24,24) 29 | self.conv1A = nn.Conv2d( 30 | in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=0) 31 | # (N,16,24,24) -> (N,32,20,20) 32 | self.conv1B = nn.Conv2d( 33 | in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=0) 34 | # (N,32,20,20) -> (N,32,10,10) 35 | self.pool1 = nn.MaxPool2d(kernel_size=2) 36 | self.act = nn.ReLU() 37 | self.do = nn.Dropout(0.25) 38 | 39 | # initialize the layers in the second (CONV => RELU) * 2 => POOL + DROP 40 | # (N,32,10,10) -> (N,64,8,8) 41 | self.conv2A = nn.Conv2d( 42 | in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=0) 43 | # (N,64,8,8) -> (N,128,6,6) 44 | self.conv2B = nn.Conv2d( 45 | in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=0) 46 | # (N,128,6,6) -> (N,128,3,3) 47 | self.pool2 = nn.MaxPool2d(kernel_size=2) 48 | 49 | # initialize the layers in our fully-connected layer set 50 | # (N,128,3,3) -> (N,32) 51 | self.dense3 = nn.Linear(128*3*3, 32) 52 | 53 | # initialize the layers in the softmax classifier layer set 54 | # (N, classes) 55 | self.dense4 = nn.Linear(32, self.classes) 56 | 57 | def forward(self, x: torch.Tensor) -> torch.Tensor: 58 | 59 | # build the first (CONV => RELU) * 2 => POOL layer set 60 | x = self.conv1A(x) 61 | x = self.act(x) 62 | x = self.conv1B(x) 63 | x = self.act(x) 64 | x = self.pool1(x) 65 | x = self.do(x) 66 | 67 | # build the second (CONV => RELU) * 2 => POOL layer set 68 | x = self.conv2A(x) 69 | x = self.act(x) 70 | x = self.conv2B(x) 71 | x = self.act(x) 72 | x = self.pool2(x) 73 | x = self.do(x) 74 | 75 | # build our FC layer set 76 | x = x.view(x.size(0), -1) 77 | x = self.dense3(x) 78 | x = self.act(x) 79 | x = self.do(x) 80 | 81 | # build the softmax classifier 82 | x = nn.functional.log_softmax(self.dense4(x), dim=1) 83 | 84 | return x 85 | 86 | 87 | class MnistDataset(data.Dataset): 88 | """ 89 | Custom Dataset for Mnist 90 | """ 91 | 92 | def __init__(self, df: pd.DataFrame, target: np.array, test: bool = False) -> None: 93 | self.df = df 94 | self.test = test 95 | 96 | # if test=True skip this step 97 | if not self.test: 98 | self.df_targets = target 99 | 100 | def __len__(self) -> int: 101 | # return length of the dataset 102 | return len(self.df) 103 | 104 | def __getitem__(self, idx: int) -> Union[tuple, torch.Tensor]: 105 | # if indexes are in tensor, convert to list 106 | if torch.is_tensor(idx): 107 | idx = idx.tolist() 108 | 109 | # if test=False return bunch of images, targets 110 | if not self.test: 111 | return torch.as_tensor(self.df[idx].astype(float)), self.df_targets[idx] 112 | # if test=True return only images 113 | else: 114 | return 
torch.as_tensor(self.df[idx].astype(float)) 115 | 116 | 117 | def loss_fn(outputs: torch.Tensor, targets: torch.Tensor) -> torch.Tensor: 118 | """ 119 | Loss Function 120 | 121 | Args: 122 | outputs (torch.Tensor): Predicted Labels 123 | targets (torch.Tensor): True Labels 124 | 125 | Returns: 126 | torch.Tensor: NLLLoss value 127 | """ 128 | return nn.NLLLoss()(outputs, targets) 129 | 130 | 131 | def train_loop_fn(data_loader, model, optimizer, device, scheduler=None): 132 | """ 133 | Training Loop 134 | 135 | Args: 136 | data_loader: Train Data Loader 137 | model: NN Model 138 | optimizer: Optimizer 139 | device: Device (CPU/CUDA) 140 | scheduler: Scheduler. Defaults to None. 141 | """ 142 | # set model to train 143 | model.train() 144 | # iterate over data loader 145 | train_loss = [] 146 | for ids, targets in data_loader: 147 | # sending to device (cpu/gpu) 148 | ids = ids.to(device, dtype=torch.float) 149 | targets = targets.to(device, dtype=torch.long) 150 | 151 | # Clear gradients w.r.t. parameters 152 | optimizer.zero_grad() 153 | # Forward pass to get output/logits 154 | outputs = model(x=ids) 155 | # Calculate Loss: softmax --> negative log likelihood loss 156 | loss = loss_fn(outputs, targets) 157 | train_loss.append(loss) 158 | # Getting gradients w.r.t. parameters 159 | loss.backward() 160 | optimizer.step() 161 | 162 | if scheduler is not None: 163 | # Updating scheduler 164 | if type(scheduler).__name__ == 'ReduceLROnPlateau': 165 | scheduler.step(loss) 166 | else: 167 | scheduler.step() 168 | print(f"Loss on Train Data : {sum(train_loss)/len(train_loss)}") 169 | 170 | 171 | def eval_loop_fn(data_loader, model, device): 172 | """ 173 | Evaluation Loop 174 | 175 | Args: 176 | data_loader: Evaluation Data Loader 177 | model: NN Model 178 | device: Device (CPU/CUDA) 179 | 180 | Returns: 181 | List of Target Labels and True Labels 182 | """ 183 | # full list of targets, outputs 184 | fin_targets = [] 185 | fin_outputs = [] 186 | # set model to eveluate 187 | model.eval() # as model is set to eval, there will be no optimizer and scheduler update 188 | 189 | # iterate over data loader 190 | for _, (ids, targets) in enumerate(data_loader): 191 | ids = ids.to(device, dtype=torch.float) 192 | targets = targets.to(device, dtype=torch.long) 193 | 194 | outputs = model(x=ids) 195 | loss = loss_fn(outputs, targets) 196 | loss.backward() 197 | 198 | # Get predictions from the maximum value 199 | _, outputs = torch.max(outputs.data, 1) 200 | 201 | # appending the values to final lists 202 | fin_targets.append(targets.cpu().detach().numpy()) 203 | fin_outputs.append(outputs.cpu().detach().numpy()) 204 | return np.vstack(fin_outputs), np.vstack(fin_targets) 205 | 206 | 207 | def test_loop_fn(test, model, device): 208 | """ 209 | Testing Loop 210 | 211 | Args: 212 | test: Test DataFrame 213 | model: NN Model 214 | device: Device (CPU/CUDA) 215 | 216 | Returns: 217 | List of Predicted Labels 218 | """ 219 | model.eval() 220 | # convert test data to FloatTensor 221 | test = torch.as_tensor(test) 222 | test = test.to(device, dtype=torch.float) 223 | 224 | # Get predictions 225 | pred = model(test) 226 | # Get predictions from the maximum value 227 | _, predlabel = torch.max(pred.data, 1) 228 | # converting to list 229 | predlabel = predlabel.tolist() 230 | 231 | # Plotting the predicted results 232 | L = 5 233 | W = 5 234 | _, axes = plt.subplots(L, W, figsize=(12, 12)) 235 | axes = axes.ravel() 236 | 237 | for i in np.arange(0, L * W): 238 | 
axes[i].imshow(test[i].cpu().detach().numpy().reshape(28, 28)) 239 | axes[i].set_title("Prediction Class = {:0.1f}".format(predlabel[i])) 240 | axes[i].axis('off') 241 | 242 | plt.suptitle('Predictions on Test Data') 243 | plt.subplots_adjust(wspace=0.5) 244 | plt.show() 245 | 246 | return predlabel 247 | 248 | 249 | @timer 250 | def run(args): 251 | """ 252 | Function where all the magic happens 253 | 254 | Args: 255 | args: Arguments for Training 256 | 257 | Returns: 258 | List of Predicted Labels 259 | """ 260 | # reading train and test data 261 | print('Reading Data..') 262 | dfx = pd.read_csv(args.data_path+'train.csv') 263 | df_test = pd.read_csv(args.data_path+'test.csv') 264 | 265 | classes = dfx[args.target].nunique() 266 | 267 | print('Data Wrangling..') 268 | # spliting train data to train, validate 269 | split_idx = int(len(dfx) * 0.8) 270 | df_train = dfx[:split_idx].reset_index(drop=True) 271 | df_valid = dfx[split_idx:].reset_index(drop=True) 272 | 273 | # target labels 274 | train_targets = df_train[args.target].values 275 | valid_targets = df_valid[args.target].values 276 | 277 | # reshaping data to 28 x 28 images and normalize 278 | df_train = df_train.drop(args.target, axis=1).values.reshape( 279 | len(df_train), 1, 28, 28)/255 280 | df_valid = df_valid.drop(args.target, axis=1).values.reshape( 281 | len(df_valid), 1, 28, 28)/255 282 | df_test = df_test.values.reshape(len(df_test), 1, 28, 28)/255 283 | 284 | print('DataSet and DataLoader..') 285 | # Creating PyTorch Custom Datasets 286 | train_dataset = MnistDataset(df=df_train, target=train_targets) 287 | valid_dataset = MnistDataset(df=df_valid, target=valid_targets) 288 | 289 | # Creating PyTorch DataLoaders 290 | train_data_loader = data.DataLoader( 291 | dataset=train_dataset, batch_size=args.BATCH_SIZE, shuffle=True) 292 | valid_data_loader = data.DataLoader( 293 | dataset=valid_dataset, batch_size=args.BATCH_SIZE, shuffle=False) 294 | 295 | # device (cpu/gpu) 296 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') 297 | 298 | # instatiate model and sending it to device 299 | model = MnistModel(classes=classes).to(device) 300 | # instantiate optimizer 301 | optimizer = optim.SGD(model.parameters(), lr=args.lr) 302 | # instantiate scheduler 303 | scheduler = optim.lr_scheduler.CyclicLR( 304 | optimizer, base_lr=args.lr, max_lr=0.1) 305 | 306 | print('Training..') 307 | best_accuracy = 0 308 | # loop through epochs 309 | for epoch in range(args.NUM_EPOCHS): 310 | print(f'Epoch [{epoch+1}/{args.NUM_EPOCHS}]') 311 | # train on train data 312 | train_loop_fn(train_data_loader, model, optimizer, device, scheduler) 313 | # evaluate on validation data 314 | o, t = eval_loop_fn(valid_data_loader, model, device) 315 | accuracy = (o == t).mean() * 100 316 | print(f'Accuracy on Valid Data : {accuracy} %') 317 | if accuracy > best_accuracy: 318 | torch.save(model.state_dict(), args.model_path) 319 | best_accuracy = accuracy 320 | 321 | # Predict on test data 322 | return test_loop_fn(df_test, model, device) 323 | 324 | 325 | if __name__ == "__main__": 326 | # variables for training model 327 | @dataclass 328 | class Args: 329 | lr: float = 3e-5 330 | RANDOM_STATE: int = 42 331 | NUM_EPOCHS: int = 5 332 | BATCH_SIZE: int = 100 333 | target: str = 'label' 334 | data_path: str = 'data/' 335 | model_path: str = 'checkpoint/mnist.pt' 336 | 337 | def __post_init__(self): 338 | makedirs('checkpoint', exist_ok=True) 339 | 340 | arg = Args() 341 | random_seed(arg.RANDOM_STATE) 342 | test_preds = run(args=arg) 343 | 
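Once `python train.py` has produced `checkpoint/mnist.pt`, the checkpoint can be sanity-checked outside Flask by loading it the same way `app.py` does at startup. A minimal sketch (assumes the checkpoint exists; uses a random tensor in place of a real, normalized digit):

```python
# Standalone inference sketch (not part of the repo): load the trained weights exactly
# as app.py does, then push a dummy 1x1x28x28 input through the network.
import torch
from train import MnistModel

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

model = MnistModel(classes=10)
model.load_state_dict(torch.load("checkpoint/mnist.pt", map_location=device))
model.to(device)
model.eval()

with torch.no_grad():
    dummy = torch.rand(1, 1, 28, 28, device=device)  # stand-in for a normalized digit image
    log_probs = model(dummy)                         # forward() returns log_softmax scores
    print("predicted class:", log_probs.argmax(dim=1).item())
```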
-------------------------------------------------------------------------------- /torch_serve/index_to_name.json: -------------------------------------------------------------------------------- 1 | {"0": ["n01440764", "tench"], "1": ["n01443537", "goldfish"], "2": ["n01484850", "great_white_shark"], "3": ["n01491361", "tiger_shark"], "4": ["n01494475", "hammerhead"], "5": ["n01496331", "electric_ray"], "6": ["n01498041", "stingray"], "7": ["n01514668", "cock"], "8": ["n01514859", "hen"], "9": ["n01518878", "ostrich"], "10": ["n01530575", "brambling"], "11": ["n01531178", "goldfinch"], "12": ["n01532829", "house_finch"], "13": ["n01534433", "junco"], "14": ["n01537544", "indigo_bunting"], "15": ["n01558993", "robin"], "16": ["n01560419", "bulbul"], "17": ["n01580077", "jay"], "18": ["n01582220", "magpie"], "19": ["n01592084", "chickadee"], "20": ["n01601694", "water_ouzel"], "21": ["n01608432", "kite"], "22": ["n01614925", "bald_eagle"], "23": ["n01616318", "vulture"], "24": ["n01622779", "great_grey_owl"], "25": ["n01629819", "European_fire_salamander"], "26": ["n01630670", "common_newt"], "27": ["n01631663", "eft"], "28": ["n01632458", "spotted_salamander"], "29": ["n01632777", "axolotl"], "30": ["n01641577", "bullfrog"], "31": ["n01644373", "tree_frog"], "32": ["n01644900", "tailed_frog"], "33": ["n01664065", "loggerhead"], "34": ["n01665541", "leatherback_turtle"], "35": ["n01667114", "mud_turtle"], "36": ["n01667778", "terrapin"], "37": ["n01669191", "box_turtle"], "38": ["n01675722", "banded_gecko"], "39": ["n01677366", "common_iguana"], "40": ["n01682714", "American_chameleon"], "41": ["n01685808", "whiptail"], "42": ["n01687978", "agama"], "43": ["n01688243", "frilled_lizard"], "44": ["n01689811", "alligator_lizard"], "45": ["n01692333", "Gila_monster"], "46": ["n01693334", "green_lizard"], "47": ["n01694178", "African_chameleon"], "48": ["n01695060", "Komodo_dragon"], "49": ["n01697457", "African_crocodile"], "50": ["n01698640", "American_alligator"], "51": ["n01704323", "triceratops"], "52": ["n01728572", "thunder_snake"], "53": ["n01728920", "ringneck_snake"], "54": ["n01729322", "hognose_snake"], "55": ["n01729977", "green_snake"], "56": ["n01734418", "king_snake"], "57": ["n01735189", "garter_snake"], "58": ["n01737021", "water_snake"], "59": ["n01739381", "vine_snake"], "60": ["n01740131", "night_snake"], "61": ["n01742172", "boa_constrictor"], "62": ["n01744401", "rock_python"], "63": ["n01748264", "Indian_cobra"], "64": ["n01749939", "green_mamba"], "65": ["n01751748", "sea_snake"], "66": ["n01753488", "horned_viper"], "67": ["n01755581", "diamondback"], "68": ["n01756291", "sidewinder"], "69": ["n01768244", "trilobite"], "70": ["n01770081", "harvestman"], "71": ["n01770393", "scorpion"], "72": ["n01773157", "black_and_gold_garden_spider"], "73": ["n01773549", "barn_spider"], "74": ["n01773797", "garden_spider"], "75": ["n01774384", "black_widow"], "76": ["n01774750", "tarantula"], "77": ["n01775062", "wolf_spider"], "78": ["n01776313", "tick"], "79": ["n01784675", "centipede"], "80": ["n01795545", "black_grouse"], "81": ["n01796340", "ptarmigan"], "82": ["n01797886", "ruffed_grouse"], "83": ["n01798484", "prairie_chicken"], "84": ["n01806143", "peacock"], "85": ["n01806567", "quail"], "86": ["n01807496", "partridge"], "87": ["n01817953", "African_grey"], "88": ["n01818515", "macaw"], "89": ["n01819313", "sulphur-crested_cockatoo"], "90": ["n01820546", "lorikeet"], "91": ["n01824575", "coucal"], "92": ["n01828970", "bee_eater"], "93": ["n01829413", "hornbill"], "94": 
["n01833805", "hummingbird"], "95": ["n01843065", "jacamar"], "96": ["n01843383", "toucan"], "97": ["n01847000", "drake"], "98": ["n01855032", "red-breasted_merganser"], "99": ["n01855672", "goose"], "100": ["n01860187", "black_swan"], "101": ["n01871265", "tusker"], "102": ["n01872401", "echidna"], "103": ["n01873310", "platypus"], "104": ["n01877812", "wallaby"], "105": ["n01882714", "koala"], "106": ["n01883070", "wombat"], "107": ["n01910747", "jellyfish"], "108": ["n01914609", "sea_anemone"], "109": ["n01917289", "brain_coral"], "110": ["n01924916", "flatworm"], "111": ["n01930112", "nematode"], "112": ["n01943899", "conch"], "113": ["n01944390", "snail"], "114": ["n01945685", "slug"], "115": ["n01950731", "sea_slug"], "116": ["n01955084", "chiton"], "117": ["n01968897", "chambered_nautilus"], "118": ["n01978287", "Dungeness_crab"], "119": ["n01978455", "rock_crab"], "120": ["n01980166", "fiddler_crab"], "121": ["n01981276", "king_crab"], "122": ["n01983481", "American_lobster"], "123": ["n01984695", "spiny_lobster"], "124": ["n01985128", "crayfish"], "125": ["n01986214", "hermit_crab"], "126": ["n01990800", "isopod"], "127": ["n02002556", "white_stork"], "128": ["n02002724", "black_stork"], "129": ["n02006656", "spoonbill"], "130": ["n02007558", "flamingo"], "131": ["n02009229", "little_blue_heron"], "132": ["n02009912", "American_egret"], "133": ["n02011460", "bittern"], "134": ["n02012849", "crane"], "135": ["n02013706", "limpkin"], "136": ["n02017213", "European_gallinule"], "137": ["n02018207", "American_coot"], "138": ["n02018795", "bustard"], "139": ["n02025239", "ruddy_turnstone"], "140": ["n02027492", "red-backed_sandpiper"], "141": ["n02028035", "redshank"], "142": ["n02033041", "dowitcher"], "143": ["n02037110", "oystercatcher"], "144": ["n02051845", "pelican"], "145": ["n02056570", "king_penguin"], "146": ["n02058221", "albatross"], "147": ["n02066245", "grey_whale"], "148": ["n02071294", "killer_whale"], "149": ["n02074367", "dugong"], "150": ["n02077923", "sea_lion"], "151": ["n02085620", "Chihuahua"], "152": ["n02085782", "Japanese_spaniel"], "153": ["n02085936", "Maltese_dog"], "154": ["n02086079", "Pekinese"], "155": ["n02086240", "Shih-Tzu"], "156": ["n02086646", "Blenheim_spaniel"], "157": ["n02086910", "papillon"], "158": ["n02087046", "toy_terrier"], "159": ["n02087394", "Rhodesian_ridgeback"], "160": ["n02088094", "Afghan_hound"], "161": ["n02088238", "basset"], "162": ["n02088364", "beagle"], "163": ["n02088466", "bloodhound"], "164": ["n02088632", "bluetick"], "165": ["n02089078", "black-and-tan_coonhound"], "166": ["n02089867", "Walker_hound"], "167": ["n02089973", "English_foxhound"], "168": ["n02090379", "redbone"], "169": ["n02090622", "borzoi"], "170": ["n02090721", "Irish_wolfhound"], "171": ["n02091032", "Italian_greyhound"], "172": ["n02091134", "whippet"], "173": ["n02091244", "Ibizan_hound"], "174": ["n02091467", "Norwegian_elkhound"], "175": ["n02091635", "otterhound"], "176": ["n02091831", "Saluki"], "177": ["n02092002", "Scottish_deerhound"], "178": ["n02092339", "Weimaraner"], "179": ["n02093256", "Staffordshire_bullterrier"], "180": ["n02093428", "American_Staffordshire_terrier"], "181": ["n02093647", "Bedlington_terrier"], "182": ["n02093754", "Border_terrier"], "183": ["n02093859", "Kerry_blue_terrier"], "184": ["n02093991", "Irish_terrier"], "185": ["n02094114", "Norfolk_terrier"], "186": ["n02094258", "Norwich_terrier"], "187": ["n02094433", "Yorkshire_terrier"], "188": ["n02095314", "wire-haired_fox_terrier"], "189": ["n02095570", 
"Lakeland_terrier"], "190": ["n02095889", "Sealyham_terrier"], "191": ["n02096051", "Airedale"], "192": ["n02096177", "cairn"], "193": ["n02096294", "Australian_terrier"], "194": ["n02096437", "Dandie_Dinmont"], "195": ["n02096585", "Boston_bull"], "196": ["n02097047", "miniature_schnauzer"], "197": ["n02097130", "giant_schnauzer"], "198": ["n02097209", "standard_schnauzer"], "199": ["n02097298", "Scotch_terrier"], "200": ["n02097474", "Tibetan_terrier"], "201": ["n02097658", "silky_terrier"], "202": ["n02098105", "soft-coated_wheaten_terrier"], "203": ["n02098286", "West_Highland_white_terrier"], "204": ["n02098413", "Lhasa"], "205": ["n02099267", "flat-coated_retriever"], "206": ["n02099429", "curly-coated_retriever"], "207": ["n02099601", "golden_retriever"], "208": ["n02099712", "Labrador_retriever"], "209": ["n02099849", "Chesapeake_Bay_retriever"], "210": ["n02100236", "German_short-haired_pointer"], "211": ["n02100583", "vizsla"], "212": ["n02100735", "English_setter"], "213": ["n02100877", "Irish_setter"], "214": ["n02101006", "Gordon_setter"], "215": ["n02101388", "Brittany_spaniel"], "216": ["n02101556", "clumber"], "217": ["n02102040", "English_springer"], "218": ["n02102177", "Welsh_springer_spaniel"], "219": ["n02102318", "cocker_spaniel"], "220": ["n02102480", "Sussex_spaniel"], "221": ["n02102973", "Irish_water_spaniel"], "222": ["n02104029", "kuvasz"], "223": ["n02104365", "schipperke"], "224": ["n02105056", "groenendael"], "225": ["n02105162", "malinois"], "226": ["n02105251", "briard"], "227": ["n02105412", "kelpie"], "228": ["n02105505", "komondor"], "229": ["n02105641", "Old_English_sheepdog"], "230": ["n02105855", "Shetland_sheepdog"], "231": ["n02106030", "collie"], "232": ["n02106166", "Border_collie"], "233": ["n02106382", "Bouvier_des_Flandres"], "234": ["n02106550", "Rottweiler"], "235": ["n02106662", "German_shepherd"], "236": ["n02107142", "Doberman"], "237": ["n02107312", "miniature_pinscher"], "238": ["n02107574", "Greater_Swiss_Mountain_dog"], "239": ["n02107683", "Bernese_mountain_dog"], "240": ["n02107908", "Appenzeller"], "241": ["n02108000", "EntleBucher"], "242": ["n02108089", "boxer"], "243": ["n02108422", "bull_mastiff"], "244": ["n02108551", "Tibetan_mastiff"], "245": ["n02108915", "French_bulldog"], "246": ["n02109047", "Great_Dane"], "247": ["n02109525", "Saint_Bernard"], "248": ["n02109961", "Eskimo_dog"], "249": ["n02110063", "malamute"], "250": ["n02110185", "Siberian_husky"], "251": ["n02110341", "dalmatian"], "252": ["n02110627", "affenpinscher"], "253": ["n02110806", "basenji"], "254": ["n02110958", "pug"], "255": ["n02111129", "Leonberg"], "256": ["n02111277", "Newfoundland"], "257": ["n02111500", "Great_Pyrenees"], "258": ["n02111889", "Samoyed"], "259": ["n02112018", "Pomeranian"], "260": ["n02112137", "chow"], "261": ["n02112350", "keeshond"], "262": ["n02112706", "Brabancon_griffon"], "263": ["n02113023", "Pembroke"], "264": ["n02113186", "Cardigan"], "265": ["n02113624", "toy_poodle"], "266": ["n02113712", "miniature_poodle"], "267": ["n02113799", "standard_poodle"], "268": ["n02113978", "Mexican_hairless"], "269": ["n02114367", "timber_wolf"], "270": ["n02114548", "white_wolf"], "271": ["n02114712", "red_wolf"], "272": ["n02114855", "coyote"], "273": ["n02115641", "dingo"], "274": ["n02115913", "dhole"], "275": ["n02116738", "African_hunting_dog"], "276": ["n02117135", "hyena"], "277": ["n02119022", "red_fox"], "278": ["n02119789", "kit_fox"], "279": ["n02120079", "Arctic_fox"], "280": ["n02120505", "grey_fox"], "281": ["n02123045", 
"tabby"], "282": ["n02123159", "tiger_cat"], "283": ["n02123394", "Persian_cat"], "284": ["n02123597", "Siamese_cat"], "285": ["n02124075", "Egyptian_cat"], "286": ["n02125311", "cougar"], "287": ["n02127052", "lynx"], "288": ["n02128385", "leopard"], "289": ["n02128757", "snow_leopard"], "290": ["n02128925", "jaguar"], "291": ["n02129165", "lion"], "292": ["n02129604", "tiger"], "293": ["n02130308", "cheetah"], "294": ["n02132136", "brown_bear"], "295": ["n02133161", "American_black_bear"], "296": ["n02134084", "ice_bear"], "297": ["n02134418", "sloth_bear"], "298": ["n02137549", "mongoose"], "299": ["n02138441", "meerkat"], "300": ["n02165105", "tiger_beetle"], "301": ["n02165456", "ladybug"], "302": ["n02167151", "ground_beetle"], "303": ["n02168699", "long-horned_beetle"], "304": ["n02169497", "leaf_beetle"], "305": ["n02172182", "dung_beetle"], "306": ["n02174001", "rhinoceros_beetle"], "307": ["n02177972", "weevil"], "308": ["n02190166", "fly"], "309": ["n02206856", "bee"], "310": ["n02219486", "ant"], "311": ["n02226429", "grasshopper"], "312": ["n02229544", "cricket"], "313": ["n02231487", "walking_stick"], "314": ["n02233338", "cockroach"], "315": ["n02236044", "mantis"], "316": ["n02256656", "cicada"], "317": ["n02259212", "leafhopper"], "318": ["n02264363", "lacewing"], "319": ["n02268443", "dragonfly"], "320": ["n02268853", "damselfly"], "321": ["n02276258", "admiral"], "322": ["n02277742", "ringlet"], "323": ["n02279972", "monarch"], "324": ["n02280649", "cabbage_butterfly"], "325": ["n02281406", "sulphur_butterfly"], "326": ["n02281787", "lycaenid"], "327": ["n02317335", "starfish"], "328": ["n02319095", "sea_urchin"], "329": ["n02321529", "sea_cucumber"], "330": ["n02325366", "wood_rabbit"], "331": ["n02326432", "hare"], "332": ["n02328150", "Angora"], "333": ["n02342885", "hamster"], "334": ["n02346627", "porcupine"], "335": ["n02356798", "fox_squirrel"], "336": ["n02361337", "marmot"], "337": ["n02363005", "beaver"], "338": ["n02364673", "guinea_pig"], "339": ["n02389026", "sorrel"], "340": ["n02391049", "zebra"], "341": ["n02395406", "hog"], "342": ["n02396427", "wild_boar"], "343": ["n02397096", "warthog"], "344": ["n02398521", "hippopotamus"], "345": ["n02403003", "ox"], "346": ["n02408429", "water_buffalo"], "347": ["n02410509", "bison"], "348": ["n02412080", "ram"], "349": ["n02415577", "bighorn"], "350": ["n02417914", "ibex"], "351": ["n02422106", "hartebeest"], "352": ["n02422699", "impala"], "353": ["n02423022", "gazelle"], "354": ["n02437312", "Arabian_camel"], "355": ["n02437616", "llama"], "356": ["n02441942", "weasel"], "357": ["n02442845", "mink"], "358": ["n02443114", "polecat"], "359": ["n02443484", "black-footed_ferret"], "360": ["n02444819", "otter"], "361": ["n02445715", "skunk"], "362": ["n02447366", "badger"], "363": ["n02454379", "armadillo"], "364": ["n02457408", "three-toed_sloth"], "365": ["n02480495", "orangutan"], "366": ["n02480855", "gorilla"], "367": ["n02481823", "chimpanzee"], "368": ["n02483362", "gibbon"], "369": ["n02483708", "siamang"], "370": ["n02484975", "guenon"], "371": ["n02486261", "patas"], "372": ["n02486410", "baboon"], "373": ["n02487347", "macaque"], "374": ["n02488291", "langur"], "375": ["n02488702", "colobus"], "376": ["n02489166", "proboscis_monkey"], "377": ["n02490219", "marmoset"], "378": ["n02492035", "capuchin"], "379": ["n02492660", "howler_monkey"], "380": ["n02493509", "titi"], "381": ["n02493793", "spider_monkey"], "382": ["n02494079", "squirrel_monkey"], "383": ["n02497673", "Madagascar_cat"], "384": 
["n02500267", "indri"], "385": ["n02504013", "Indian_elephant"], "386": ["n02504458", "African_elephant"], "387": ["n02509815", "lesser_panda"], "388": ["n02510455", "giant_panda"], "389": ["n02514041", "barracouta"], "390": ["n02526121", "eel"], "391": ["n02536864", "coho"], "392": ["n02606052", "rock_beauty"], "393": ["n02607072", "anemone_fish"], "394": ["n02640242", "sturgeon"], "395": ["n02641379", "gar"], "396": ["n02643566", "lionfish"], "397": ["n02655020", "puffer"], "398": ["n02666196", "abacus"], "399": ["n02667093", "abaya"], "400": ["n02669723", "academic_gown"], "401": ["n02672831", "accordion"], "402": ["n02676566", "acoustic_guitar"], "403": ["n02687172", "aircraft_carrier"], "404": ["n02690373", "airliner"], "405": ["n02692877", "airship"], "406": ["n02699494", "altar"], "407": ["n02701002", "ambulance"], "408": ["n02704792", "amphibian"], "409": ["n02708093", "analog_clock"], "410": ["n02727426", "apiary"], "411": ["n02730930", "apron"], "412": ["n02747177", "ashcan"], "413": ["n02749479", "assault_rifle"], "414": ["n02769748", "backpack"], "415": ["n02776631", "bakery"], "416": ["n02777292", "balance_beam"], "417": ["n02782093", "balloon"], "418": ["n02783161", "ballpoint"], "419": ["n02786058", "Band_Aid"], "420": ["n02787622", "banjo"], "421": ["n02788148", "bannister"], "422": ["n02790996", "barbell"], "423": ["n02791124", "barber_chair"], "424": ["n02791270", "barbershop"], "425": ["n02793495", "barn"], "426": ["n02794156", "barometer"], "427": ["n02795169", "barrel"], "428": ["n02797295", "barrow"], "429": ["n02799071", "baseball"], "430": ["n02802426", "basketball"], "431": ["n02804414", "bassinet"], "432": ["n02804610", "bassoon"], "433": ["n02807133", "bathing_cap"], "434": ["n02808304", "bath_towel"], "435": ["n02808440", "bathtub"], "436": ["n02814533", "beach_wagon"], "437": ["n02814860", "beacon"], "438": ["n02815834", "beaker"], "439": ["n02817516", "bearskin"], "440": ["n02823428", "beer_bottle"], "441": ["n02823750", "beer_glass"], "442": ["n02825657", "bell_cote"], "443": ["n02834397", "bib"], "444": ["n02835271", "bicycle-built-for-two"], "445": ["n02837789", "bikini"], "446": ["n02840245", "binder"], "447": ["n02841315", "binoculars"], "448": ["n02843684", "birdhouse"], "449": ["n02859443", "boathouse"], "450": ["n02860847", "bobsled"], "451": ["n02865351", "bolo_tie"], "452": ["n02869837", "bonnet"], "453": ["n02870880", "bookcase"], "454": ["n02871525", "bookshop"], "455": ["n02877765", "bottlecap"], "456": ["n02879718", "bow"], "457": ["n02883205", "bow_tie"], "458": ["n02892201", "brass"], "459": ["n02892767", "brassiere"], "460": ["n02894605", "breakwater"], "461": ["n02895154", "breastplate"], "462": ["n02906734", "broom"], "463": ["n02909870", "bucket"], "464": ["n02910353", "buckle"], "465": ["n02916936", "bulletproof_vest"], "466": ["n02917067", "bullet_train"], "467": ["n02927161", "butcher_shop"], "468": ["n02930766", "cab"], "469": ["n02939185", "caldron"], "470": ["n02948072", "candle"], "471": ["n02950826", "cannon"], "472": ["n02951358", "canoe"], "473": ["n02951585", "can_opener"], "474": ["n02963159", "cardigan"], "475": ["n02965783", "car_mirror"], "476": ["n02966193", "carousel"], "477": ["n02966687", "carpenter's_kit"], "478": ["n02971356", "carton"], "479": ["n02974003", "car_wheel"], "480": ["n02977058", "cash_machine"], "481": ["n02978881", "cassette"], "482": ["n02979186", "cassette_player"], "483": ["n02980441", "castle"], "484": ["n02981792", "catamaran"], "485": ["n02988304", "CD_player"], "486": ["n02992211", "cello"], "487": 
["n02992529", "cellular_telephone"], "488": ["n02999410", "chain"], "489": ["n03000134", "chainlink_fence"], "490": ["n03000247", "chain_mail"], "491": ["n03000684", "chain_saw"], "492": ["n03014705", "chest"], "493": ["n03016953", "chiffonier"], "494": ["n03017168", "chime"], "495": ["n03018349", "china_cabinet"], "496": ["n03026506", "Christmas_stocking"], "497": ["n03028079", "church"], "498": ["n03032252", "cinema"], "499": ["n03041632", "cleaver"], "500": ["n03042490", "cliff_dwelling"], "501": ["n03045698", "cloak"], "502": ["n03047690", "clog"], "503": ["n03062245", "cocktail_shaker"], "504": ["n03063599", "coffee_mug"], "505": ["n03063689", "coffeepot"], "506": ["n03065424", "coil"], "507": ["n03075370", "combination_lock"], "508": ["n03085013", "computer_keyboard"], "509": ["n03089624", "confectionery"], "510": ["n03095699", "container_ship"], "511": ["n03100240", "convertible"], "512": ["n03109150", "corkscrew"], "513": ["n03110669", "cornet"], "514": ["n03124043", "cowboy_boot"], "515": ["n03124170", "cowboy_hat"], "516": ["n03125729", "cradle"], "517": ["n03126707", "crane"], "518": ["n03127747", "crash_helmet"], "519": ["n03127925", "crate"], "520": ["n03131574", "crib"], "521": ["n03133878", "Crock_Pot"], "522": ["n03134739", "croquet_ball"], "523": ["n03141823", "crutch"], "524": ["n03146219", "cuirass"], "525": ["n03160309", "dam"], "526": ["n03179701", "desk"], "527": ["n03180011", "desktop_computer"], "528": ["n03187595", "dial_telephone"], "529": ["n03188531", "diaper"], "530": ["n03196217", "digital_clock"], "531": ["n03197337", "digital_watch"], "532": ["n03201208", "dining_table"], "533": ["n03207743", "dishrag"], "534": ["n03207941", "dishwasher"], "535": ["n03208938", "disk_brake"], "536": ["n03216828", "dock"], "537": ["n03218198", "dogsled"], "538": ["n03220513", "dome"], "539": ["n03223299", "doormat"], "540": ["n03240683", "drilling_platform"], "541": ["n03249569", "drum"], "542": ["n03250847", "drumstick"], "543": ["n03255030", "dumbbell"], "544": ["n03259280", "Dutch_oven"], "545": ["n03271574", "electric_fan"], "546": ["n03272010", "electric_guitar"], "547": ["n03272562", "electric_locomotive"], "548": ["n03290653", "entertainment_center"], "549": ["n03291819", "envelope"], "550": ["n03297495", "espresso_maker"], "551": ["n03314780", "face_powder"], "552": ["n03325584", "feather_boa"], "553": ["n03337140", "file"], "554": ["n03344393", "fireboat"], "555": ["n03345487", "fire_engine"], "556": ["n03347037", "fire_screen"], "557": ["n03355925", "flagpole"], "558": ["n03372029", "flute"], "559": ["n03376595", "folding_chair"], "560": ["n03379051", "football_helmet"], "561": ["n03384352", "forklift"], "562": ["n03388043", "fountain"], "563": ["n03388183", "fountain_pen"], "564": ["n03388549", "four-poster"], "565": ["n03393912", "freight_car"], "566": ["n03394916", "French_horn"], "567": ["n03400231", "frying_pan"], "568": ["n03404251", "fur_coat"], "569": ["n03417042", "garbage_truck"], "570": ["n03424325", "gasmask"], "571": ["n03425413", "gas_pump"], "572": ["n03443371", "goblet"], "573": ["n03444034", "go-kart"], "574": ["n03445777", "golf_ball"], "575": ["n03445924", "golfcart"], "576": ["n03447447", "gondola"], "577": ["n03447721", "gong"], "578": ["n03450230", "gown"], "579": ["n03452741", "grand_piano"], "580": ["n03457902", "greenhouse"], "581": ["n03459775", "grille"], "582": ["n03461385", "grocery_store"], "583": ["n03467068", "guillotine"], "584": ["n03476684", "hair_slide"], "585": ["n03476991", "hair_spray"], "586": ["n03478589", "half_track"], 
"587": ["n03481172", "hammer"], "588": ["n03482405", "hamper"], "589": ["n03483316", "hand_blower"], "590": ["n03485407", "hand-held_computer"], "591": ["n03485794", "handkerchief"], "592": ["n03492542", "hard_disc"], "593": ["n03494278", "harmonica"], "594": ["n03495258", "harp"], "595": ["n03496892", "harvester"], "596": ["n03498962", "hatchet"], "597": ["n03527444", "holster"], "598": ["n03529860", "home_theater"], "599": ["n03530642", "honeycomb"], "600": ["n03532672", "hook"], "601": ["n03534580", "hoopskirt"], "602": ["n03535780", "horizontal_bar"], "603": ["n03538406", "horse_cart"], "604": ["n03544143", "hourglass"], "605": ["n03584254", "iPod"], "606": ["n03584829", "iron"], "607": ["n03590841", "jack-o'-lantern"], "608": ["n03594734", "jean"], "609": ["n03594945", "jeep"], "610": ["n03595614", "jersey"], "611": ["n03598930", "jigsaw_puzzle"], "612": ["n03599486", "jinrikisha"], "613": ["n03602883", "joystick"], "614": ["n03617480", "kimono"], "615": ["n03623198", "knee_pad"], "616": ["n03627232", "knot"], "617": ["n03630383", "lab_coat"], "618": ["n03633091", "ladle"], "619": ["n03637318", "lampshade"], "620": ["n03642806", "laptop"], "621": ["n03649909", "lawn_mower"], "622": ["n03657121", "lens_cap"], "623": ["n03658185", "letter_opener"], "624": ["n03661043", "library"], "625": ["n03662601", "lifeboat"], "626": ["n03666591", "lighter"], "627": ["n03670208", "limousine"], "628": ["n03673027", "liner"], "629": ["n03676483", "lipstick"], "630": ["n03680355", "Loafer"], "631": ["n03690938", "lotion"], "632": ["n03691459", "loudspeaker"], "633": ["n03692522", "loupe"], "634": ["n03697007", "lumbermill"], "635": ["n03706229", "magnetic_compass"], "636": ["n03709823", "mailbag"], "637": ["n03710193", "mailbox"], "638": ["n03710637", "maillot"], "639": ["n03710721", "maillot"], "640": ["n03717622", "manhole_cover"], "641": ["n03720891", "maraca"], "642": ["n03721384", "marimba"], "643": ["n03724870", "mask"], "644": ["n03729826", "matchstick"], "645": ["n03733131", "maypole"], "646": ["n03733281", "maze"], "647": ["n03733805", "measuring_cup"], "648": ["n03742115", "medicine_chest"], "649": ["n03743016", "megalith"], "650": ["n03759954", "microphone"], "651": ["n03761084", "microwave"], "652": ["n03763968", "military_uniform"], "653": ["n03764736", "milk_can"], "654": ["n03769881", "minibus"], "655": ["n03770439", "miniskirt"], "656": ["n03770679", "minivan"], "657": ["n03773504", "missile"], "658": ["n03775071", "mitten"], "659": ["n03775546", "mixing_bowl"], "660": ["n03776460", "mobile_home"], "661": ["n03777568", "Model_T"], "662": ["n03777754", "modem"], "663": ["n03781244", "monastery"], "664": ["n03782006", "monitor"], "665": ["n03785016", "moped"], "666": ["n03786901", "mortar"], "667": ["n03787032", "mortarboard"], "668": ["n03788195", "mosque"], "669": ["n03788365", "mosquito_net"], "670": ["n03791053", "motor_scooter"], "671": ["n03792782", "mountain_bike"], "672": ["n03792972", "mountain_tent"], "673": ["n03793489", "mouse"], "674": ["n03794056", "mousetrap"], "675": ["n03796401", "moving_van"], "676": ["n03803284", "muzzle"], "677": ["n03804744", "nail"], "678": ["n03814639", "neck_brace"], "679": ["n03814906", "necklace"], "680": ["n03825788", "nipple"], "681": ["n03832673", "notebook"], "682": ["n03837869", "obelisk"], "683": ["n03838899", "oboe"], "684": ["n03840681", "ocarina"], "685": ["n03841143", "odometer"], "686": ["n03843555", "oil_filter"], "687": ["n03854065", "organ"], "688": ["n03857828", "oscilloscope"], "689": ["n03866082", "overskirt"], "690": 
["n03868242", "oxcart"], "691": ["n03868863", "oxygen_mask"], "692": ["n03871628", "packet"], "693": ["n03873416", "paddle"], "694": ["n03874293", "paddlewheel"], "695": ["n03874599", "padlock"], "696": ["n03876231", "paintbrush"], "697": ["n03877472", "pajama"], "698": ["n03877845", "palace"], "699": ["n03884397", "panpipe"], "700": ["n03887697", "paper_towel"], "701": ["n03888257", "parachute"], "702": ["n03888605", "parallel_bars"], "703": ["n03891251", "park_bench"], "704": ["n03891332", "parking_meter"], "705": ["n03895866", "passenger_car"], "706": ["n03899768", "patio"], "707": ["n03902125", "pay-phone"], "708": ["n03903868", "pedestal"], "709": ["n03908618", "pencil_box"], "710": ["n03908714", "pencil_sharpener"], "711": ["n03916031", "perfume"], "712": ["n03920288", "Petri_dish"], "713": ["n03924679", "photocopier"], "714": ["n03929660", "pick"], "715": ["n03929855", "pickelhaube"], "716": ["n03930313", "picket_fence"], "717": ["n03930630", "pickup"], "718": ["n03933933", "pier"], "719": ["n03935335", "piggy_bank"], "720": ["n03937543", "pill_bottle"], "721": ["n03938244", "pillow"], "722": ["n03942813", "ping-pong_ball"], "723": ["n03944341", "pinwheel"], "724": ["n03947888", "pirate"], "725": ["n03950228", "pitcher"], "726": ["n03954731", "plane"], "727": ["n03956157", "planetarium"], "728": ["n03958227", "plastic_bag"], "729": ["n03961711", "plate_rack"], "730": ["n03967562", "plow"], "731": ["n03970156", "plunger"], "732": ["n03976467", "Polaroid_camera"], "733": ["n03976657", "pole"], "734": ["n03977966", "police_van"], "735": ["n03980874", "poncho"], "736": ["n03982430", "pool_table"], "737": ["n03983396", "pop_bottle"], "738": ["n03991062", "pot"], "739": ["n03992509", "potter's_wheel"], "740": ["n03995372", "power_drill"], "741": ["n03998194", "prayer_rug"], "742": ["n04004767", "printer"], "743": ["n04005630", "prison"], "744": ["n04008634", "projectile"], "745": ["n04009552", "projector"], "746": ["n04019541", "puck"], "747": ["n04023962", "punching_bag"], "748": ["n04026417", "purse"], "749": ["n04033901", "quill"], "750": ["n04033995", "quilt"], "751": ["n04037443", "racer"], "752": ["n04039381", "racket"], "753": ["n04040759", "radiator"], "754": ["n04041544", "radio"], "755": ["n04044716", "radio_telescope"], "756": ["n04049303", "rain_barrel"], "757": ["n04065272", "recreational_vehicle"], "758": ["n04067472", "reel"], "759": ["n04069434", "reflex_camera"], "760": ["n04070727", "refrigerator"], "761": ["n04074963", "remote_control"], "762": ["n04081281", "restaurant"], "763": ["n04086273", "revolver"], "764": ["n04090263", "rifle"], "765": ["n04099969", "rocking_chair"], "766": ["n04111531", "rotisserie"], "767": ["n04116512", "rubber_eraser"], "768": ["n04118538", "rugby_ball"], "769": ["n04118776", "rule"], "770": ["n04120489", "running_shoe"], "771": ["n04125021", "safe"], "772": ["n04127249", "safety_pin"], "773": ["n04131690", "saltshaker"], "774": ["n04133789", "sandal"], "775": ["n04136333", "sarong"], "776": ["n04141076", "sax"], "777": ["n04141327", "scabbard"], "778": ["n04141975", "scale"], "779": ["n04146614", "school_bus"], "780": ["n04147183", "schooner"], "781": ["n04149813", "scoreboard"], "782": ["n04152593", "screen"], "783": ["n04153751", "screw"], "784": ["n04154565", "screwdriver"], "785": ["n04162706", "seat_belt"], "786": ["n04179913", "sewing_machine"], "787": ["n04192698", "shield"], "788": ["n04200800", "shoe_shop"], "789": ["n04201297", "shoji"], "790": ["n04204238", "shopping_basket"], "791": ["n04204347", "shopping_cart"], "792": 
["n04208210", "shovel"], "793": ["n04209133", "shower_cap"], "794": ["n04209239", "shower_curtain"], "795": ["n04228054", "ski"], "796": ["n04229816", "ski_mask"], "797": ["n04235860", "sleeping_bag"], "798": ["n04238763", "slide_rule"], "799": ["n04239074", "sliding_door"], "800": ["n04243546", "slot"], "801": ["n04251144", "snorkel"], "802": ["n04252077", "snowmobile"], "803": ["n04252225", "snowplow"], "804": ["n04254120", "soap_dispenser"], "805": ["n04254680", "soccer_ball"], "806": ["n04254777", "sock"], "807": ["n04258138", "solar_dish"], "808": ["n04259630", "sombrero"], "809": ["n04263257", "soup_bowl"], "810": ["n04264628", "space_bar"], "811": ["n04265275", "space_heater"], "812": ["n04266014", "space_shuttle"], "813": ["n04270147", "spatula"], "814": ["n04273569", "speedboat"], "815": ["n04275548", "spider_web"], "816": ["n04277352", "spindle"], "817": ["n04285008", "sports_car"], "818": ["n04286575", "spotlight"], "819": ["n04296562", "stage"], "820": ["n04310018", "steam_locomotive"], "821": ["n04311004", "steel_arch_bridge"], "822": ["n04311174", "steel_drum"], "823": ["n04317175", "stethoscope"], "824": ["n04325704", "stole"], "825": ["n04326547", "stone_wall"], "826": ["n04328186", "stopwatch"], "827": ["n04330267", "stove"], "828": ["n04332243", "strainer"], "829": ["n04335435", "streetcar"], "830": ["n04336792", "stretcher"], "831": ["n04344873", "studio_couch"], "832": ["n04346328", "stupa"], "833": ["n04347754", "submarine"], "834": ["n04350905", "suit"], "835": ["n04355338", "sundial"], "836": ["n04355933", "sunglass"], "837": ["n04356056", "sunglasses"], "838": ["n04357314", "sunscreen"], "839": ["n04366367", "suspension_bridge"], "840": ["n04367480", "swab"], "841": ["n04370456", "sweatshirt"], "842": ["n04371430", "swimming_trunks"], "843": ["n04371774", "swing"], "844": ["n04372370", "switch"], "845": ["n04376876", "syringe"], "846": ["n04380533", "table_lamp"], "847": ["n04389033", "tank"], "848": ["n04392985", "tape_player"], "849": ["n04398044", "teapot"], "850": ["n04399382", "teddy"], "851": ["n04404412", "television"], "852": ["n04409515", "tennis_ball"], "853": ["n04417672", "thatch"], "854": ["n04418357", "theater_curtain"], "855": ["n04423845", "thimble"], "856": ["n04428191", "thresher"], "857": ["n04429376", "throne"], "858": ["n04435653", "tile_roof"], "859": ["n04442312", "toaster"], "860": ["n04443257", "tobacco_shop"], "861": ["n04447861", "toilet_seat"], "862": ["n04456115", "torch"], "863": ["n04458633", "totem_pole"], "864": ["n04461696", "tow_truck"], "865": ["n04462240", "toyshop"], "866": ["n04465501", "tractor"], "867": ["n04467665", "trailer_truck"], "868": ["n04476259", "tray"], "869": ["n04479046", "trench_coat"], "870": ["n04482393", "tricycle"], "871": ["n04483307", "trimaran"], "872": ["n04485082", "tripod"], "873": ["n04486054", "triumphal_arch"], "874": ["n04487081", "trolleybus"], "875": ["n04487394", "trombone"], "876": ["n04493381", "tub"], "877": ["n04501370", "turnstile"], "878": ["n04505470", "typewriter_keyboard"], "879": ["n04507155", "umbrella"], "880": ["n04509417", "unicycle"], "881": ["n04515003", "upright"], "882": ["n04517823", "vacuum"], "883": ["n04522168", "vase"], "884": ["n04523525", "vault"], "885": ["n04525038", "velvet"], "886": ["n04525305", "vending_machine"], "887": ["n04532106", "vestment"], "888": ["n04532670", "viaduct"], "889": ["n04536866", "violin"], "890": ["n04540053", "volleyball"], "891": ["n04542943", "waffle_iron"], "892": ["n04548280", "wall_clock"], "893": ["n04548362", "wallet"], "894": 
["n04550184", "wardrobe"], "895": ["n04552348", "warplane"], "896": ["n04553703", "washbasin"], "897": ["n04554684", "washer"], "898": ["n04557648", "water_bottle"], "899": ["n04560804", "water_jug"], "900": ["n04562935", "water_tower"], "901": ["n04579145", "whiskey_jug"], "902": ["n04579432", "whistle"], "903": ["n04584207", "wig"], "904": ["n04589890", "window_screen"], "905": ["n04590129", "window_shade"], "906": ["n04591157", "Windsor_tie"], "907": ["n04591713", "wine_bottle"], "908": ["n04592741", "wing"], "909": ["n04596742", "wok"], "910": ["n04597913", "wooden_spoon"], "911": ["n04599235", "wool"], "912": ["n04604644", "worm_fence"], "913": ["n04606251", "wreck"], "914": ["n04612504", "yawl"], "915": ["n04613696", "yurt"], "916": ["n06359193", "web_site"], "917": ["n06596364", "comic_book"], "918": ["n06785654", "crossword_puzzle"], "919": ["n06794110", "street_sign"], "920": ["n06874185", "traffic_light"], "921": ["n07248320", "book_jacket"], "922": ["n07565083", "menu"], "923": ["n07579787", "plate"], "924": ["n07583066", "guacamole"], "925": ["n07584110", "consomme"], "926": ["n07590611", "hot_pot"], "927": ["n07613480", "trifle"], "928": ["n07614500", "ice_cream"], "929": ["n07615774", "ice_lolly"], "930": ["n07684084", "French_loaf"], "931": ["n07693725", "bagel"], "932": ["n07695742", "pretzel"], "933": ["n07697313", "cheeseburger"], "934": ["n07697537", "hotdog"], "935": ["n07711569", "mashed_potato"], "936": ["n07714571", "head_cabbage"], "937": ["n07714990", "broccoli"], "938": ["n07715103", "cauliflower"], "939": ["n07716358", "zucchini"], "940": ["n07716906", "spaghetti_squash"], "941": ["n07717410", "acorn_squash"], "942": ["n07717556", "butternut_squash"], "943": ["n07718472", "cucumber"], "944": ["n07718747", "artichoke"], "945": ["n07720875", "bell_pepper"], "946": ["n07730033", "cardoon"], "947": ["n07734744", "mushroom"], "948": ["n07742313", "Granny_Smith"], "949": ["n07745940", "strawberry"], "950": ["n07747607", "orange"], "951": ["n07749582", "lemon"], "952": ["n07753113", "fig"], "953": ["n07753275", "pineapple"], "954": ["n07753592", "banana"], "955": ["n07754684", "jackfruit"], "956": ["n07760859", "custard_apple"], "957": ["n07768694", "pomegranate"], "958": ["n07802026", "hay"], "959": ["n07831146", "carbonara"], "960": ["n07836838", "chocolate_sauce"], "961": ["n07860988", "dough"], "962": ["n07871810", "meat_loaf"], "963": ["n07873807", "pizza"], "964": ["n07875152", "potpie"], "965": ["n07880968", "burrito"], "966": ["n07892512", "red_wine"], "967": ["n07920052", "espresso"], "968": ["n07930864", "cup"], "969": ["n07932039", "eggnog"], "970": ["n09193705", "alp"], "971": ["n09229709", "bubble"], "972": ["n09246464", "cliff"], "973": ["n09256479", "coral_reef"], "974": ["n09288635", "geyser"], "975": ["n09332890", "lakeside"], "976": ["n09399592", "promontory"], "977": ["n09421951", "sandbar"], "978": ["n09428293", "seashore"], "979": ["n09468604", "valley"], "980": ["n09472597", "volcano"], "981": ["n09835506", "ballplayer"], "982": ["n10148035", "groom"], "983": ["n10565667", "scuba_diver"], "984": ["n11879895", "rapeseed"], "985": ["n11939491", "daisy"], "986": ["n12057211", "yellow_lady's_slipper"], "987": ["n12144580", "corn"], "988": ["n12267677", "acorn"], "989": ["n12620546", "hip"], "990": ["n12768682", "buckeye"], "991": ["n12985857", "coral_fungus"], "992": ["n12998815", "agaric"], "993": ["n13037406", "gyromitra"], "994": ["n13040303", "stinkhorn"], "995": ["n13044778", "earthstar"], "996": ["n13052670", "hen-of-the-woods"], "997": 
["n13054560", "bolete"], "998": ["n13133613", "ear"], "999": ["n15075141", "toilet_tissue"]} -------------------------------------------------------------------------------- /static/jquery.min.js: -------------------------------------------------------------------------------- 1 | /*! jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ 2 | !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" 
"]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return 
t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof 
e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;nx",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/\s*$/g;function Oe(e,t){return 
A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete a.handle,a.events={},l)for(n=0,r=l[i].length;n")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="
",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0