├── .gitignore ├── .idea ├── .gitignore ├── inspectionProfiles │ └── profiles_settings.xml ├── misc.xml ├── modules.xml ├── text_reco.iml └── vcs.xml ├── .travis.yml ├── Aptfile ├── Dockerfile ├── Procfile ├── README.md ├── __init__.py ├── app.py ├── base ├── __init__.py └── watchdog.py ├── build └── lib │ ├── base │ ├── __init__.py │ └── watchdog.py │ └── text_reco │ ├── __init__.py │ ├── box_detection.py │ ├── boxdetect │ ├── __init__.py │ └── box_detection.py │ └── models │ ├── __init__.py │ ├── craft │ ├── __init__.py │ ├── basenet │ │ ├── __init__.py │ │ └── vgg16_bn.py │ ├── craft.py │ ├── craft_reader.py │ ├── craft_utils.py │ ├── file_utils.py │ └── imgproc.py │ └── crnn │ ├── __init__.py │ ├── crnn.py │ ├── crnn_run.py │ ├── preprocess.py │ └── utils.py ├── dist ├── index.html ├── script.js └── style.css ├── img └── front_.PNG ├── requirements.txt ├── src ├── index.html ├── script.js └── style.css ├── static ├── css │ └── scrolling-nav.css ├── dist │ ├── index.html │ ├── script.js │ └── style.css ├── gulpfile.js ├── js │ └── scrolling-nav.js ├── src │ ├── index.html │ ├── script.js │ └── style.css └── vendor │ ├── bootstrap │ ├── css │ │ ├── bootstrap-grid.css │ │ ├── bootstrap-grid.css.map │ │ ├── bootstrap-grid.min.css │ │ ├── bootstrap-grid.min.css.map │ │ ├── bootstrap-reboot.css │ │ ├── bootstrap-reboot.css.map │ │ ├── bootstrap-reboot.min.css │ │ ├── bootstrap-reboot.min.css.map │ │ ├── bootstrap.css │ │ ├── bootstrap.css.map │ │ ├── bootstrap.min.css │ │ └── bootstrap.min.css.map │ └── js │ │ ├── bootstrap.bundle.js │ │ ├── bootstrap.bundle.js.map │ │ ├── bootstrap.bundle.min.js │ │ ├── bootstrap.bundle.min.js.map │ │ ├── bootstrap.js │ │ ├── bootstrap.js.map │ │ ├── bootstrap.min.js │ │ └── bootstrap.min.js.map │ ├── jquery-easing │ ├── jquery.easing.compatibility.js │ ├── jquery.easing.js │ └── jquery.easing.min.js │ └── jquery │ ├── jquery.js │ ├── jquery.min.js │ ├── jquery.min.map │ ├── jquery.slim.js │ ├── jquery.slim.min.js │ └── jquery.slim.min.map ├── templates ├── index.html └── index_old.html ├── text_reco.egg-info ├── PKG-INFO ├── SOURCES.txt ├── dependency_links.txt └── top_level.txt └── text_reco ├── __init__.py ├── __pycache__ └── __init__.cpython-37.pyc ├── boxdetect ├── .box_detection.py.swp ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-37.pyc │ └── box_detection.cpython-37.pyc └── box_detection.py ├── cleaning └── .image_saver.py.swp ├── dist └── text-reco-0.1.tar.gz └── models ├── __init__.py ├── __pycache__ └── __init__.cpython-37.pyc ├── craft ├── .craft_reader.py.swi ├── .craft_reader.py.swj ├── .craft_reader.py.swk ├── .craft_reader.py.swl ├── .craft_reader.py.swm ├── .craft_reader.py.swn ├── .craft_reader.py.swo ├── .craft_reader.py.swp ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-37.pyc │ ├── craft.cpython-37.pyc │ ├── craft_reader.cpython-37.pyc │ ├── craft_utils.cpython-37.pyc │ ├── file_utils.cpython-37.pyc │ └── imgproc.cpython-37.pyc ├── basenet │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-37.pyc │ │ └── vgg16_bn.cpython-37.pyc │ └── vgg16_bn.py ├── craft.py ├── craft_reader.py ├── craft_utils.py ├── imgproc.py └── pretrain │ └── craft_mlt_25k.pth └── crnn ├── __init__.py ├── __pycache__ ├── __init__.cpython-37.pyc ├── crnn.cpython-37.pyc ├── crnn_run.cpython-37.pyc ├── preprocess.cpython-37.pyc └── utils.cpython-37.pyc ├── crnn.py ├── crnn_run.py ├── preprocess.py ├── pretrain └── crnn.pth └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | /venv/ 2 | 
data/ 3 | *.swp 4 | *.swo 5 | *.jpeg 6 | -------------------------------------------------------------------------------- /.idea/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Default ignored files 3 | /workspace.xml -------------------------------------------------------------------------------- /.idea/inspectionProfiles/profiles_settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.idea/text_reco.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 11 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | os: windows 3 | -------------------------------------------------------------------------------- /Aptfile: -------------------------------------------------------------------------------- 1 | libsm6 2 | libxrender1 3 | libfontconfig1 4 | libice6 5 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | MAINTAINER S3nh "steam.panek@gmail.com" 3 | RUN apt-get update -y && \ 4 | apt-get install -y python-pip python-dev 5 | 6 | COPY requirements.txt /app/requirements.txt 7 | WORKDIR /app 8 | COPY . /app 9 | ENTRYPOINT ["python"] 10 | CMD [ "app.py" ] 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: gunicorn app:app 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![travis build](https://api.travis-ci.com/s3nh/pytorch-text-recognition.svg?branch=master) 2 | 3 | ### Text detection and recognition 4 | This repository contains a tool that detects regions containing text and recognizes them one by one. 5 | 6 | 7 | ### Description 8 | Two pretrained neural networks are used. The first one is responsible for detecting the places in which 9 | text appears and returning their coordinates. 10 | The structure used for this operation is based on the CRAFT architecture. 11 | - [Craft Paper](https://arxiv.org/pdf/1904.01941.pdf) 12 | 13 | The second network takes the detected regions and recognizes the words contained inside them. 14 | A Convolutional Recurrent Neural Network (CRNN) is used for this operation. 
15 | 16 | - [CRNN Paper](https://arxiv.org/abs/1507.05717) 17 | 18 | 19 | #### Example 20 | 21 | Under construction 22 | 23 | 24 | #### Deployment 25 | I decided to deploy it on Heroku (a temporary solution), but the amount of memory available on that platform 26 | is not enough. 27 | You can check it at the [heroku app](https://glacial-ravine-89423.herokuapp.com/). 28 | I decided to add a Bootstrap template because it makes the whole solution more intuitive. 29 | 30 | ### Windows Installation 31 | To install it locally, run from your virtual env 32 | 33 | ```bash 34 | python -m pip install -r requirements.txt 35 | ``` 36 | 37 | #### Linux installation 38 | 39 | To install it properly on Linux, you additionally have to run 40 | 41 | 42 | ```bash 43 | 44 | apt-get update 45 | apt-get install -y libsm6 libxext6 libxrender-dev 46 | pip install opencv-python 47 | 48 | ``` 49 | 50 | If problems with cv2 imports still appear, install 51 | 52 | 53 | ```bash 54 | pip install opencv-contrib-python 55 | ``` 56 | 57 | Then you can run 58 | 59 | ```bash 60 | python -m pip install -r requirements.txt 61 | ``` 62 | 63 | 64 | ### Run 65 | To run it locally, first activate your environment 66 | 67 | ```bash 68 | > win 69 | venv\Scripts\activate.bat 70 | 71 | > linux 72 | source venv/bin/activate 73 | 74 | ``` 75 | and run straight from the project root 76 | 77 | 78 | ```bash 79 | python app.py 80 | 81 | ``` 82 | If everything goes properly, you'll see a screen just like the one below 83 | at localhost:8000. 84 | 85 | ![screen](img/front_.PNG?raw=True) 86 | 87 | 88 | 89 | 90 | #### Updates 91 | 92 | I decided to remove argparse because, as I mentioned earlier, it was less intuitive. 93 | The solution is not fast; it is more of a toy example that shows how to use a PyTorch model 94 | in a deployment environment. 95 | 96 | The version used here relies on torch-cpu, which makes preprocessing and detection slightly slower. 97 | I tested it on CUDA and it was much faster. 98 | 99 | If you have more information, drop me a line. 100 | If you like it, give it a star. 101 | 102 | Draft: show how it works on a complex .tif example document. 
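#### Programmatic usage

If you prefer to skip the Flask front end, the snippet below is a minimal sketch of the same pipeline that `app.py` runs on upload. It assumes the pretrained weights sit in the default locations used by this repository (`text_reco/models/craft/pretrain/` and `text_reco/models/crnn/pretrain/`); `sample.png` is a placeholder image path.

```python
import numpy as np
from PIL import Image
from torch.autograd import Variable

from text_reco.models.craft.craft_reader import CraftReader
from text_reco.models.crnn.crnn_run import CRNNReader

crr = CraftReader("sample.png")   # CRAFT: detect text boxes
crnn = CRNNReader()               # CRNN: recognize each cropped box
boxes, img_res = crr.boxes_detect()

for i, box in enumerate(boxes):
    # Crop the detected box out of the resized image, as app.py does
    x, y = int(box[0][0]), int(box[0][1])
    w = int(np.abs(box[0][0] - box[1][0]))
    h = int(np.abs(box[0][1] - box[2][1]))
    crop = Image.fromarray(img_res[y:y + h, x:x + w].astype('uint8')).convert('L')
    tensor = crnn.transformer(crop)
    tensor = Variable(tensor.view(1, *tensor.size()))
    print(i, crnn.get_predictions(tensor))
```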
103 | 104 | [Contact Info](https://s3nh.github.io) 105 | 106 | 107 | 108 | 109 | 110 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/__init__.py -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.backends.cudnn as cudnn 4 | from torch.autograd import Variable 5 | from PIL import Image 6 | import skimage 7 | import argparse 8 | 9 | 10 | import flask 11 | from flask import Flask, request, render_template 12 | 13 | from skimage import io 14 | import numpy as np 15 | import json 16 | import zipfile 17 | from collections import OrderedDict 18 | 19 | import text_reco.models.craft.craft_utils as craft_utils 20 | import text_reco.models.craft.imgproc as img_proc 21 | 22 | from text_reco.models.craft.craft import CRAFT 23 | from text_reco.models.craft.craft_reader import CraftReader 24 | from text_reco.boxdetect.box_detection import BoxDetect 25 | from text_reco.models.crnn.crnn_run import CRNNReader 26 | 27 | #def build_args(): 28 | # parser = argparse.ArgumentParser() 29 | # parser.add_argument('--infile', type = str, help = 'dataset to preprocess') 30 | # args = parser.parse_args() 31 | # return args 32 | app = Flask(__name__) 33 | 34 | @app.route("/") 35 | @app.route("/index") 36 | def index(): 37 | return flask.render_template('index.html') 38 | 39 | @app.route('/predict', methods=['POST']) 40 | def make_prediction(): 41 | if request.method=='POST': 42 | file_=request.files['image'] 43 | if not file_: 44 | return render_template('index.html', label = "No file") 45 | crr = CraftReader(file_) 46 | boxes, img_res = crr.boxes_detect() 47 | results = {} 48 | for _, tmp_box in enumerate(boxes): 49 | x = int(tmp_box[0][0]) 50 | y = int(tmp_box[0][1]) 51 | w = int(np.abs(tmp_box[0][0] - tmp_box[1][0])) 52 | h = int(np.abs(tmp_box[0][1] - tmp_box[2][1])) 53 | tmp_img = img_res[y:y+h, x:x+w] 54 | tmp_img = Image.fromarray(tmp_img.astype('uint8')).convert('L') 55 | tmp_img = crnn.transformer(tmp_img) 56 | tmp_img = tmp_img.view(1, *tmp_img.size()) 57 | tmp_img = Variable(tmp_img) 58 | results['{}'.format(_)] = crnn.get_predictions(tmp_img) 59 | return render_template('index.html', label = results) 60 | 61 | if __name__ == "__main__": 62 | crnn=CRNNReader() 63 | 64 | app.run(host='0.0.0.0', port=8000, debug = True) 65 | -------------------------------------------------------------------------------- /base/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/base/__init__.py -------------------------------------------------------------------------------- /base/watchdog.py: -------------------------------------------------------------------------------- 1 | import redis 2 | 3 | class WatchDogBase(): 4 | def __init__(self, host = 'localhost', port = 6379, db = 13): 5 | self.host = host 6 | self.port = port 7 | self.db = db 8 | 9 | def create_base(self): 10 | r = redis.StrictRedis(db = self.db) 11 | return r 12 | 13 | def update_base(self): 14 | pass 15 | 16 | def update_table(self, records): 17 | pass 18 | 19 | 20 | def flush_all(self): 21 | r.flushall() 22 | 23 | def 
flush_db(self): 24 | r.flushdb() 25 | 26 | 27 | def main(): 28 | 29 | wb = WatchDogBase() 30 | base = wb.create_base() 31 | 32 | 33 | 34 | if __name__ == "__main__": 35 | main() 36 | -------------------------------------------------------------------------------- /build/lib/base/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/base/__init__.py -------------------------------------------------------------------------------- /build/lib/base/watchdog.py: -------------------------------------------------------------------------------- 1 | import redis 2 | 3 | class WatchDogBase(): 4 | def __init__(self, host = 'localhost', port = 6379, db = 13): 5 | self.host = host 6 | self.port = port 7 | self.db = db 8 | 9 | def create_base(self): 10 | r = redis.StrictRedis(db = self.db) 11 | return r 12 | 13 | def update_base(self): 14 | pass 15 | 16 | def update_table(self, records): 17 | pass 18 | 19 | 20 | def flush_all(self): 21 | # Open a connection first; `r` was not defined in this scope before 22 | r = self.create_base() 23 | r.flushall() 24 | 25 | def flush_db(self): 26 | r = self.create_base() 27 | r.flushdb() 28 | 29 | 30 | def main(): 31 | 32 | wb = WatchDogBase() 33 | base = wb.create_base() 34 | 35 | 36 | 37 | if __name__ == "__main__": 38 | main() 39 | -------------------------------------------------------------------------------- /build/lib/text_reco/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/box_detection.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | -------------------------------------------------------------------------------- /build/lib/text_reco/boxdetect/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/boxdetect/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/boxdetect/box_detection.py: -------------------------------------------------------------------------------- 1 | # Simple logic 2 | # box on input - preprocess - sliced image on output 3 | import cv2 4 | import json 5 | import numpy as np 6 | 7 | class BoxDetect(): 8 | 9 | def __init__(self, boxes): 10 | self.boxes = boxes 11 | self.n_boxes = len(self.boxes) 12 | 13 | def preprocess(self, image): 14 | img_storage = dict() 15 | print("These are the boxes") 16 | print(self.boxes) 17 | for idx, el in enumerate(self.boxes): 18 | # Draw each box on the image; the color and thickness arguments were missing 19 | img_storage[idx] = cv2.rectangle(image, (el[0], el[1]), (el[2], el[3]), (0, 255, 0), 2) 20 | return img_storage 21 | 22 | @staticmethod 23 | def load_box(path): 24 | with open(path, 'r') as outfile: 25 | file_ = json.load(outfile) 26 | return file_ 27 | 28 | @staticmethod 29 | def preprocess_box(file_, img): 30 | for el in file_.keys(): 31 | x,y,w,h = cv2.boundingRect(np.array(file_[el])) 32 | roi = img[x:x+w, y:y+h] 33 | cv2.imshow('image', roi) 34 | cv2.waitKey(0) 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/models/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/models/craft/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/basenet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/models/craft/basenet/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/basenet/vgg16_bn.py: -------------------------------------------------------------------------------- 1 | # https://github.com/clovaai/CRAFT-pytorch/blob/master/basenet/vgg16_bn.py 2 | 3 | 4 | # Imports 5 | 6 | from collections import namedtuple 7 | 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.init as init 11 | from torchvision import models 12 | from torchvision.models.vgg import model_urls 13 | 14 | def init_weights(modules): 15 | # https://prateekvjoshi.com/2016/03/29/understanding-xavier-initialization-in-deep-neural-networks/ 16 | for m in modules: 17 | if isinstance(m, nn.Conv2d): 18 | init.xavier_uniform_(m.weight.data) 19 | if m.bias is not None: 20 | m.bias.data.zero_() 21 | 22 | elif isinstance(m, nn.BatchNorm2d): 23 | m.weight.data.fill_(1) 24 | m.bias.data.zero_() 25 | 26 | elif isinstance(m, nn.Linear): 27 | m.weight.data.normal_(0, 0.01) 28 | m.bias.data.zero_() 29 | 30 | 31 | class vgg16_bn(torch.nn.Module): 32 | 33 | def __init__(self, pretrained = True, freeze = True): 34 | super(vgg16_bn, self).__init__() 35 | model_urls['vgg16_bn'] = model_urls['vgg16_bn'].replace('https://', 'http://') 36 | vgg_pretrained_features = models.vgg16_bn(pretrained=pretrained).features 37 | self.slice1 = torch.nn.Sequential() 38 | self.slice2 = torch.nn.Sequential() 39 | self.slice3 = torch.nn.Sequential() 40 | self.slice4 = torch.nn.Sequential() 41 | self.slice5 = torch.nn.Sequential() 42 | 43 | for x in range(12): 44 | self.slice1.add_module(str(x), vgg_pretrained_features[x]) 45 | for x in range(12, 19): 46 | self.slice2.add_module(str(x), vgg_pretrained_features[x]) 47 | for x in range(19, 29): 48 | self.slice3.add_module(str(x), vgg_pretrained_features[x]) 49 | for x in range(29, 39): 50 | self.slice4.add_module(str(x), vgg_pretrained_features[x]) 51 | 52 | self.slice5 = torch.nn.Sequential( 53 | nn.MaxPool2d(3, 1, 1), 54 | nn.Conv2d(512, 1024, kernel_size = 3, padding = 6, dilation = 6), 55 | nn.Conv2d(1024, 1024, 1)) 56 | 57 | if not pretrained: 58 | init_weights(self.slice1.modules()) 59 | init_weights(self.slice2.modules()) 60 | init_weights(self.slice3.modules()) 61 | init_weights(self.slice4.modules()) 62 | 63 | init_weights(self.slice5.modules()) 64 | 65 | 66 | if freeze: 67 | for param in self.slice1.parameters(): 68 | param.requires_grad = False 69 | 70 | 71 | # Define forward pass 72 | 73 | 74 | def forward(self, x): 75 | h = self.slice1(x) 76 | h_relu2_2 = h 77 | h = self.slice2(h) 78 | h_relu3_2 = h 79 | h = self.slice3(h) 80 | h_relu4_3 = h 81 | h = self.slice4(h) 82 | h_relu5_3 = h 83 | h = self.slice5(h) 84 | 
h_fc7 = h 85 | vgg_outputs = namedtuple("VggOutputs", ['fc7', 'relu5_3', 'relu4_3', 'relu3_2', 'relu2_2']) 86 | out = vgg_outputs(h_fc7 ,h_relu5_3, h_relu4_3, h_relu3_2, h_relu2_2) 87 | return out 88 | 89 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/craft.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from text_reco.models.craft.basenet.vgg16_bn import vgg16_bn, init_weights 5 | 6 | class DoubleConv(nn.Module): 7 | def __init__(self, in_channel, mid_channel, out_channel): 8 | super(DoubleConv, self).__init__() 9 | 10 | self.conv = nn.Sequential( 11 | nn.Conv2d(in_channel + mid_channel, mid_channel, kernel_size = 1), 12 | nn.BatchNorm2d(mid_channel), 13 | nn.ReLU(inplace=True), 14 | nn.Conv2d(mid_channel, out_channel, kernel_size = 3, padding = 1), 15 | nn.BatchNorm2d(out_channel), 16 | nn.ReLU(inplace=True)) 17 | 18 | def forward(self, x): 19 | x = self.conv(x) 20 | return x 21 | 22 | class CRAFT(nn.Module): 23 | def __init__(self, pretrained=False, freeze=False): 24 | super(CRAFT, self).__init__() 25 | self.basenet = vgg16_bn(pretrained, freeze) 26 | self.upconv1 = DoubleConv(1024, 512, 256) 27 | self.upconv2 = DoubleConv(512, 256, 128) 28 | self.upconv3 = DoubleConv(256, 128, 64) 29 | self.upconv4 = DoubleConv(128, 64, 32) 30 | 31 | n_classes = 2 32 | self.conv_cls = nn.Sequential( 33 | nn.Conv2d(32, 32, 3, 1), 34 | nn.ReLU(inplace=True), 35 | nn.Conv2d(32, 32, 3, 1), 36 | nn.ReLU(inplace=True), 37 | nn.Conv2d(32, 16, 3, 1), 38 | nn.ReLU(inplace=True), 39 | nn.Conv2d(16, 16, 1), 40 | nn.ReLU(inplace=True), 41 | nn.Conv2d(16, n_classes, kernel_size=1),) 42 | 43 | init_weights(self.upconv1.modules()) 44 | init_weights(self.upconv2.modules()) 45 | init_weights(self.upconv3.modules()) 46 | init_weights(self.upconv4.modules()) 47 | init_weights(self.conv_cls.modules()) 48 | 49 | def forward(self, x): 50 | sources = self.basenet(x) 51 | y = torch.cat([sources[0], sources[1]], dim=1) 52 | y = self.upconv1(y) 53 | 54 | y = F.interpolate(y, size = sources[2].size()[2:], mode = 'bilinear', align_corners=False) 55 | y = torch.cat([y, sources[2]], dim=1) 56 | y = self.upconv2(y) 57 | 58 | y = F.interpolate(y, size = sources[3].size()[2:], mode = 'bilinear', align_corners=False) 59 | y = torch.cat([y, sources[3]], dim=1) 60 | y = self.upconv3(y) 61 | 62 | y = F.interpolate(y, size = sources[4].size()[2:], mode = 'bilinear', align_corners=False) 63 | y = torch.cat([y, sources[4]], dim =1) 64 | feature = self.upconv4(y) 65 | y = self.conv_cls(feature) 66 | 67 | return y.permute(0, 2, 3, 1), feature 68 | 69 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/craft_reader.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.backends.cudnn as cudnn 4 | from torch.autograd import Variable 5 | import text_reco.models.craft.craft_utils as craft_utils 6 | import text_reco.models.craft.imgproc as img_proc 7 | from text_reco.models.craft.craft import CRAFT 8 | from text_reco.boxdetect.box_detection import BoxDetect 9 | from text_reco.models.crnn.crnn_run import CRNNReader 10 | 11 | from PIL import Image 12 | 13 | import cv2 14 | from skimage import io 15 | import numpy as np 16 | from text_reco.models.craft.imgproc import ImageConvert 17 | import json 18 | import 
zipfile 19 | from collections import OrderedDict 20 | from skimage import io 21 | from skimage.transform import rescale, resize, downscale_local_mean 22 | 23 | class CraftReader(ImageConvert): 24 | def __init__(self, image): 25 | super(CraftReader, self).__init__(image) 26 | self.model_path = 'text_reco/models/craft/pretrain/craft_mlt_25k.pth' 27 | self.net = CRAFT() 28 | self.net.load_state_dict(self.copyStateDict(torch.load(self.model_path))) 29 | self.net.eval() 30 | self.mag_ratio = 1 31 | self.square_size = 1280 32 | 33 | @staticmethod 34 | def copyStateDict(state_dict): 35 | if list(state_dict.keys())[0].startswith("module"): 36 | start_idx = 1 37 | else: 38 | start_idx = 0 39 | new_state_dict = OrderedDict() 40 | for k, v in state_dict.items(): 41 | name = ".".join(k.split(".")[start_idx:]) 42 | new_state_dict[name] = v 43 | return new_state_dict 44 | 45 | @staticmethod 46 | def str2bool(v): 47 | return v.lower() in ("yes", "y", "t", "1") 48 | 49 | def image_preprocess(self, image): 50 | image = self.normalizeMeanVariance(image) 51 | image = torch.from_numpy(image).permute(2, 0, 1) 52 | image = Variable(image.unsqueeze(0)) 53 | return image 54 | 55 | def boxes_detect(self): 56 | img_resized, target_ratio, size_heatmap = self.resize_aspect_ratio(self.image) 57 | ratio_h = ratio_w = 1/ target_ratio 58 | x = self.image_preprocess(img_resized) 59 | y, _ = self.net(x) 60 | score_text = y[0, :, :, 0].cpu().data.numpy() 61 | score_link = y[0, :, :, 1].cpu().data.numpy() 62 | boxes = craft_utils.getDetBoxes(textmap =score_text, linkmap = score_link, text_threshold =0.7, link_threshold=0.4, low_text=0.4) 63 | print("Ilosc boxow {}".format(len(boxes))) 64 | boxes = craft_utils.adjustResultCoordinates(boxes, ratio_w, ratio_h) 65 | return boxes, img_resized 66 | 67 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/craft_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | https://raw.githubusercontent.com/clovaai/CRAFT-pytorch/master/craft_utils.py 3 | """ 4 | import numpy as np 5 | import cv2 6 | import math 7 | 8 | def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text): 9 | linkmap = linkmap.copy() 10 | textmap = textmap.copy() 11 | img_h, img_w = textmap.shape 12 | 13 | ret, text_score = cv2.threshold(textmap, low_text, 1, 0) 14 | ret, link_score = cv2.threshold(linkmap, link_threshold, 1, 0) 15 | text_score_comb = np.clip(text_score + link_score, 0, 1) 16 | nLabels, labels, stats, centroids = cv2.connectedComponentsWithStats(text_score_comb.astype(np.uint8), connectivity=4) 17 | det = [] 18 | mapper = [] 19 | for k in range(1,nLabels): 20 | size = stats[k, cv2.CC_STAT_AREA] 21 | if size < 10: continue 22 | if np.max(textmap[labels==k]) < text_threshold: continue 23 | segmap = np.zeros(textmap.shape, dtype=np.uint8) 24 | segmap[labels==k] = 255 25 | segmap[np.logical_and(link_score==1, text_score==0)] = 0 # remove link area 26 | x, y = stats[k, cv2.CC_STAT_LEFT], stats[k, cv2.CC_STAT_TOP] 27 | w, h = stats[k, cv2.CC_STAT_WIDTH], stats[k, cv2.CC_STAT_HEIGHT] 28 | niter = int(math.sqrt(size * min(w, h) / (w * h)) * 2) 29 | sx, ex, sy, ey = x - niter, x + w + niter + 1, y - niter, y + h + niter + 1 30 | if sx < 0 : sx = 0 31 | if sy < 0 : sy = 0 32 | if ex >= img_w: ex = img_w 33 | if ey >= img_h: ey = img_h 34 | kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(1 + niter, 1 + niter)) 35 | segmap[sy:ey, sx:ex] = cv2.dilate(segmap[sy:ey, sx:ex], kernel) 
36 | np_contours = np.roll(np.array(np.where(segmap!=0)),1,axis=0).transpose().reshape(-1,2) 37 | rectangle = cv2.minAreaRect(np_contours) 38 | box = cv2.boxPoints(rectangle) 39 | w, h = np.linalg.norm(box[0] - box[1]), np.linalg.norm(box[1] - box[2]) 40 | box_ratio = max(w, h) / (min(w, h) + 1e-5) 41 | if abs(1 - box_ratio) <= 0.1: 42 | l, r = min(np_contours[:,0]), max(np_contours[:,0]) 43 | t, b = min(np_contours[:,1]), max(np_contours[:,1]) 44 | box = np.array([[l, t], [r, t], [r, b], [l, b]], dtype=np.float32) 45 | startidx = box.sum(axis=1).argmin() 46 | box = np.roll(box, 4-startidx, 0) 47 | box = np.array(box) 48 | det.append(box) 49 | mapper.append(k) 50 | return det, labels, mapper 51 | 52 | def getDetBoxes(textmap, linkmap, text_threshold, link_threshold, low_text): 53 | boxes, labels, mapper = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text) 54 | return boxes 55 | 56 | def adjustResultCoordinates(polys, ratio_w, ratio_h, ratio_net = 2): 57 | if len(polys) > 0: 58 | polys = np.array(polys) 59 | for k in range(len(polys)): 60 | if polys[k] is not None: 61 | polys[k] *= (ratio_w * ratio_net, ratio_h * ratio_net) 62 | return polys 63 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/file_utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import cv2 4 | from PIL import Image 5 | 6 | 7 | 8 | class DataLoader(): 9 | 10 | 11 | def __init__(self, _file): 12 | 13 | self._file = _file 14 | self.extensions = ['.pdf', '.tif', '.png'] 15 | 16 | 17 | 18 | def load_image(self): 19 | 20 | try: 21 | _img = Image.open(self._file) 22 | return _img 23 | except: 24 | ValueError("File does not exist!") 25 | 26 | 27 | def main(): 28 | pass 29 | 30 | 31 | if __name__ == "__main__": 32 | main() 33 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/craft/imgproc.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from skimage import io 3 | import cv2 4 | 5 | class ImageConvert(): 6 | def __init__(self, img_array, interpolation =cv2.INTER_LINEAR , square_size = 1280, mag_ratio=1): 7 | self.image = io.imread(img_array) 8 | self.image = self.image[:, :, :3] 9 | self.image = np.array(self.image) 10 | print("Shape of processed file {}".format(len(self.image))) 11 | self.mean = (0.485, 0.456, 0.406) 12 | self.variance = (0.229, 0.224, 0.225) 13 | self.square_size = square_size 14 | self.interpolation = interpolation 15 | self.mag_ratio = mag_ratio 16 | 17 | def normalizeMeanVariance(self,image): 18 | image = image.copy().astype(np.float32) 19 | image -= np.array([self.mean[0] * 255.0, self.mean[1] * 255.0, self.mean[2] * 255.0], dtype = np.float32) 20 | image /= np.array([self.variance[0] * 255.0, self.variance[1] * 255.0, self.variance[2] * 255.0], dtype = np.float32) 21 | return image 22 | 23 | def resize_aspect_ratio(self, image): 24 | height, width, channel = image.shape 25 | target_size = self.mag_ratio * max(height, width) 26 | if target_size > self.square_size: 27 | target_size = self.square_size 28 | 29 | ratio = target_size / max(height, width) 30 | target_h, target_w = int(height * ratio), int(width * ratio) 31 | proc = cv2.resize(self.image, (target_w, target_h), interpolation = cv2.INTER_LINEAR) 32 | 33 | target_h32, target_w32 = target_h, target_w 34 | if target_h % 32 != 0: 35 | target_h32 = target_h + (32 
- target_h % 32) 36 | if target_w % 32 != 0: 37 | target_w32 = target_w + (32 - target_w % 32) 38 | 39 | resized = np.zeros((target_h32, target_w32, channel), dtype = np.float32) 40 | resized[0:target_h, 0:target_w, :] = proc 41 | target_h, target_w = target_h32, target_w32 42 | size_heatmap = (int(target_w/2), int(target_h/2)) 43 | return resized, ratio, size_heatmap 44 | 45 | def cvt2HeatmapImg(img): 46 | img = (np.clip(img, 0, 1) * 255).astype(np.uint8) 47 | img = cv2.applyColorMap(img, cv2.COLORMAP_JET) 48 | return img 49 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/crnn/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/build/lib/text_reco/models/crnn/__init__.py -------------------------------------------------------------------------------- /build/lib/text_reco/models/crnn/crnn.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | 3 | class BidirectionalLSTM(nn.Module): 4 | def __init__(self, _in, hidden, out): 5 | super(BidirectionalLSTM, self).__init__() 6 | self.rnn = nn.LSTM(_in, hidden, bidirectional=True) 7 | self.embedding = nn.Linear(hidden * 2, out) 8 | 9 | def forward(self, x): 10 | recurrent, _ = self.rnn(x) 11 | T, b, h = recurrent.size() 12 | t_rec = recurrent.view(T * b, h) 13 | output = self.embedding(t_rec) 14 | output = output.view(T, b, -1) 15 | return output 16 | 17 | class CRNN(nn.Module): 18 | def __init__(self, imgh, nc, nclass, nh, n_rnn=2, leakyReLU = False): 19 | super(CRNN, self).__init__() 20 | ks = [3, 3, 3, 3, 3, 3, 2] 21 | ps = [1, 1, 1, 1, 1, 1, 0] 22 | ss = [1, 1, 1, 1, 1, 1, 1] 23 | nm = [64, 128, 256, 256, 512, 512, 512] 24 | cnn = nn.Sequential() 25 | def convRelu(i, batchNormalization=False): 26 | nIn = nc if i == 0 else nm[i - 1] 27 | nOut = nm[i] 28 | cnn.add_module('conv{0}'.format(i), 29 | nn.Conv2d(nIn, nOut, ks[i], ss[i], ps[i])) 30 | if batchNormalization: 31 | cnn.add_module('batchnorm{0}'.format(i), nn.BatchNorm2d(nOut)) 32 | if leakyReLU: 33 | cnn.add_module('relu{0}'.format(i), 34 | nn.LeakyReLU(0.2, inplace=True)) 35 | else: 36 | cnn.add_module('relu{0}'.format(i), nn.ReLU(True)) 37 | convRelu(0) 38 | cnn.add_module('pooling{0}'.format(0), nn.MaxPool2d(2, 2)) # 64x16x64 39 | convRelu(1) 40 | cnn.add_module('pooling{0}'.format(1), nn.MaxPool2d(2, 2)) # 128x8x32 41 | convRelu(2, True) 42 | convRelu(3) 43 | cnn.add_module('pooling{0}'.format(2), 44 | nn.MaxPool2d((2, 2), (2, 1), (0, 1))) # 256x4x16 45 | convRelu(4, True) 46 | convRelu(5) 47 | cnn.add_module('pooling{0}'.format(3), 48 | nn.MaxPool2d((2, 2), (2, 1), (0, 1))) # 512x2x16 49 | convRelu(6, True) # 512x1x16 50 | self.cnn = cnn 51 | self.rnn = nn.Sequential( 52 | BidirectionalLSTM(512, nh, nh), 53 | BidirectionalLSTM(nh, nh, nclass)) 54 | 55 | def forward(self, input): 56 | conv = self.cnn(input) 57 | b, c, h, w = conv.size() 58 | assert h == 1, "the height of conv must be 1" 59 | conv = conv.squeeze(2) 60 | conv = conv.permute(2, 0, 1) # [w, b, c] 61 | output = self.rnn(conv) 62 | return output 63 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/crnn/crnn_run.py: -------------------------------------------------------------------------------- 1 | from PIL import Image 2 | from skimage import io 3 | import text_reco.models.crnn.crnn as crnn 4 | 5 | import 
torch 6 | from torch.autograd import Variable 7 | import text_reco.models.crnn.utils as utils 8 | import text_reco.models.crnn.preprocess as preprocess 9 | 10 | class CRNNReader(): 11 | def __init__(self, model_path= 'text_reco/models/crnn/pretrain/crnn.pth'): 12 | self.model_path = model_path 13 | self.model = crnn.CRNN(32, 1,37, 256) 14 | self.model = self.model.float() 15 | self.model.load_state_dict(torch.load(self.model_path)) 16 | self.model.eval() 17 | self.alphabet = '0123456789abcdefghijklmnopqrstuv2xyz' 18 | self.transformer = preprocess.resizeNormalize((100, 32)) 19 | self.converter = utils.strLabelConverter(self.alphabet) 20 | 21 | def get_predictions(self, img): 22 | self.model = self.model.float() 23 | img = img.float() 24 | predictions = self.model(img) 25 | _, predictions = predictions.max(2) 26 | predictions = predictions.transpose(1, 0).contiguous().view(-1) 27 | pred_size = Variable(torch.IntTensor([predictions.size(0)])) 28 | results = self.converter.decode(predictions.data, pred_size.data, raw=False) 29 | return results 30 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/crnn/preprocess.py: -------------------------------------------------------------------------------- 1 | import skimage 2 | 3 | import random 4 | import torch 5 | from torch.utils.data import Dataset 6 | from torch.utils.data import sampler 7 | import torchvision.transforms as transforms 8 | import lmdb 9 | import six 10 | import sys 11 | from PIL import Image 12 | import numpy as np 13 | 14 | class lmdbDataset(Dataset): 15 | def __init__(self, root=None, transform=None, target_transform=None): 16 | self.env = lmdb.open( 17 | root, 18 | max_readers=1, 19 | readonly=True, 20 | lock=False, 21 | readahead=False, 22 | meminit=False) 23 | 24 | if not self.env: 25 | print('cannot creat lmdb from %s' % (root)) 26 | sys.exit(0) 27 | 28 | with self.env.begin(write=False) as txn: 29 | nSamples = int(txn.get('num-samples')) 30 | self.nSamples = nSamples 31 | self.transform = transform 32 | self.target_transform = target_transform 33 | 34 | def __len__(self): 35 | return self.nSamples 36 | 37 | def __getitem__(self, index): 38 | assert index <= len(self), 'index range error' 39 | index += 1 40 | with self.env.begin(write=False) as txn: 41 | img_key = 'image-%09d' % index 42 | imgbuf = txn.get(img_key) 43 | buf = six.BytesIO() 44 | buf.write(imgbuf) 45 | buf.seek(0) 46 | try: 47 | img = Image.open(buf).convert('L') 48 | except IOError: 49 | print('Corrupted image for %d' % index) 50 | return self[index + 1] 51 | 52 | if self.transform is not None: 53 | img = self.transform(img) 54 | label_key = 'label-%09d' % index 55 | label = str(txn.get(label_key)) 56 | if self.target_transform is not None: 57 | label = self.target_transform(label) 58 | return (img, label) 59 | 60 | class resizeNormalize(object): 61 | def __init__(self, size, interpolation=Image.BILINEAR): 62 | self.size = size 63 | self.interpolation = interpolation 64 | self.toTensor = transforms.ToTensor() 65 | 66 | def __call__(self, img): 67 | img = img.resize(self.size , self.interpolation) 68 | img = self.toTensor(img) 69 | img.sub_(0.5).div_(0.5) 70 | return img 71 | 72 | class randomSequentialSampler(sampler.Sampler): 73 | def __init__(self, data_source, batch_size): 74 | self.num_samples = len(data_source) 75 | self.batch_size = batch_size 76 | 77 | def __iter__(self): 78 | n_batch = len(self) // self.batch_size 79 | tail = len(self) % self.batch_size 80 | index = 
torch.LongTensor(len(self)).fill_(0) 81 | for i in range(n_batch): 82 | random_start = random.randint(0, len(self) - self.batch_size) 83 | batch_index = random_start + torch.range(0, self.batch_size - 1) 84 | index[i * self.batch_size:(i + 1) * self.batch_size] = batch_index 85 | if tail: 86 | random_start = random.randint(0, len(self) - self.batch_size) 87 | tail_index = random_start + torch.range(0, tail - 1) 88 | index[(i + 1) * self.batch_size:] = tail_index 89 | 90 | return iter(index) 91 | 92 | def __len__(self): 93 | return self.num_samples 94 | 95 | 96 | class alignCollate(object): 97 | 98 | def __init__(self, imgH=32, imgW=100, keep_ratio=False, min_ratio=1): 99 | self.imgH = imgH 100 | self.imgW = imgW 101 | self.keep_ratio = keep_ratio 102 | self.min_ratio = min_ratio 103 | 104 | def __call__(self, batch): 105 | images, labels = zip(*batch) 106 | 107 | imgH = self.imgH 108 | imgW = self.imgW 109 | if self.keep_ratio: 110 | ratios = [] 111 | for image in images: 112 | w, h = image.size 113 | ratios.append(w / float(h)) 114 | ratios.sort() 115 | max_ratio = ratios[-1] 116 | imgW = int(np.floor(max_ratio * imgH)) 117 | imgW = max(imgH * self.min_ratio, imgW) # assure imgH >= imgW 118 | 119 | transform = resizeNormalize((imgW, imgH)) 120 | images = [transform(image) for image in images] 121 | images = torch.cat([t.unsqueeze(0) for t in images], 0) 122 | 123 | return images, labels 124 | -------------------------------------------------------------------------------- /build/lib/text_reco/models/crnn/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torch.autograd import Variable 4 | import collections 5 | 6 | 7 | class strLabelConverter(object): 8 | """Convert between str and label. 9 | 10 | NOTE: 11 | Insert `blank` to the alphabet for CTC. 12 | 13 | Args: 14 | alphabet (str): set of the possible characters. 15 | ignore_case (bool, default=True): whether or not to ignore all of the case. 16 | """ 17 | 18 | def __init__(self, alphabet, ignore_case=True): 19 | self._ignore_case = ignore_case 20 | if self._ignore_case: 21 | alphabet = alphabet.lower() 22 | self.alphabet = alphabet + '-' # for `-1` index 23 | 24 | self.dict = {} 25 | for i, char in enumerate(alphabet): 26 | # NOTE: 0 is reserved for 'blank' required by wrap_ctc 27 | self.dict[char] = i + 1 28 | 29 | def encode(self, text): 30 | """Support batch or single str. 31 | 32 | Args: 33 | text (str or list of str): texts to convert. 34 | 35 | Returns: 36 | torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. 37 | torch.IntTensor [n]: length of each text. 38 | """ 39 | if isinstance(text, str): 40 | text = [ 41 | self.dict[char.lower() if self._ignore_case else char] 42 | for char in text 43 | ] 44 | length = [len(text)] 45 | elif isinstance(text, collections.Iterable): 46 | length = [len(s) for s in text] 47 | text = ''.join(text) 48 | text, _ = self.encode(text) 49 | return (torch.IntTensor(text), torch.IntTensor(length)) 50 | 51 | def decode(self, t, length, raw=False): 52 | """Decode encoded texts back into strs. 53 | 54 | Args: 55 | torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. 56 | torch.IntTensor [n]: length of each text. 57 | 58 | Raises: 59 | AssertionError: when the texts and its length does not match. 60 | 61 | Returns: 62 | text (str or list of str): texts to convert. 
63 | """ 64 | if length.numel() == 1: 65 | length = length[0] 66 | assert t.numel() == length, "text with length: {} does not match declared length: {}".format(t.numel(), length) 67 | if raw: 68 | return ''.join([self.alphabet[i - 1] for i in t]) 69 | else: 70 | char_list = [] 71 | for i in range(length): 72 | if t[i] != 0 and (not (i > 0 and t[i - 1] == t[i])): 73 | char_list.append(self.alphabet[t[i] - 1]) 74 | return ''.join(char_list) 75 | else: 76 | # batch mode 77 | assert t.numel() == length.sum(), "texts with length: {} does not match declared length: {}".format(t.numel(), length.sum()) 78 | texts = [] 79 | index = 0 80 | for i in range(length.numel()): 81 | l = length[i] 82 | texts.append( 83 | self.decode( 84 | t[index:index + l], torch.IntTensor([l]), raw=raw)) 85 | index += l 86 | return texts 87 | 88 | 89 | class averager(object): 90 | """Compute average for `torch.Variable` and `torch.Tensor`. """ 91 | 92 | def __init__(self): 93 | self.reset() 94 | 95 | def add(self, v): 96 | if isinstance(v, Variable): 97 | count = v.data.numel() 98 | v = v.data.sum() 99 | elif isinstance(v, torch.Tensor): 100 | count = v.numel() 101 | v = v.sum() 102 | 103 | self.n_count += count 104 | self.sum += v 105 | 106 | def reset(self): 107 | self.n_count = 0 108 | self.sum = 0 109 | 110 | def val(self): 111 | res = 0 112 | if self.n_count != 0: 113 | res = self.sum / float(self.n_count) 114 | return res 115 | 116 | def oneHot(v, v_length, nc): 117 | batchSize = v_length.size(0) 118 | maxLength = v_length.max() 119 | v_onehot = torch.FloatTensor(batchSize, maxLength, nc).fill_(0) 120 | acc = 0 121 | for i in range(batchSize): 122 | length = v_length[i] 123 | label = v[acc:acc + length].view(-1, 1).long() 124 | v_onehot[i, :length].scatter_(1, label, 1.0) 125 | acc += length 126 | return v_onehot 127 | 128 | 129 | def loadData(v, data): 130 | v.data.resize_(data.size()).copy_(data) 131 | 132 | 133 | def prettyPrint(v): 134 | print('Size {0}, Type: {1}'.format(str(v.size()), v.data.type())) 135 | print('| Max: %f | Min: %f | Mean: %f' % (v.max().data[0], v.min().data[0], 136 | v.mean().data[0])) 137 | 138 | 139 | def assureRatio(img): 140 | """Ensure imgH <= imgW.""" 141 | b, c, h, w = img.size() 142 | if h > w: 143 | main = nn.UpsamplingBilinear2d(size=(h, h), scale_factor=None) 144 | img = main(img) 145 | return img 146 | -------------------------------------------------------------------------------- /dist/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | File upload input 6 | 7 | 8 | 9 | 10 | 11 |
12 | 13 | 14 |
15 | 16 |
17 |

Drag and drop a file or select add Image

18 |
19 |
20 |
21 | your image 22 |
23 | 24 |
25 |
26 |
27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /dist/script.js: -------------------------------------------------------------------------------- 1 | function readURL(input) { 2 | if (input.files && input.files[0]) { 3 | 4 | var reader = new FileReader(); 5 | 6 | reader.onload = function(e) { 7 | $('.image-upload-wrap').hide(); 8 | 9 | $('.file-upload-image').attr('src', e.target.result); 10 | $('.file-upload-content').show(); 11 | 12 | $('.image-title').html(input.files[0].name); 13 | }; 14 | 15 | reader.readAsDataURL(input.files[0]); 16 | 17 | } else { 18 | removeUpload(); 19 | } 20 | } 21 | 22 | function removeUpload() { 23 | $('.file-upload-input').replaceWith($('.file-upload-input').clone()); 24 | $('.file-upload-content').hide(); 25 | $('.image-upload-wrap').show(); 26 | } 27 | $('.image-upload-wrap').bind('dragover', function () { 28 | $('.image-upload-wrap').addClass('image-dropping'); 29 | }); 30 | $('.image-upload-wrap').bind('dragleave', function () { 31 | $('.image-upload-wrap').removeClass('image-dropping'); 32 | }); -------------------------------------------------------------------------------- /dist/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: sans-serif; 3 | background-color: #eeeeee; 4 | } 5 | 6 | .file-upload { 7 | background-color: #ffffff; 8 | width: 600px; 9 | margin: 0 auto; 10 | padding: 20px; 11 | } 12 | 13 | .file-upload-btn { 14 | width: 100%; 15 | margin: 0; 16 | color: #fff; 17 | background: #1FB264; 18 | border: none; 19 | padding: 10px; 20 | border-radius: 4px; 21 | border-bottom: 4px solid #15824B; 22 | transition: all .2s ease; 23 | outline: none; 24 | text-transform: uppercase; 25 | font-weight: 700; 26 | } 27 | 28 | .file-upload-btn:hover { 29 | background: #1AA059; 30 | color: #ffffff; 31 | transition: all .2s ease; 32 | cursor: pointer; 33 | } 34 | 35 | .file-upload-btn:active { 36 | border: 0; 37 | transition: all .2s ease; 38 | } 39 | 40 | .file-upload-content { 41 | display: none; 42 | text-align: center; 43 | } 44 | 45 | .file-upload-input { 46 | position: absolute; 47 | margin: 0; 48 | padding: 0; 49 | width: 100%; 50 | height: 100%; 51 | outline: none; 52 | opacity: 0; 53 | cursor: pointer; 54 | } 55 | 56 | .image-upload-wrap { 57 | margin-top: 20px; 58 | border: 4px dashed #1FB264; 59 | position: relative; 60 | } 61 | 62 | .image-dropping, 63 | .image-upload-wrap:hover { 64 | background-color: #1FB264; 65 | border: 4px dashed #ffffff; 66 | } 67 | 68 | .image-title-wrap { 69 | padding: 0 15px 15px 15px; 70 | color: #222; 71 | } 72 | 73 | .drag-text { 74 | text-align: center; 75 | } 76 | 77 | .drag-text h3 { 78 | font-weight: 100; 79 | text-transform: uppercase; 80 | color: #15824B; 81 | padding: 60px 0; 82 | } 83 | 84 | .file-upload-image { 85 | max-height: 200px; 86 | max-width: 200px; 87 | margin: auto; 88 | padding: 20px; 89 | } 90 | 91 | .remove-image { 92 | width: 200px; 93 | margin: 0; 94 | color: #fff; 95 | background: #cd4535; 96 | border: none; 97 | padding: 10px; 98 | border-radius: 4px; 99 | border-bottom: 4px solid #b02818; 100 | transition: all .2s ease; 101 | outline: none; 102 | text-transform: uppercase; 103 | font-weight: 700; 104 | } 105 | 106 | .remove-image:hover { 107 | background: #c13b2a; 108 | color: #ffffff; 109 | transition: all .2s ease; 110 | cursor: pointer; 111 | } 112 | 113 | .remove-image:active { 114 | border: 0; 115 | transition: all .2s ease; 116 | } 
-------------------------------------------------------------------------------- /img/front_.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/img/front_.PNG -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Click==7.0 2 | decorator==4.4.0 3 | Flask==1.1.1 4 | gunicorn==19.9.0 5 | imageio==2.5.0 6 | itsdangerous==1.1.0 7 | Jinja2==2.10.1 8 | lmdb==0.97 9 | MarkupSafe==1.1.1 10 | networkx==2.3 11 | numpy==1.17.1 12 | opencv-python 13 | Pillow==6.1.0 14 | PyWavelets==1.0.3 15 | scikit-image==0.15.0 16 | scipy==1.3.1 17 | six==1.12.0 18 | https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl 19 | torchvision 20 | Werkzeug==0.15.5 21 | -------------------------------------------------------------------------------- /src/index.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | 4 | 5 |
6 | 7 |
8 |

Drag and drop a file or select add Image

9 |
10 |
11 |
12 | your image 13 |
14 | 15 |
16 |
17 |
-------------------------------------------------------------------------------- /src/script.js: -------------------------------------------------------------------------------- 1 | function readURL(input) { 2 | if (input.files && input.files[0]) { 3 | 4 | var reader = new FileReader(); 5 | 6 | reader.onload = function(e) { 7 | $('.image-upload-wrap').hide(); 8 | 9 | $('.file-upload-image').attr('src', e.target.result); 10 | $('.file-upload-content').show(); 11 | 12 | $('.image-title').html(input.files[0].name); 13 | }; 14 | 15 | reader.readAsDataURL(input.files[0]); 16 | 17 | } else { 18 | removeUpload(); 19 | } 20 | } 21 | 22 | function removeUpload() { 23 | $('.file-upload-input').replaceWith($('.file-upload-input').clone()); 24 | $('.file-upload-content').hide(); 25 | $('.image-upload-wrap').show(); 26 | } 27 | $('.image-upload-wrap').bind('dragover', function () { 28 | $('.image-upload-wrap').addClass('image-dropping'); 29 | }); 30 | $('.image-upload-wrap').bind('dragleave', function () { 31 | $('.image-upload-wrap').removeClass('image-dropping'); 32 | }); 33 | -------------------------------------------------------------------------------- /src/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: sans-serif; 3 | background-color: #eeeeee; 4 | } 5 | 6 | .file-upload { 7 | background-color: #ffffff; 8 | width: 600px; 9 | margin: 0 auto; 10 | padding: 20px; 11 | } 12 | 13 | .file-upload-btn { 14 | width: 100%; 15 | margin: 0; 16 | color: #fff; 17 | background: #1FB264; 18 | border: none; 19 | padding: 10px; 20 | border-radius: 4px; 21 | border-bottom: 4px solid #15824B; 22 | transition: all .2s ease; 23 | outline: none; 24 | text-transform: uppercase; 25 | font-weight: 700; 26 | } 27 | 28 | .file-upload-btn:hover { 29 | background: #1AA059; 30 | color: #ffffff; 31 | transition: all .2s ease; 32 | cursor: pointer; 33 | } 34 | 35 | .file-upload-btn:active { 36 | border: 0; 37 | transition: all .2s ease; 38 | } 39 | 40 | .file-upload-content { 41 | display: none; 42 | text-align: center; 43 | } 44 | 45 | .file-upload-input { 46 | position: absolute; 47 | margin: 0; 48 | padding: 0; 49 | width: 100%; 50 | height: 100%; 51 | outline: none; 52 | opacity: 0; 53 | cursor: pointer; 54 | } 55 | 56 | .image-upload-wrap { 57 | margin-top: 20px; 58 | border: 4px dashed #1FB264; 59 | position: relative; 60 | } 61 | 62 | .image-dropping, 63 | .image-upload-wrap:hover { 64 | background-color: #1FB264; 65 | border: 4px dashed #ffffff; 66 | } 67 | 68 | .image-title-wrap { 69 | padding: 0 15px 15px 15px; 70 | color: #222; 71 | } 72 | 73 | .drag-text { 74 | text-align: center; 75 | } 76 | 77 | .drag-text h3 { 78 | font-weight: 100; 79 | text-transform: uppercase; 80 | color: #15824B; 81 | padding: 60px 0; 82 | } 83 | 84 | .file-upload-image { 85 | max-height: 200px; 86 | max-width: 200px; 87 | margin: auto; 88 | padding: 20px; 89 | } 90 | 91 | .remove-image { 92 | width: 200px; 93 | margin: 0; 94 | color: #fff; 95 | background: #cd4535; 96 | border: none; 97 | padding: 10px; 98 | border-radius: 4px; 99 | border-bottom: 4px solid #b02818; 100 | transition: all .2s ease; 101 | outline: none; 102 | text-transform: uppercase; 103 | font-weight: 700; 104 | } 105 | 106 | .remove-image:hover { 107 | background: #c13b2a; 108 | color: #ffffff; 109 | transition: all .2s ease; 110 | cursor: pointer; 111 | } 112 | 113 | .remove-image:active { 114 | border: 0; 115 | transition: all .2s ease; 116 | } 
-------------------------------------------------------------------------------- /static/css/scrolling-nav.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Start Bootstrap - Scrolling Nav (https://startbootstrap.com/template-overviews/scrolling-nav) 3 | * Copyright 2013-2019 Start Bootstrap 4 | * Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap-scrolling-nav/blob/master/LICENSE) 5 | */ 6 | header { 7 | padding: 156px 0 100px; 8 | } 9 | 10 | section { 11 | padding: 150px 0; 12 | } 13 | -------------------------------------------------------------------------------- /static/dist/index.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | 4 | 5 |
6 | 7 |
8 |

Drag and drop a file or select add Image

9 |
10 |
11 |
12 | your image 13 |
14 | 15 |
16 |
17 |
-------------------------------------------------------------------------------- /static/dist/script.js: -------------------------------------------------------------------------------- 1 | function readURL(input) { 2 | if (input.files && input.files[0]) { 3 | 4 | var reader = new FileReader(); 5 | 6 | reader.onload = function(e) { 7 | $('.image-upload-wrap').hide(); 8 | 9 | $('.file-upload-image').attr('src', e.target.result); 10 | $('.file-upload-content').show(); 11 | 12 | $('.image-title').html(input.files[0].name); 13 | }; 14 | 15 | reader.readAsDataURL(input.files[0]); 16 | 17 | } else { 18 | removeUpload(); 19 | } 20 | } 21 | 22 | function removeUpload() { 23 | $('.file-upload-input').replaceWith($('.file-upload-input').clone()); 24 | $('.file-upload-content').hide(); 25 | $('.image-upload-wrap').show(); 26 | } 27 | $('.image-upload-wrap').bind('dragover', function () { 28 | $('.image-upload-wrap').addClass('image-dropping'); 29 | }); 30 | $('.image-upload-wrap').bind('dragleave', function () { 31 | $('.image-upload-wrap').removeClass('image-dropping'); 32 | }); 33 | 34 | 35 | class TypeWriter { 36 | constructor(element) { 37 | this.element = element; 38 | this.text = element.textContent; 39 | this.height = element.offsetHeight; 40 | this.index = 0; 41 | this.addSpeed = 60; 42 | this.clearSpeed = 20; 43 | this.waitTime = 1000; 44 | 45 | this.removeText = this.removeText.bind(this); 46 | this.addText = this.addText.bind(this); 47 | 48 | this.init(); 49 | } 50 | 51 | removeText() { 52 | this.element.textContent = this.element.textContent.slice(0, -1); 53 | 54 | if (this.element.textContent.length == 0) { 55 | clearInterval(this.interval); 56 | 57 | var that = this; 58 | 59 | setTimeout(function(){ 60 | that.index = 0; 61 | that.interval = setInterval(that.addText, that.addSpeed); 62 | }, this.waitTime); 63 | } 64 | } 65 | 66 | addText() { 67 | this.element.textContent += this.text[this.index]; 68 | 69 | this.index = this.index + 1; 70 | 71 | if (this.index > this.text.length -1) { 72 | clearInterval(this.interval); 73 | 74 | var that = this; 75 | 76 | setTimeout(function(){ 77 | that.interval = setInterval(that.removeText, that.clearSpeed); 78 | }, this.waitTime); 79 | } 80 | 81 | } 82 | 83 | init() { 84 | this.element.textContent = ''; 85 | this.element.style.height = '' + this.height + 'px'; 86 | 87 | this.interval = setInterval(this.addText, this.addSpeed); 88 | } 89 | } 90 | 91 | Array.from(document.getElementsByClassName("typewriter")).forEach( 92 | function(element) { 93 | new TypeWriter(element); 94 | } 95 | ); -------------------------------------------------------------------------------- /static/dist/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: sans-serif; 3 | background-color: #eeeeee; 4 | } 5 | 6 | .file-upload { 7 | background-color: #ffffff; 8 | width: 600px; 9 | margin: 0 auto; 10 | padding: 20px; 11 | } 12 | 13 | .file-upload-btn { 14 | width: 100%; 15 | margin: 0; 16 | color: #fff; 17 | background: #1FB264; 18 | border: none; 19 | padding: 10px; 20 | border-radius: 4px; 21 | border-bottom: 4px solid #15824B; 22 | transition: all .2s ease; 23 | outline: none; 24 | text-transform: uppercase; 25 | font-weight: 700; 26 | } 27 | 28 | .file-upload-btn:hover { 29 | background: #AB8B6A 30 | color: #ffffff; 31 | transition: all .2s ease; 32 | cursor: pointer; 33 | } 34 | 35 | .file-upload-btn:active { 36 | border: 0; 37 | transition: all .2s ease; 38 | } 39 | 40 | .file-upload-content { 41 | display: 
none; 42 | text-align: center; 43 | } 44 | 45 | .file-upload-input { 46 | position: absolute; 47 | margin: 0; 48 | padding: 0; 49 | width: 100%; 50 | height: 100%; 51 | outline: none; 52 | opacity: 0; 53 | cursor: pointer; 54 | } 55 | 56 | .image-upload-wrap { 57 | margin-top: 20px; 58 | border: 4px dashed #1FB264; 59 | position: relative; 60 | } 61 | 62 | .image-dropping, 63 | .image-upload-wrap:hover { 64 | background-color: #1FB264; 65 | border: 4px dashed #ffffff; 66 | } 67 | 68 | .image-title-wrap { 69 | padding: 0 15px 15px 15px; 70 | color: #222; 71 | } 72 | 73 | .drag-text { 74 | text-align: center; 75 | } 76 | 77 | .drag-text h3 { 78 | font-weight: 100; 79 | text-transform: uppercase; 80 | color: #15824B; 81 | padding: 60px 0; 82 | } 83 | 84 | .file-upload-image { 85 | max-height: 200px; 86 | max-width: 200px; 87 | margin: auto; 88 | padding: 20px; 89 | } 90 | 91 | .remove-image { 92 | width: 200px; 93 | margin: 0; 94 | color: #fff; 95 | background: #cd4535; 96 | border: none; 97 | padding: 10px; 98 | border-radius: 4px; 99 | border-bottom: 4px solid #b02818; 100 | transition: all .2s ease; 101 | outline: none; 102 | text-transform: uppercase; 103 | font-weight: 700; 104 | } 105 | 106 | .remove-image:hover { 107 | background: #c13b2a; 108 | color: #ffffff; 109 | transition: all .2s ease; 110 | cursor: pointer; 111 | } 112 | 113 | .remove-image:active { 114 | border: 0; 115 | transition: all .2s ease; 116 | } 117 | 118 | .typewriter { 119 | font-size: 24px; 120 | font-family: "Courier"; 121 | } 122 | 123 | .container { 124 | width: 400px; 125 | border: 1px solid; 126 | padding: 20px; 127 | margin: auto; 128 | } 129 | 130 | .icon-button { 131 | background-color: white; 132 | border-radius: 2.6rem; 133 | cursor: pointer; 134 | display: inline-block; 135 | font-size: 1.3rem; 136 | height: 2.6rem; 137 | line-height: 2.6rem; 138 | margin: 0 5px; 139 | position: relative; 140 | text-align: center; 141 | -webkit-user-select: none; 142 | -moz-user-select: none; 143 | -ms-user-select: none; 144 | user-select: none; 145 | width: 2.6rem; 146 | } 147 | 148 | /* Circle */ 149 | .icon-button span { 150 | border-radius: 0; 151 | display: block; 152 | height: 0; 153 | left: 50%; 154 | margin: 0; 155 | position: absolute; 156 | top: 50%; 157 | -webkit-transition: all 0.3s; 158 | -moz-transition: all 0.3s; 159 | -o-transition: all 0.3s; 160 | transition: all 0.3s; 161 | width: 0; 162 | } 163 | .icon-button:hover span { 164 | width: 2.6rem; 165 | height: 2.6rem; 166 | border-radius: 2.6rem; 167 | margin: -1.3rem; 168 | } 169 | 170 | /* Icons */ 171 | .icon-button i { 172 | background: none; 173 | color: white; 174 | height: 2.6rem; 175 | left: 0; 176 | line-height: 2.6rem; 177 | position: absolute; 178 | top: 0; 179 | -webkit-transition: all 0.3s; 180 | -moz-transition: all 0.3s; 181 | -o-transition: all 0.3s; 182 | transition: all 0.3s; 183 | width: 2.6rem; 184 | z-index: 10; 185 | } 186 | 187 | 188 | 189 | 190 | @import url('//netdna.bootstrapcdn.com/font-awesome/3.2.1/css/font-awesome.min.css'); 191 | 192 | .button-wrap { 193 | width: 150px; 194 | height: 150px; 195 | position: relative; 196 | display: inline-block; 197 | margin: 25px 50px 0; 198 | cursor: pointer; 199 | border-radius: 100%; 200 | box-shadow: inset 0 0 15px rgba(0, 0, 0, 0.3); 201 | 202 | &.facebook { 203 | background-color: #3B5998; 204 | i.active { color: #3B5998; } 205 | } 206 | 207 | &.twitter { 208 | background-color: #0AC; 209 | i.active { color: #0AC; } 210 | } 211 | 212 | &.pinterest { 213 | background-color: #CD2129; 214 
| i.active { color: #CD2129; } 215 | } 216 | 217 | &.dribbble { 218 | background-color: #F26798; 219 | i.active { color: #F26798; } 220 | } 221 | 222 | &:hover { 223 | .button-inner-wrap { 224 | width: 115px; 225 | height: 115px; 226 | 227 | i.inactive { transform: translate(100px, -50%); } 228 | i.active { transform: translate(-50%, -50%); } 229 | } 230 | } 231 | } 232 | 233 | .button-inner-wrap { 234 | width: 150px; 235 | height: 150px; 236 | border: 1px solid #DDD; 237 | position: absolute; 238 | left: 50%; 239 | top: 50%; 240 | overflow: hidden; 241 | background-color: #FFF; 242 | border-radius: 100%; 243 | transform: translate(-50%, -50%); 244 | transition: all 0.3s ease; 245 | 246 | i { 247 | position: absolute; 248 | left: 50%; 249 | top: 50%; 250 | font-size: 50px; 251 | transition: all 0.3s ease; 252 | 253 | &.inactive { 254 | color: #CCC; 255 | transform: translate(-50%, -50%); 256 | } 257 | 258 | &.active { transform: translate(-150px, -50%); } 259 | } 260 | } 261 | 262 | 263 | /* Wrapper */ 264 | .icon-button { 265 | background-color: white; 266 | border-radius: 2.6rem; 267 | cursor: pointer; 268 | display: inline-block; 269 | font-size: 1.3rem; 270 | height: 2.6rem; 271 | line-height: 2.6rem; 272 | margin: 0 5px; 273 | position: relative; 274 | text-align: center; 275 | -webkit-user-select: none; 276 | -moz-user-select: none; 277 | -ms-user-select: none; 278 | user-select: none; 279 | width: 2.6rem; 280 | } 281 | 282 | /* Circle */ 283 | .icon-button span { 284 | border-radius: 0; 285 | display: block; 286 | height: 0; 287 | left: 50%; 288 | margin: 0; 289 | position: absolute; 290 | top: 50%; 291 | -webkit-transition: all 0.3s; 292 | -moz-transition: all 0.3s; 293 | -o-transition: all 0.3s; 294 | transition: all 0.3s; 295 | width: 0; 296 | } 297 | .icon-button:hover span { 298 | width: 2.6rem; 299 | height: 2.6rem; 300 | border-radius: 2.6rem; 301 | margin: -1.3rem; 302 | } 303 | 304 | /* Icons */ 305 | .icon-button i { 306 | background: none; 307 | color: white; 308 | height: 2.6rem; 309 | left: 0; 310 | line-height: 2.6rem; 311 | position: absolute; 312 | top: 0; 313 | -webkit-transition: all 0.3s; 314 | -moz-transition: all 0.3s; 315 | -o-transition: all 0.3s; 316 | transition: all 0.3s; 317 | width: 2.6rem; 318 | z-index: 10; 319 | } 320 | 321 | 322 | 323 | $colors: ( 324 | background: #1d1f20, 325 | text: #eee 326 | ); 327 | 328 | $font: ( 329 | family: sans-serif, 330 | size: 24px, 331 | letter-spacing: 0 332 | ); 333 | 334 | $config: ( 335 | animated: false, 336 | nb-letters: 16, 337 | animation-length: 4s, 338 | mask-letters: ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9") 339 | ); 340 | 341 | // ---------------------------------- 342 | 343 | @function color($value) { 344 | @return map-get($colors, $value); 345 | } 346 | 347 | @function font($value) { 348 | @return map-get($font, $value); 349 | } 350 | 351 | @function config($value) { 352 | @return map-get($config, $value); 353 | } 354 | 355 | @function random-string($list) { 356 | $result: null; 357 | @for $i from 1 through length($list) { 358 | $random: random(length($list)); 359 | $current: nth($list, $random); 360 | $list: remove-nth($list, $random); 361 | $result: $result#{$current}; 362 | } 363 | @return $result; 364 | } 365 | 366 | @function remove-nth($list, $index) { 367 | $result: (); 368 | @for $i from 1 through length($list) { 369 | @if $i != $index { 370 | $result: append($result, nth($list, $i)); 371 | } 372 | } 373 | @return $result; 374 | } 375 | 376 | @mixin respond-to($breakpoint) { 377 | @if 
type-of($breakpoint) == list { 378 | @media (min-width: nth($breakpoint, 1)) and (max-width: nth($breakpoint, 2)) { 379 | @content; 380 | } 381 | } 382 | @else { 383 | @media (max-width: $breakpoint) { 384 | @content; 385 | } 386 | } 387 | } 388 | 389 | // ---------------------------------- 390 | 391 | body { 392 | text-align: center; 393 | background-color: color(background); 394 | } 395 | 396 | .wrapper { 397 | position: absolute; 398 | top: 50%; 399 | left: 50%; 400 | width: 90%; 401 | font-size: 0; 402 | transform: translate(-50%); 403 | } 404 | 405 | p { 406 | font-family: font(family); 407 | font-size: 14px; 408 | font-weight: 500; 409 | color: color(text); 410 | opacity: 0.3; 411 | } 412 | 413 | .letter { 414 | width: font(size); 415 | display: inline-block; 416 | vertical-align: middle; 417 | position: relative; 418 | overflow: hidden; 419 | margin: 0 ceil(font(letter-spacing) / 2); 420 | font-family: font(family); 421 | font-size: font(size); 422 | font-weight: 600; 423 | line-height: font(size); 424 | text-transform: uppercase; 425 | color: color(text); 426 | @include respond-to(625px) { 427 | //font-size: 16px; 428 | //width: 18px; 429 | } 430 | &:before { 431 | position: absolute; 432 | top: 0; 433 | left: 0; 434 | width: 100%; 435 | word-break: break-all; 436 | background-color: color(background); 437 | } 438 | } 439 | 440 | // ---------------------------------- 441 | 442 | @for $i from 1 through config(nb-letters) { 443 | $length: length(config(mask-letters)); 444 | $random: random($length); 445 | $steps: $random - 1; 446 | $offset: font(size) * $steps * -1; 447 | $delay: random(100) / 100; 448 | $duration: $steps * (config(animation-length) / $random); 449 | .letter:nth-child(#{$i}):before { 450 | content: quote(random-string(config(mask-letters))); 451 | margin-top: $offset; 452 | animation-name: letter#{$i}; 453 | animation-duration: $duration - ($duration * $delay); 454 | animation-delay: $delay * 1s; 455 | animation-fill-mode: forwards; 456 | } 457 | @keyframes letter#{$i} { 458 | from { 459 | margin-top: $offset; 460 | } 461 | to { 462 | margin-top: font(size); 463 | } 464 | } 465 | } 466 | } -------------------------------------------------------------------------------- /static/gulpfile.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // Load plugins 4 | const browsersync = require("browser-sync").create(); 5 | const del = require("del"); 6 | const gulp = require("gulp"); 7 | const merge = require("merge-stream"); 8 | 9 | // BrowserSync 10 | function browserSync(done) { 11 | browsersync.init({ 12 | server: { 13 | baseDir: "./" 14 | }, 15 | port: 3000 16 | }); 17 | done(); 18 | } 19 | 20 | // BrowserSync reload 21 | function browserSyncReload(done) { 22 | browsersync.reload(); 23 | done(); 24 | } 25 | 26 | // Clean vendor 27 | function clean() { 28 | return del(["./vendor/"]); 29 | } 30 | 31 | // Bring third party dependencies from node_modules into vendor directory 32 | function modules() { 33 | // Bootstrap 34 | var bootstrap = gulp.src('./node_modules/bootstrap/dist/**/*') 35 | .pipe(gulp.dest('./vendor/bootstrap')); 36 | // jQuery 37 | var jquery = gulp.src([ 38 | './node_modules/jquery/dist/*', 39 | '!./node_modules/jquery/dist/core.js' 40 | ]) 41 | .pipe(gulp.dest('./vendor/jquery')); 42 | // jQuery Easing 43 | var jqueryEasing = gulp.src('./node_modules/jquery.easing/*.js') 44 | .pipe(gulp.dest('./vendor/jquery-easing')); 45 | return merge(bootstrap, jquery, jqueryEasing); 46 | } 47 | 48 | // Watch 
files 49 | function watchFiles() { 50 | gulp.watch("./**/*.css", browserSyncReload); 51 | gulp.watch("./**/*.html", browserSyncReload); 52 | } 53 | 54 | // Define complex tasks 55 | const vendor = gulp.series(clean, modules); 56 | const build = gulp.series(vendor); 57 | const watch = gulp.series(build, gulp.parallel(watchFiles, browserSync)); 58 | 59 | // Export tasks 60 | exports.clean = clean; 61 | exports.vendor = vendor; 62 | exports.build = build; 63 | exports.watch = watch; 64 | exports.default = build; 65 | -------------------------------------------------------------------------------- /static/js/scrolling-nav.js: -------------------------------------------------------------------------------- 1 | (function($) { 2 | "use strict"; // Start of use strict 3 | 4 | // Smooth scrolling using jQuery easing 5 | $('a.js-scroll-trigger[href*="#"]:not([href="#"])').click(function() { 6 | if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) { 7 | var target = $(this.hash); 8 | target = target.length ? target : $('[name=' + this.hash.slice(1) + ']'); 9 | if (target.length) { 10 | $('html, body').animate({ 11 | scrollTop: (target.offset().top - 56) 12 | }, 1000, "easeInOutExpo"); 13 | return false; 14 | } 15 | } 16 | }); 17 | 18 | // Closes responsive menu when a scroll trigger link is clicked 19 | $('.js-scroll-trigger').click(function() { 20 | $('.navbar-collapse').collapse('hide'); 21 | }); 22 | 23 | // Activate scrollspy to add active class to navbar items on scroll 24 | $('body').scrollspy({ 25 | target: '#mainNav', 26 | offset: 56 27 | }); 28 | 29 | })(jQuery); // End of use strict 30 | -------------------------------------------------------------------------------- /static/src/index.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | [markup lost in extraction; only the visible text of this file survives: "Drag and drop a file or select add Image" (line 8) and "your image" (line 12)] 17 |
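
Because the tags of static/src/index.html did not survive the extraction, the block below is a hypothetical reconstruction of the upload widget's markup, inferred only from the class names and handlers referenced in static/src/script.js and static/src/style.css (readURL, removeUpload, .file-upload, .image-upload-wrap, .file-upload-content, .image-title, .remove-image). It is a sketch of what the file likely contained, not the original source; the inline onclick/accept attributes in particular are assumptions.

<!-- Hypothetical reconstruction of static/src/index.html; not the original file -->
<div class="file-upload">
  <button class="file-upload-btn" type="button" onclick="$('.file-upload-input').trigger('click')">Add Image</button>
  <div class="image-upload-wrap">
    <!-- readURL() in static/src/script.js reads the selected file and shows the preview -->
    <input class="file-upload-input" type="file" onchange="readURL(this);" accept="image/*" />
    <div class="drag-text">
      <h3>Drag and drop a file or select add Image</h3>
    </div>
  </div>
  <div class="file-upload-content">
    <img class="file-upload-image" src="#" alt="your image" />
    <div class="image-title-wrap">
      <!-- removeUpload() resets the input and restores the drop zone -->
      <button type="button" onclick="removeUpload()" class="remove-image">Remove <span class="image-title">Uploaded Image</span></button>
    </div>
  </div>
</div>
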
-------------------------------------------------------------------------------- /static/src/script.js: -------------------------------------------------------------------------------- 1 | function readURL(input) { 2 | if (input.files && input.files[0]) { 3 | 4 | var reader = new FileReader(); 5 | 6 | reader.onload = function(e) { 7 | $('.image-upload-wrap').hide(); 8 | 9 | $('.file-upload-image').attr('src', e.target.result); 10 | $('.file-upload-content').show(); 11 | 12 | $('.image-title').html(input.files[0].name); 13 | }; 14 | 15 | reader.readAsDataURL(input.files[0]); 16 | 17 | } else { 18 | removeUpload(); 19 | } 20 | } 21 | 22 | function removeUpload() { 23 | $('.file-upload-input').replaceWith($('.file-upload-input').clone()); 24 | $('.file-upload-content').hide(); 25 | $('.image-upload-wrap').show(); 26 | } 27 | $('.image-upload-wrap').bind('dragover', function () { 28 | $('.image-upload-wrap').addClass('image-dropping'); 29 | }); 30 | $('.image-upload-wrap').bind('dragleave', function () { 31 | $('.image-upload-wrap').removeClass('image-dropping'); 32 | }); 33 | 34 | 35 | class TypeWriter { 36 | constructor(element) { 37 | this.element = element; 38 | this.text = element.textContent; 39 | this.height = element.offsetHeight; 40 | this.index = 0; 41 | this.addSpeed = 60; 42 | this.clearSpeed = 20; 43 | this.waitTime = 1000; 44 | 45 | this.removeText = this.removeText.bind(this); 46 | this.addText = this.addText.bind(this); 47 | 48 | this.init(); 49 | } 50 | 51 | removeText() { 52 | this.element.textContent = this.element.textContent.slice(0, -1); 53 | 54 | if (this.element.textContent.length == 0) { 55 | clearInterval(this.interval); 56 | 57 | var that = this; 58 | 59 | setTimeout(function(){ 60 | that.index = 0; 61 | that.interval = setInterval(that.addText, that.addSpeed); 62 | }, this.waitTime); 63 | } 64 | } 65 | 66 | addText() { 67 | this.element.textContent += this.text[this.index]; 68 | 69 | this.index = this.index + 1; 70 | 71 | if (this.index > this.text.length -1) { 72 | clearInterval(this.interval); 73 | 74 | var that = this; 75 | 76 | setTimeout(function(){ 77 | that.interval = setInterval(that.removeText, that.clearSpeed); 78 | }, this.waitTime); 79 | } 80 | 81 | } 82 | 83 | init() { 84 | this.element.textContent = ''; 85 | this.element.style.height = '' + this.height + 'px'; 86 | 87 | this.interval = setInterval(this.addText, this.addSpeed); 88 | } 89 | } 90 | 91 | Array.from(document.getElementsByClassName("typewriter")).forEach( 92 | function(element) { 93 | new TypeWriter(element); 94 | } 95 | ); -------------------------------------------------------------------------------- /static/src/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: 'Ubuntu', sans-serif; 3 | background-color: #d6d6d6; 4 | } 5 | 6 | .file-upload { 7 | background-color: #d6d6d6; 8 | width: 600px; 9 | margin: 0 auto; 10 | padding: 20px; 11 | } 12 | 13 | .file-upload-btn { 14 | width: 60%; 15 | margin: 0; 16 | color: #fff; 17 | background: #1FB264; 18 | border: none; 19 | padding: 10px; 20 | border-radius: 4px; 21 | border-bottom: 4px solid #15824B; 22 | transition: all .2s ease; 23 | outline: none; 24 | text-transform: uppercase; 25 | font-weight: 700; 26 | } 27 | 28 | .file-upload-btn:hover { 29 | background: #d6d6d6; 30 | color: #ffffff; 31 | transition: all .2s ease; 32 | cursor: pointer; 33 | } 34 | 35 | .file-upload-btn:active { 36 | border: 0; 37 | transition: all .2s ease; 38 | } 39 | 40 | .file-upload-content { 41 | 
display: none; 42 | text-align: center; 43 | } 44 | 45 | .file-upload-input { 46 | position: absolute; 47 | margin: 0; 48 | padding: 0; 49 | width: 100%; 50 | height: 100%; 51 | outline: none; 52 | opacity: 0; 53 | cursor: pointer; 54 | } 55 | 56 | .image-upload-wrap { 57 | margin-top: 20px; 58 | border: 4px dashed #1FB264; 59 | position: relative; 60 | } 61 | 62 | .image-dropping, 63 | .image-upload-wrap:hover { 64 | background-color: #1FB264; 65 | border: 4px dashed #ffffff; 66 | } 67 | 68 | .image-title-wrap { 69 | padding: 0 15px 15px 15px; 70 | color: #222; 71 | } 72 | 73 | .drag-text { 74 | text-align: center; 75 | } 76 | 77 | .drag-text h3 { 78 | font-weight: 100; 79 | text-transform: uppercase; 80 | color: #15824B; 81 | padding: 60px 0; 82 | } 83 | 84 | .file-upload-image { 85 | max-height: 200px; 86 | max-width: 200px; 87 | margin: auto; 88 | padding: 20px; 89 | } 90 | 91 | .remove-image { 92 | width: 200px; 93 | margin: 0; 94 | color: #fff; 95 | background: #cd4535; 96 | border: none; 97 | padding: 10px; 98 | border-radius: 4px; 99 | border-bottom: 4px solid #b02818; 100 | transition: all .2s ease; 101 | outline: none; 102 | text-transform: uppercase; 103 | font-weight: 700; 104 | } 105 | 106 | .remove-image:hover { 107 | background: #c13b2a; 108 | color: #ffffff; 109 | transition: all .2s ease; 110 | cursor: pointer; 111 | } 112 | 113 | .remove-image:active { 114 | border: 0; 115 | transition: all .2s ease; 116 | } 117 | 118 | 119 | 120 | .container { 121 | width: 400px; 122 | border: 1px solid; 123 | padding: 10px; 124 | margin: auto; 125 | } 126 | 127 | .icon-button { 128 | background-color: white; 129 | border-radius: 2.6rem; 130 | cursor: pointer; 131 | display: inline-block; 132 | font-size: 1.3rem; 133 | height: 2.6rem; 134 | line-height: 2.6rem; 135 | margin: 0 5px; 136 | position: relative; 137 | text-align: center; 138 | -webkit-user-select: none; 139 | -moz-user-select: none; 140 | -ms-user-select: none; 141 | user-select: none; 142 | width: 2.6rem; 143 | } 144 | 145 | /* Circle */ 146 | .icon-button span { 147 | border-radius: 0; 148 | display: block; 149 | height: 0; 150 | left: 50%; 151 | margin: 0; 152 | position: absolute; 153 | top: 50%; 154 | -webkit-transition: all 0.3s; 155 | -moz-transition: all 0.3s; 156 | -o-transition: all 0.3s; 157 | transition: all 0.3s; 158 | width: 0; 159 | } 160 | .icon-button:hover span { 161 | width: 2.6rem; 162 | height: 2.6rem; 163 | border-radius: 2.6rem; 164 | margin: -1.3rem; 165 | } 166 | 167 | /* Icons */ 168 | .icon-button i { 169 | background: none; 170 | color: white; 171 | height: 2.6rem; 172 | left: 0; 173 | line-height: 2.6rem; 174 | position: absolute; 175 | top: 0; 176 | -webkit-transition: all 0.3s; 177 | -moz-transition: all 0.3s; 178 | -o-transition: all 0.3s; 179 | transition: all 0.3s; 180 | width: 2.6rem; 181 | z-index: 10; 182 | } 183 | 184 | 185 | 186 | 187 | @import url('//netdna.bootstrapcdn.com/font-awesome/3.2.1/css/font-awesome.min.css'); 188 | 189 | overflow: hidden; 190 | background-color: #FFF; 191 | border-radius: 100%; 192 | transform: translate(-50%, -50%); 193 | transition: all 0.3s ease; 194 | 195 | i { 196 | position: absolute; 197 | left: 50%; 198 | top: 50%; 199 | font-size: 50px; 200 | transition: all 0.3s ease; 201 | 202 | &.inactive { 203 | color: #CCC; 204 | transform: translate(-50%, -50%); 205 | } 206 | 207 | &.active { transform: translate(-150px, -50%); } 208 | } 209 | } 210 | -------------------------------------------------------------------------------- 
/static/vendor/bootstrap/css/bootstrap-grid.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Grid v4.3.1 (https://getbootstrap.com/) 3 | * Copyright 2011-2019 The Bootstrap Authors 4 | * Copyright 2011-2019 Twitter, Inc. 5 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 6 | */html{box-sizing:border-box;-ms-overflow-style:scrollbar}*,::after,::before{box-sizing:inherit}.container{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:576px){.container{max-width:540px}}@media (min-width:768px){.container{max-width:720px}}@media (min-width:992px){.container{max-width:960px}}@media (min-width:1200px){.container{max-width:1140px}}.container-fluid{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-right:-15px;margin-left:-15px}.no-gutters{margin-right:0;margin-left:0}.no-gutters>.col,.no-gutters>[class*=col-]{padding-right:0;padding-left:0}.col,.col-1,.col-10,.col-11,.col-12,.col-2,.col-3,.col-4,.col-5,.col-6,.col-7,.col-8,.col-9,.col-auto,.col-lg,.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-auto,.col-md,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-auto,.col-sm,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-auto,.col-xl,.col-xl-1,.col-xl-10,.col-xl-11,.col-xl-12,.col-xl-2,.col-xl-3,.col-xl-4,.col-xl-5,.col-xl-6,.col-xl-7,.col-xl-8,.col-xl-9,.col-xl-auto{position:relative;width:100%;padding-right:15px;padding-left:15px}.col{-ms-flex-preferred-size:0;flex-basis:0;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto;max-width:100%}.col-1{-ms-flex:0 0 8.333333%;flex:0 0 8.333333%;max-width:8.333333%}.col-2{-ms-flex:0 0 16.666667%;flex:0 0 16.666667%;max-width:16.666667%}.col-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-4{-ms-flex:0 0 33.333333%;flex:0 0 33.333333%;max-width:33.333333%}.col-5{-ms-flex:0 0 41.666667%;flex:0 0 41.666667%;max-width:41.666667%}.col-6{-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-7{-ms-flex:0 0 58.333333%;flex:0 0 58.333333%;max-width:58.333333%}.col-8{-ms-flex:0 0 66.666667%;flex:0 0 66.666667%;max-width:66.666667%}.col-9{-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-10{-ms-flex:0 0 83.333333%;flex:0 0 83.333333%;max-width:83.333333%}.col-11{-ms-flex:0 0 91.666667%;flex:0 0 91.666667%;max-width:91.666667%}.col-12{-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-first{-ms-flex-order:-1;order:-1}.order-last{-ms-flex-order:13;order:13}.order-0{-ms-flex-order:0;order:0}.order-1{-ms-flex-order:1;order:1}.order-2{-ms-flex-order:2;order:2}.order-3{-ms-flex-order:3;order:3}.order-4{-ms-flex-order:4;order:4}.order-5{-ms-flex-order:5;order:5}.order-6{-ms-flex-order:6;order:6}.order-7{-ms-flex-order:7;order:7}.order-8{-ms-flex-order:8;order:8}.order-9{-ms-flex-order:9;order:9}.order-10{-ms-flex-order:10;order:10}.order-11{-ms-flex-order:11;order:11}.order-12{-ms-flex-order:12;order:12}.offset-1{margin-left:8.333333%}.offset-2{margin-left:16.666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.333333%}.offset-5{margin-left:41.666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.333333%}.offset-8{margin-left:66.666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.333333%}.offset-11{margin-left:91.666667%}@media (min-width:576px){.col-sm{-ms-flex-preferred-size:0;flex-basis:0;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-sm-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto;max-width:100%}.col-sm-1{-ms-flex:0 0 8.333333%;flex:0 0 8.333333%;max-width:8.333333%}.col-sm-2{-ms-flex:0 0 16.666667%;flex:0 0 16.666667%;max-width:16.666667%}.col-sm-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-sm-4{-ms-flex:0 0 33.333333%;flex:0 0 33.333333%;max-width:33.333333%}.col-sm-5{-ms-flex:0 0 41.666667%;flex:0 0 41.666667%;max-width:41.666667%}.col-sm-6{-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-sm-7{-ms-flex:0 0 58.333333%;flex:0 0 58.333333%;max-width:58.333333%}.col-sm-8{-ms-flex:0 0 66.666667%;flex:0 0 66.666667%;max-width:66.666667%}.col-sm-9{-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-sm-10{-ms-flex:0 0 83.333333%;flex:0 0 83.333333%;max-width:83.333333%}.col-sm-11{-ms-flex:0 0 91.666667%;flex:0 0 91.666667%;max-width:91.666667%}.col-sm-12{-ms-flex:0 0 100%;flex:0 0 100%;max-width:100%}.order-sm-first{-ms-flex-order:-1;order:-1}.order-sm-last{-ms-flex-order:13;order:13}.order-sm-0{-ms-flex-order:0;order:0}.order-sm-1{-ms-flex-order:1;order:1}.order-sm-2{-ms-flex-order:2;order:2}.order-sm-3{-ms-flex-order:3;order:3}.order-sm-4{-ms-flex-order:4;order:4}.order-sm-5{-ms-flex-order:5;order:5}.order-sm-6{-ms-flex-order:6;order:6}.order-sm-7{-ms-flex-order:7;order:7}.order-sm-8{-ms-flex-order:8;order:8}.order-sm-9{-ms-flex-order:9;order:9}.order-sm-10{-ms-flex-order:10;order:10}.order-sm-11{-ms-flex-order:11;order:11}.order-sm-12{-ms-flex-order:12;order:12}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.333333%}.offset-sm-2{margin-left:16.666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.333333%}.offset-sm-5{margin-left:41.666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.333333%}.offset-sm-8{margin-left:66.666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.333333%}.offset-sm-11{margin-left:91.666667%}}@media (min-width:768px){.col-md{-ms-flex-preferred-size:0;flex-basis:0;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-md-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto;max-width:100%}.col-md-1{-ms-flex:0 0 8.333333%;flex:0 0 8.333333%;max-width:8.333333%}.col-md-2{-ms-flex:0 0 16.666667%;flex:0 0 16.666667%;max-width:16.666667%}.col-md-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-md-4{-ms-flex:0 0 33.333333%;flex:0 0 33.333333%;max-width:33.333333%}.col-md-5{-ms-flex:0 0 41.666667%;flex:0 0 41.666667%;max-width:41.666667%}.col-md-6{-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-md-7{-ms-flex:0 0 58.333333%;flex:0 0 
58.333333%;max-width:58.333333%}.col-md-8{-ms-flex:0 0 66.666667%;flex:0 0 66.666667%;max-width:66.666667%}.col-md-9{-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-md-10{-ms-flex:0 0 83.333333%;flex:0 0 83.333333%;max-width:83.333333%}.col-md-11{-ms-flex:0 0 91.666667%;flex:0 0 91.666667%;max-width:91.666667%}.col-md-12{-ms-flex:0 0 100%;flex:0 0 100%;max-width:100%}.order-md-first{-ms-flex-order:-1;order:-1}.order-md-last{-ms-flex-order:13;order:13}.order-md-0{-ms-flex-order:0;order:0}.order-md-1{-ms-flex-order:1;order:1}.order-md-2{-ms-flex-order:2;order:2}.order-md-3{-ms-flex-order:3;order:3}.order-md-4{-ms-flex-order:4;order:4}.order-md-5{-ms-flex-order:5;order:5}.order-md-6{-ms-flex-order:6;order:6}.order-md-7{-ms-flex-order:7;order:7}.order-md-8{-ms-flex-order:8;order:8}.order-md-9{-ms-flex-order:9;order:9}.order-md-10{-ms-flex-order:10;order:10}.order-md-11{-ms-flex-order:11;order:11}.order-md-12{-ms-flex-order:12;order:12}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.333333%}.offset-md-2{margin-left:16.666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.333333%}.offset-md-5{margin-left:41.666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.333333%}.offset-md-8{margin-left:66.666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.333333%}.offset-md-11{margin-left:91.666667%}}@media (min-width:992px){.col-lg{-ms-flex-preferred-size:0;flex-basis:0;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-lg-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto;max-width:100%}.col-lg-1{-ms-flex:0 0 8.333333%;flex:0 0 8.333333%;max-width:8.333333%}.col-lg-2{-ms-flex:0 0 16.666667%;flex:0 0 16.666667%;max-width:16.666667%}.col-lg-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-lg-4{-ms-flex:0 0 33.333333%;flex:0 0 33.333333%;max-width:33.333333%}.col-lg-5{-ms-flex:0 0 41.666667%;flex:0 0 41.666667%;max-width:41.666667%}.col-lg-6{-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-lg-7{-ms-flex:0 0 58.333333%;flex:0 0 58.333333%;max-width:58.333333%}.col-lg-8{-ms-flex:0 0 66.666667%;flex:0 0 66.666667%;max-width:66.666667%}.col-lg-9{-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-lg-10{-ms-flex:0 0 83.333333%;flex:0 0 83.333333%;max-width:83.333333%}.col-lg-11{-ms-flex:0 0 91.666667%;flex:0 0 91.666667%;max-width:91.666667%}.col-lg-12{-ms-flex:0 0 100%;flex:0 0 100%;max-width:100%}.order-lg-first{-ms-flex-order:-1;order:-1}.order-lg-last{-ms-flex-order:13;order:13}.order-lg-0{-ms-flex-order:0;order:0}.order-lg-1{-ms-flex-order:1;order:1}.order-lg-2{-ms-flex-order:2;order:2}.order-lg-3{-ms-flex-order:3;order:3}.order-lg-4{-ms-flex-order:4;order:4}.order-lg-5{-ms-flex-order:5;order:5}.order-lg-6{-ms-flex-order:6;order:6}.order-lg-7{-ms-flex-order:7;order:7}.order-lg-8{-ms-flex-order:8;order:8}.order-lg-9{-ms-flex-order:9;order:9}.order-lg-10{-ms-flex-order:10;order:10}.order-lg-11{-ms-flex-order:11;order:11}.order-lg-12{-ms-flex-order:12;order:12}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.333333%}.offset-lg-2{margin-left:16.666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.333333%}.offset-lg-5{margin-left:41.666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.333333%}.offset-lg-8{margin-left:66.666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.333333%}.offset-lg-11{margin-left:91.666667%}}@media (min-width:1200px){.col-xl{-ms-flex-preferred-size:0;flex-basis:0;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-xl-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto;max-width:100%}.col-xl-1{-ms-flex:0 0 
8.333333%;flex:0 0 8.333333%;max-width:8.333333%}.col-xl-2{-ms-flex:0 0 16.666667%;flex:0 0 16.666667%;max-width:16.666667%}.col-xl-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-xl-4{-ms-flex:0 0 33.333333%;flex:0 0 33.333333%;max-width:33.333333%}.col-xl-5{-ms-flex:0 0 41.666667%;flex:0 0 41.666667%;max-width:41.666667%}.col-xl-6{-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-xl-7{-ms-flex:0 0 58.333333%;flex:0 0 58.333333%;max-width:58.333333%}.col-xl-8{-ms-flex:0 0 66.666667%;flex:0 0 66.666667%;max-width:66.666667%}.col-xl-9{-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-xl-10{-ms-flex:0 0 83.333333%;flex:0 0 83.333333%;max-width:83.333333%}.col-xl-11{-ms-flex:0 0 91.666667%;flex:0 0 91.666667%;max-width:91.666667%}.col-xl-12{-ms-flex:0 0 100%;flex:0 0 100%;max-width:100%}.order-xl-first{-ms-flex-order:-1;order:-1}.order-xl-last{-ms-flex-order:13;order:13}.order-xl-0{-ms-flex-order:0;order:0}.order-xl-1{-ms-flex-order:1;order:1}.order-xl-2{-ms-flex-order:2;order:2}.order-xl-3{-ms-flex-order:3;order:3}.order-xl-4{-ms-flex-order:4;order:4}.order-xl-5{-ms-flex-order:5;order:5}.order-xl-6{-ms-flex-order:6;order:6}.order-xl-7{-ms-flex-order:7;order:7}.order-xl-8{-ms-flex-order:8;order:8}.order-xl-9{-ms-flex-order:9;order:9}.order-xl-10{-ms-flex-order:10;order:10}.order-xl-11{-ms-flex-order:11;order:11}.order-xl-12{-ms-flex-order:12;order:12}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.333333%}.offset-xl-2{margin-left:16.666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.333333%}.offset-xl-5{margin-left:41.666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.333333%}.offset-xl-8{margin-left:66.666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.333333%}.offset-xl-11{margin-left:91.666667%}}.d-none{display:none!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:-ms-flexbox!important;display:flex!important}.d-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}@media (min-width:576px){.d-sm-none{display:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:-ms-flexbox!important;display:flex!important}.d-sm-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media (min-width:768px){.d-md-none{display:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:-ms-flexbox!important;display:flex!important}.d-md-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media 
(min-width:992px){.d-lg-none{display:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:-ms-flexbox!important;display:flex!important}.d-lg-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media (min-width:1200px){.d-xl-none{display:none!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:-ms-flexbox!important;display:flex!important}.d-xl-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media print{.d-print-none{display:none!important}.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:-ms-flexbox!important;display:flex!important}.d-print-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}.flex-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-center{-ms-flex-align:center!important;align-items:center!important}.align-items-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}@media (min-width:576px){.flex-sm-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-sm-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-sm-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-sm-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-sm-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-sm-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-sm-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-sm-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-sm-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-sm-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-sm-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-sm-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-sm-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-sm-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-sm-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-sm-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-sm-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-sm-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-sm-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-sm-center{-ms-flex-align:center!important;align-items:center!important}.align-items-sm-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-sm-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-sm-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-sm-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-sm-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-sm-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-sm-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-sm-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-sm-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-sm-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-sm-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-sm-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-sm-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-sm-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:768px){.flex-md-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-md-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-md-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-md-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-md-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-md-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-md-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-md-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-md-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-md-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-md-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-md-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-md-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-md-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-md-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-md-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-md-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-md-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-md-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-md-center{-ms-flex-align:center!important;align-items:center!important}.align-items-md-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-md-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-md-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-md-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-md-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-md-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-md-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-md-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-md-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-md-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-md-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-md-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-md-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-md-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:992px){.flex-lg-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-lg-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-lg-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-lg-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-lg-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-lg-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-lg-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-lg-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-lg-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-lg-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-lg-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-lg-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-lg-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-lg-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-lg-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-lg-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-lg-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-lg-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-lg-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-lg-center{-ms-flex-align:center!important;align-items:center!important}.align-items-lg-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-lg-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-lg-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-lg-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-lg-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-lg-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-lg-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-lg-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-lg-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-lg-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-lg-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-lg-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-lg-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-lg-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:1200px){.flex-xl-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-xl-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-xl-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-xl-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-xl-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-xl-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-xl-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-xl-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-xl-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-xl-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-xl-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-xl-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-xl-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-xl-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-xl-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-xl-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-xl-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-xl-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-xl-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-xl-center{-ms-flex-align:center!important;align-items:center!important}.align-items-xl-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-xl-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-xl-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-xl-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-xl-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-xl-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-xl-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-xl-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-xl-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-xl-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-xl-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-xl-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-xl-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-xl-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}.m-0{margin:0!important}.mt-0,.my-0{margin-top:0!important}.mr-0,.mx-0{margin-right:0!important}.mb-0,.my-0{margin-bottom:0!important}.ml-0,.mx-0{margin-left:0!important}.m-1{margin:.25rem!important}.mt-1,.my-1{margin-top:.25rem!important}.mr-1,.mx-1{margin-right:.25rem!important}.mb-1,.my-1{margin-bottom:.25rem!important}.ml-1,.mx-1{margin-left:.25rem!important}.m-2{margin:.5rem!important}.mt-2,.my-2{margin-top:.5rem!important}.mr-2,.mx-2{margin-right:.5rem!important}.mb-2,.my-2{margin-bottom:.5rem!important}.ml-2,.mx-2{margin-left:.5rem!important}.m-3{margin:1rem!important}.mt-3,.my-3{margin-top:1rem!important}.mr-3,.mx-3{margin-right:1rem!important}.mb-3,.my-3{margin-bottom:1rem!important}.ml-3,.mx-3{margin-left:1rem!important}.m-4{margin:1.5rem!important}.mt-4,.my-4{margin-top:1.5rem!important}.mr-4,.mx-4{margin-right:1.5rem!important}.mb-4,.my-4{margin-bottom:1.5rem!important}.ml-4,.mx-4{margin-left:1.5rem!important}.m-5{margin:3rem!important}.mt-5,.my-5{margin-top:3rem!important}.mr-5,.mx-5{margin-right:3rem!important}.mb-5,.my-5{margin-bottom:3rem!important}.ml-5,.mx-5{margin-left:3rem!important}.p-0{padding:0!important}.pt-0,.py-0{padding-top:0!important}.pr-0,.px-0{padding-right:0!important}.pb-0,.py-0{padding-bottom:0!important}.pl-0,.
px-0{padding-left:0!important}.p-1{padding:.25rem!important}.pt-1,.py-1{padding-top:.25rem!important}.pr-1,.px-1{padding-right:.25rem!important}.pb-1,.py-1{padding-bottom:.25rem!important}.pl-1,.px-1{padding-left:.25rem!important}.p-2{padding:.5rem!important}.pt-2,.py-2{padding-top:.5rem!important}.pr-2,.px-2{padding-right:.5rem!important}.pb-2,.py-2{padding-bottom:.5rem!important}.pl-2,.px-2{padding-left:.5rem!important}.p-3{padding:1rem!important}.pt-3,.py-3{padding-top:1rem!important}.pr-3,.px-3{padding-right:1rem!important}.pb-3,.py-3{padding-bottom:1rem!important}.pl-3,.px-3{padding-left:1rem!important}.p-4{padding:1.5rem!important}.pt-4,.py-4{padding-top:1.5rem!important}.pr-4,.px-4{padding-right:1.5rem!important}.pb-4,.py-4{padding-bottom:1.5rem!important}.pl-4,.px-4{padding-left:1.5rem!important}.p-5{padding:3rem!important}.pt-5,.py-5{padding-top:3rem!important}.pr-5,.px-5{padding-right:3rem!important}.pb-5,.py-5{padding-bottom:3rem!important}.pl-5,.px-5{padding-left:3rem!important}.m-n1{margin:-.25rem!important}.mt-n1,.my-n1{margin-top:-.25rem!important}.mr-n1,.mx-n1{margin-right:-.25rem!important}.mb-n1,.my-n1{margin-bottom:-.25rem!important}.ml-n1,.mx-n1{margin-left:-.25rem!important}.m-n2{margin:-.5rem!important}.mt-n2,.my-n2{margin-top:-.5rem!important}.mr-n2,.mx-n2{margin-right:-.5rem!important}.mb-n2,.my-n2{margin-bottom:-.5rem!important}.ml-n2,.mx-n2{margin-left:-.5rem!important}.m-n3{margin:-1rem!important}.mt-n3,.my-n3{margin-top:-1rem!important}.mr-n3,.mx-n3{margin-right:-1rem!important}.mb-n3,.my-n3{margin-bottom:-1rem!important}.ml-n3,.mx-n3{margin-left:-1rem!important}.m-n4{margin:-1.5rem!important}.mt-n4,.my-n4{margin-top:-1.5rem!important}.mr-n4,.mx-n4{margin-right:-1.5rem!important}.mb-n4,.my-n4{margin-bottom:-1.5rem!important}.ml-n4,.mx-n4{margin-left:-1.5rem!important}.m-n5{margin:-3rem!important}.mt-n5,.my-n5{margin-top:-3rem!important}.mr-n5,.mx-n5{margin-right:-3rem!important}.mb-n5,.my-n5{margin-bottom:-3rem!important}.ml-n5,.mx-n5{margin-left:-3rem!important}.m-auto{margin:auto!important}.mt-auto,.my-auto{margin-top:auto!important}.mr-auto,.mx-auto{margin-right:auto!important}.mb-auto,.my-auto{margin-bottom:auto!important}.ml-auto,.mx-auto{margin-left:auto!important}@media 
(min-width:576px){.m-sm-0{margin:0!important}.mt-sm-0,.my-sm-0{margin-top:0!important}.mr-sm-0,.mx-sm-0{margin-right:0!important}.mb-sm-0,.my-sm-0{margin-bottom:0!important}.ml-sm-0,.mx-sm-0{margin-left:0!important}.m-sm-1{margin:.25rem!important}.mt-sm-1,.my-sm-1{margin-top:.25rem!important}.mr-sm-1,.mx-sm-1{margin-right:.25rem!important}.mb-sm-1,.my-sm-1{margin-bottom:.25rem!important}.ml-sm-1,.mx-sm-1{margin-left:.25rem!important}.m-sm-2{margin:.5rem!important}.mt-sm-2,.my-sm-2{margin-top:.5rem!important}.mr-sm-2,.mx-sm-2{margin-right:.5rem!important}.mb-sm-2,.my-sm-2{margin-bottom:.5rem!important}.ml-sm-2,.mx-sm-2{margin-left:.5rem!important}.m-sm-3{margin:1rem!important}.mt-sm-3,.my-sm-3{margin-top:1rem!important}.mr-sm-3,.mx-sm-3{margin-right:1rem!important}.mb-sm-3,.my-sm-3{margin-bottom:1rem!important}.ml-sm-3,.mx-sm-3{margin-left:1rem!important}.m-sm-4{margin:1.5rem!important}.mt-sm-4,.my-sm-4{margin-top:1.5rem!important}.mr-sm-4,.mx-sm-4{margin-right:1.5rem!important}.mb-sm-4,.my-sm-4{margin-bottom:1.5rem!important}.ml-sm-4,.mx-sm-4{margin-left:1.5rem!important}.m-sm-5{margin:3rem!important}.mt-sm-5,.my-sm-5{margin-top:3rem!important}.mr-sm-5,.mx-sm-5{margin-right:3rem!important}.mb-sm-5,.my-sm-5{margin-bottom:3rem!important}.ml-sm-5,.mx-sm-5{margin-left:3rem!important}.p-sm-0{padding:0!important}.pt-sm-0,.py-sm-0{padding-top:0!important}.pr-sm-0,.px-sm-0{padding-right:0!important}.pb-sm-0,.py-sm-0{padding-bottom:0!important}.pl-sm-0,.px-sm-0{padding-left:0!important}.p-sm-1{padding:.25rem!important}.pt-sm-1,.py-sm-1{padding-top:.25rem!important}.pr-sm-1,.px-sm-1{padding-right:.25rem!important}.pb-sm-1,.py-sm-1{padding-bottom:.25rem!important}.pl-sm-1,.px-sm-1{padding-left:.25rem!important}.p-sm-2{padding:.5rem!important}.pt-sm-2,.py-sm-2{padding-top:.5rem!important}.pr-sm-2,.px-sm-2{padding-right:.5rem!important}.pb-sm-2,.py-sm-2{padding-bottom:.5rem!important}.pl-sm-2,.px-sm-2{padding-left:.5rem!important}.p-sm-3{padding:1rem!important}.pt-sm-3,.py-sm-3{padding-top:1rem!important}.pr-sm-3,.px-sm-3{padding-right:1rem!important}.pb-sm-3,.py-sm-3{padding-bottom:1rem!important}.pl-sm-3,.px-sm-3{padding-left:1rem!important}.p-sm-4{padding:1.5rem!important}.pt-sm-4,.py-sm-4{padding-top:1.5rem!important}.pr-sm-4,.px-sm-4{padding-right:1.5rem!important}.pb-sm-4,.py-sm-4{padding-bottom:1.5rem!important}.pl-sm-4,.px-sm-4{padding-left:1.5rem!important}.p-sm-5{padding:3rem!important}.pt-sm-5,.py-sm-5{padding-top:3rem!important}.pr-sm-5,.px-sm-5{padding-right:3rem!important}.pb-sm-5,.py-sm-5{padding-bottom:3rem!important}.pl-sm-5,.px-sm-5{padding-left:3rem!important}.m-sm-n1{margin:-.25rem!important}.mt-sm-n1,.my-sm-n1{margin-top:-.25rem!important}.mr-sm-n1,.mx-sm-n1{margin-right:-.25rem!important}.mb-sm-n1,.my-sm-n1{margin-bottom:-.25rem!important}.ml-sm-n1,.mx-sm-n1{margin-left:-.25rem!important}.m-sm-n2{margin:-.5rem!important}.mt-sm-n2,.my-sm-n2{margin-top:-.5rem!important}.mr-sm-n2,.mx-sm-n2{margin-right:-.5rem!important}.mb-sm-n2,.my-sm-n2{margin-bottom:-.5rem!important}.ml-sm-n2,.mx-sm-n2{margin-left:-.5rem!important}.m-sm-n3{margin:-1rem!important}.mt-sm-n3,.my-sm-n3{margin-top:-1rem!important}.mr-sm-n3,.mx-sm-n3{margin-right:-1rem!important}.mb-sm-n3,.my-sm-n3{margin-bottom:-1rem!important}.ml-sm-n3,.mx-sm-n3{margin-left:-1rem!important}.m-sm-n4{margin:-1.5rem!important}.mt-sm-n4,.my-sm-n4{margin-top:-1.5rem!important}.mr-sm-n4,.mx-sm-n4{margin-right:-1.5rem!important}.mb-sm-n4,.my-sm-n4{margin-bottom:-1.5rem!important}.ml-sm-n4,.mx-sm-n4{margin-left:-1.5rem!important}.m-sm-n5{margi
n:-3rem!important}.mt-sm-n5,.my-sm-n5{margin-top:-3rem!important}.mr-sm-n5,.mx-sm-n5{margin-right:-3rem!important}.mb-sm-n5,.my-sm-n5{margin-bottom:-3rem!important}.ml-sm-n5,.mx-sm-n5{margin-left:-3rem!important}.m-sm-auto{margin:auto!important}.mt-sm-auto,.my-sm-auto{margin-top:auto!important}.mr-sm-auto,.mx-sm-auto{margin-right:auto!important}.mb-sm-auto,.my-sm-auto{margin-bottom:auto!important}.ml-sm-auto,.mx-sm-auto{margin-left:auto!important}}@media (min-width:768px){.m-md-0{margin:0!important}.mt-md-0,.my-md-0{margin-top:0!important}.mr-md-0,.mx-md-0{margin-right:0!important}.mb-md-0,.my-md-0{margin-bottom:0!important}.ml-md-0,.mx-md-0{margin-left:0!important}.m-md-1{margin:.25rem!important}.mt-md-1,.my-md-1{margin-top:.25rem!important}.mr-md-1,.mx-md-1{margin-right:.25rem!important}.mb-md-1,.my-md-1{margin-bottom:.25rem!important}.ml-md-1,.mx-md-1{margin-left:.25rem!important}.m-md-2{margin:.5rem!important}.mt-md-2,.my-md-2{margin-top:.5rem!important}.mr-md-2,.mx-md-2{margin-right:.5rem!important}.mb-md-2,.my-md-2{margin-bottom:.5rem!important}.ml-md-2,.mx-md-2{margin-left:.5rem!important}.m-md-3{margin:1rem!important}.mt-md-3,.my-md-3{margin-top:1rem!important}.mr-md-3,.mx-md-3{margin-right:1rem!important}.mb-md-3,.my-md-3{margin-bottom:1rem!important}.ml-md-3,.mx-md-3{margin-left:1rem!important}.m-md-4{margin:1.5rem!important}.mt-md-4,.my-md-4{margin-top:1.5rem!important}.mr-md-4,.mx-md-4{margin-right:1.5rem!important}.mb-md-4,.my-md-4{margin-bottom:1.5rem!important}.ml-md-4,.mx-md-4{margin-left:1.5rem!important}.m-md-5{margin:3rem!important}.mt-md-5,.my-md-5{margin-top:3rem!important}.mr-md-5,.mx-md-5{margin-right:3rem!important}.mb-md-5,.my-md-5{margin-bottom:3rem!important}.ml-md-5,.mx-md-5{margin-left:3rem!important}.p-md-0{padding:0!important}.pt-md-0,.py-md-0{padding-top:0!important}.pr-md-0,.px-md-0{padding-right:0!important}.pb-md-0,.py-md-0{padding-bottom:0!important}.pl-md-0,.px-md-0{padding-left:0!important}.p-md-1{padding:.25rem!important}.pt-md-1,.py-md-1{padding-top:.25rem!important}.pr-md-1,.px-md-1{padding-right:.25rem!important}.pb-md-1,.py-md-1{padding-bottom:.25rem!important}.pl-md-1,.px-md-1{padding-left:.25rem!important}.p-md-2{padding:.5rem!important}.pt-md-2,.py-md-2{padding-top:.5rem!important}.pr-md-2,.px-md-2{padding-right:.5rem!important}.pb-md-2,.py-md-2{padding-bottom:.5rem!important}.pl-md-2,.px-md-2{padding-left:.5rem!important}.p-md-3{padding:1rem!important}.pt-md-3,.py-md-3{padding-top:1rem!important}.pr-md-3,.px-md-3{padding-right:1rem!important}.pb-md-3,.py-md-3{padding-bottom:1rem!important}.pl-md-3,.px-md-3{padding-left:1rem!important}.p-md-4{padding:1.5rem!important}.pt-md-4,.py-md-4{padding-top:1.5rem!important}.pr-md-4,.px-md-4{padding-right:1.5rem!important}.pb-md-4,.py-md-4{padding-bottom:1.5rem!important}.pl-md-4,.px-md-4{padding-left:1.5rem!important}.p-md-5{padding:3rem!important}.pt-md-5,.py-md-5{padding-top:3rem!important}.pr-md-5,.px-md-5{padding-right:3rem!important}.pb-md-5,.py-md-5{padding-bottom:3rem!important}.pl-md-5,.px-md-5{padding-left:3rem!important}.m-md-n1{margin:-.25rem!important}.mt-md-n1,.my-md-n1{margin-top:-.25rem!important}.mr-md-n1,.mx-md-n1{margin-right:-.25rem!important}.mb-md-n1,.my-md-n1{margin-bottom:-.25rem!important}.ml-md-n1,.mx-md-n1{margin-left:-.25rem!important}.m-md-n2{margin:-.5rem!important}.mt-md-n2,.my-md-n2{margin-top:-.5rem!important}.mr-md-n2,.mx-md-n2{margin-right:-.5rem!important}.mb-md-n2,.my-md-n2{margin-bottom:-.5rem!important}.ml-md-n2,.mx-md-n2{margin-left:-.5rem!important}.m-md-n3{margin:-
1rem!important}.mt-md-n3,.my-md-n3{margin-top:-1rem!important}.mr-md-n3,.mx-md-n3{margin-right:-1rem!important}.mb-md-n3,.my-md-n3{margin-bottom:-1rem!important}.ml-md-n3,.mx-md-n3{margin-left:-1rem!important}.m-md-n4{margin:-1.5rem!important}.mt-md-n4,.my-md-n4{margin-top:-1.5rem!important}.mr-md-n4,.mx-md-n4{margin-right:-1.5rem!important}.mb-md-n4,.my-md-n4{margin-bottom:-1.5rem!important}.ml-md-n4,.mx-md-n4{margin-left:-1.5rem!important}.m-md-n5{margin:-3rem!important}.mt-md-n5,.my-md-n5{margin-top:-3rem!important}.mr-md-n5,.mx-md-n5{margin-right:-3rem!important}.mb-md-n5,.my-md-n5{margin-bottom:-3rem!important}.ml-md-n5,.mx-md-n5{margin-left:-3rem!important}.m-md-auto{margin:auto!important}.mt-md-auto,.my-md-auto{margin-top:auto!important}.mr-md-auto,.mx-md-auto{margin-right:auto!important}.mb-md-auto,.my-md-auto{margin-bottom:auto!important}.ml-md-auto,.mx-md-auto{margin-left:auto!important}}@media (min-width:992px){.m-lg-0{margin:0!important}.mt-lg-0,.my-lg-0{margin-top:0!important}.mr-lg-0,.mx-lg-0{margin-right:0!important}.mb-lg-0,.my-lg-0{margin-bottom:0!important}.ml-lg-0,.mx-lg-0{margin-left:0!important}.m-lg-1{margin:.25rem!important}.mt-lg-1,.my-lg-1{margin-top:.25rem!important}.mr-lg-1,.mx-lg-1{margin-right:.25rem!important}.mb-lg-1,.my-lg-1{margin-bottom:.25rem!important}.ml-lg-1,.mx-lg-1{margin-left:.25rem!important}.m-lg-2{margin:.5rem!important}.mt-lg-2,.my-lg-2{margin-top:.5rem!important}.mr-lg-2,.mx-lg-2{margin-right:.5rem!important}.mb-lg-2,.my-lg-2{margin-bottom:.5rem!important}.ml-lg-2,.mx-lg-2{margin-left:.5rem!important}.m-lg-3{margin:1rem!important}.mt-lg-3,.my-lg-3{margin-top:1rem!important}.mr-lg-3,.mx-lg-3{margin-right:1rem!important}.mb-lg-3,.my-lg-3{margin-bottom:1rem!important}.ml-lg-3,.mx-lg-3{margin-left:1rem!important}.m-lg-4{margin:1.5rem!important}.mt-lg-4,.my-lg-4{margin-top:1.5rem!important}.mr-lg-4,.mx-lg-4{margin-right:1.5rem!important}.mb-lg-4,.my-lg-4{margin-bottom:1.5rem!important}.ml-lg-4,.mx-lg-4{margin-left:1.5rem!important}.m-lg-5{margin:3rem!important}.mt-lg-5,.my-lg-5{margin-top:3rem!important}.mr-lg-5,.mx-lg-5{margin-right:3rem!important}.mb-lg-5,.my-lg-5{margin-bottom:3rem!important}.ml-lg-5,.mx-lg-5{margin-left:3rem!important}.p-lg-0{padding:0!important}.pt-lg-0,.py-lg-0{padding-top:0!important}.pr-lg-0,.px-lg-0{padding-right:0!important}.pb-lg-0,.py-lg-0{padding-bottom:0!important}.pl-lg-0,.px-lg-0{padding-left:0!important}.p-lg-1{padding:.25rem!important}.pt-lg-1,.py-lg-1{padding-top:.25rem!important}.pr-lg-1,.px-lg-1{padding-right:.25rem!important}.pb-lg-1,.py-lg-1{padding-bottom:.25rem!important}.pl-lg-1,.px-lg-1{padding-left:.25rem!important}.p-lg-2{padding:.5rem!important}.pt-lg-2,.py-lg-2{padding-top:.5rem!important}.pr-lg-2,.px-lg-2{padding-right:.5rem!important}.pb-lg-2,.py-lg-2{padding-bottom:.5rem!important}.pl-lg-2,.px-lg-2{padding-left:.5rem!important}.p-lg-3{padding:1rem!important}.pt-lg-3,.py-lg-3{padding-top:1rem!important}.pr-lg-3,.px-lg-3{padding-right:1rem!important}.pb-lg-3,.py-lg-3{padding-bottom:1rem!important}.pl-lg-3,.px-lg-3{padding-left:1rem!important}.p-lg-4{padding:1.5rem!important}.pt-lg-4,.py-lg-4{padding-top:1.5rem!important}.pr-lg-4,.px-lg-4{padding-right:1.5rem!important}.pb-lg-4,.py-lg-4{padding-bottom:1.5rem!important}.pl-lg-4,.px-lg-4{padding-left:1.5rem!important}.p-lg-5{padding:3rem!important}.pt-lg-5,.py-lg-5{padding-top:3rem!important}.pr-lg-5,.px-lg-5{padding-right:3rem!important}.pb-lg-5,.py-lg-5{padding-bottom:3rem!important}.pl-lg-5,.px-lg-5{padding-left:3rem!important}.m-lg-n1{margin:-.25rem!i
mportant}.mt-lg-n1,.my-lg-n1{margin-top:-.25rem!important}.mr-lg-n1,.mx-lg-n1{margin-right:-.25rem!important}.mb-lg-n1,.my-lg-n1{margin-bottom:-.25rem!important}.ml-lg-n1,.mx-lg-n1{margin-left:-.25rem!important}.m-lg-n2{margin:-.5rem!important}.mt-lg-n2,.my-lg-n2{margin-top:-.5rem!important}.mr-lg-n2,.mx-lg-n2{margin-right:-.5rem!important}.mb-lg-n2,.my-lg-n2{margin-bottom:-.5rem!important}.ml-lg-n2,.mx-lg-n2{margin-left:-.5rem!important}.m-lg-n3{margin:-1rem!important}.mt-lg-n3,.my-lg-n3{margin-top:-1rem!important}.mr-lg-n3,.mx-lg-n3{margin-right:-1rem!important}.mb-lg-n3,.my-lg-n3{margin-bottom:-1rem!important}.ml-lg-n3,.mx-lg-n3{margin-left:-1rem!important}.m-lg-n4{margin:-1.5rem!important}.mt-lg-n4,.my-lg-n4{margin-top:-1.5rem!important}.mr-lg-n4,.mx-lg-n4{margin-right:-1.5rem!important}.mb-lg-n4,.my-lg-n4{margin-bottom:-1.5rem!important}.ml-lg-n4,.mx-lg-n4{margin-left:-1.5rem!important}.m-lg-n5{margin:-3rem!important}.mt-lg-n5,.my-lg-n5{margin-top:-3rem!important}.mr-lg-n5,.mx-lg-n5{margin-right:-3rem!important}.mb-lg-n5,.my-lg-n5{margin-bottom:-3rem!important}.ml-lg-n5,.mx-lg-n5{margin-left:-3rem!important}.m-lg-auto{margin:auto!important}.mt-lg-auto,.my-lg-auto{margin-top:auto!important}.mr-lg-auto,.mx-lg-auto{margin-right:auto!important}.mb-lg-auto,.my-lg-auto{margin-bottom:auto!important}.ml-lg-auto,.mx-lg-auto{margin-left:auto!important}}@media (min-width:1200px){.m-xl-0{margin:0!important}.mt-xl-0,.my-xl-0{margin-top:0!important}.mr-xl-0,.mx-xl-0{margin-right:0!important}.mb-xl-0,.my-xl-0{margin-bottom:0!important}.ml-xl-0,.mx-xl-0{margin-left:0!important}.m-xl-1{margin:.25rem!important}.mt-xl-1,.my-xl-1{margin-top:.25rem!important}.mr-xl-1,.mx-xl-1{margin-right:.25rem!important}.mb-xl-1,.my-xl-1{margin-bottom:.25rem!important}.ml-xl-1,.mx-xl-1{margin-left:.25rem!important}.m-xl-2{margin:.5rem!important}.mt-xl-2,.my-xl-2{margin-top:.5rem!important}.mr-xl-2,.mx-xl-2{margin-right:.5rem!important}.mb-xl-2,.my-xl-2{margin-bottom:.5rem!important}.ml-xl-2,.mx-xl-2{margin-left:.5rem!important}.m-xl-3{margin:1rem!important}.mt-xl-3,.my-xl-3{margin-top:1rem!important}.mr-xl-3,.mx-xl-3{margin-right:1rem!important}.mb-xl-3,.my-xl-3{margin-bottom:1rem!important}.ml-xl-3,.mx-xl-3{margin-left:1rem!important}.m-xl-4{margin:1.5rem!important}.mt-xl-4,.my-xl-4{margin-top:1.5rem!important}.mr-xl-4,.mx-xl-4{margin-right:1.5rem!important}.mb-xl-4,.my-xl-4{margin-bottom:1.5rem!important}.ml-xl-4,.mx-xl-4{margin-left:1.5rem!important}.m-xl-5{margin:3rem!important}.mt-xl-5,.my-xl-5{margin-top:3rem!important}.mr-xl-5,.mx-xl-5{margin-right:3rem!important}.mb-xl-5,.my-xl-5{margin-bottom:3rem!important}.ml-xl-5,.mx-xl-5{margin-left:3rem!important}.p-xl-0{padding:0!important}.pt-xl-0,.py-xl-0{padding-top:0!important}.pr-xl-0,.px-xl-0{padding-right:0!important}.pb-xl-0,.py-xl-0{padding-bottom:0!important}.pl-xl-0,.px-xl-0{padding-left:0!important}.p-xl-1{padding:.25rem!important}.pt-xl-1,.py-xl-1{padding-top:.25rem!important}.pr-xl-1,.px-xl-1{padding-right:.25rem!important}.pb-xl-1,.py-xl-1{padding-bottom:.25rem!important}.pl-xl-1,.px-xl-1{padding-left:.25rem!important}.p-xl-2{padding:.5rem!important}.pt-xl-2,.py-xl-2{padding-top:.5rem!important}.pr-xl-2,.px-xl-2{padding-right:.5rem!important}.pb-xl-2,.py-xl-2{padding-bottom:.5rem!important}.pl-xl-2,.px-xl-2{padding-left:.5rem!important}.p-xl-3{padding:1rem!important}.pt-xl-3,.py-xl-3{padding-top:1rem!important}.pr-xl-3,.px-xl-3{padding-right:1rem!important}.pb-xl-3,.py-xl-3{padding-bottom:1rem!important}.pl-xl-3,.px-xl-3{padding-left:1rem!important}.p-xl-4{p
adding:1.5rem!important}.pt-xl-4,.py-xl-4{padding-top:1.5rem!important}.pr-xl-4,.px-xl-4{padding-right:1.5rem!important}.pb-xl-4,.py-xl-4{padding-bottom:1.5rem!important}.pl-xl-4,.px-xl-4{padding-left:1.5rem!important}.p-xl-5{padding:3rem!important}.pt-xl-5,.py-xl-5{padding-top:3rem!important}.pr-xl-5,.px-xl-5{padding-right:3rem!important}.pb-xl-5,.py-xl-5{padding-bottom:3rem!important}.pl-xl-5,.px-xl-5{padding-left:3rem!important}.m-xl-n1{margin:-.25rem!important}.mt-xl-n1,.my-xl-n1{margin-top:-.25rem!important}.mr-xl-n1,.mx-xl-n1{margin-right:-.25rem!important}.mb-xl-n1,.my-xl-n1{margin-bottom:-.25rem!important}.ml-xl-n1,.mx-xl-n1{margin-left:-.25rem!important}.m-xl-n2{margin:-.5rem!important}.mt-xl-n2,.my-xl-n2{margin-top:-.5rem!important}.mr-xl-n2,.mx-xl-n2{margin-right:-.5rem!important}.mb-xl-n2,.my-xl-n2{margin-bottom:-.5rem!important}.ml-xl-n2,.mx-xl-n2{margin-left:-.5rem!important}.m-xl-n3{margin:-1rem!important}.mt-xl-n3,.my-xl-n3{margin-top:-1rem!important}.mr-xl-n3,.mx-xl-n3{margin-right:-1rem!important}.mb-xl-n3,.my-xl-n3{margin-bottom:-1rem!important}.ml-xl-n3,.mx-xl-n3{margin-left:-1rem!important}.m-xl-n4{margin:-1.5rem!important}.mt-xl-n4,.my-xl-n4{margin-top:-1.5rem!important}.mr-xl-n4,.mx-xl-n4{margin-right:-1.5rem!important}.mb-xl-n4,.my-xl-n4{margin-bottom:-1.5rem!important}.ml-xl-n4,.mx-xl-n4{margin-left:-1.5rem!important}.m-xl-n5{margin:-3rem!important}.mt-xl-n5,.my-xl-n5{margin-top:-3rem!important}.mr-xl-n5,.mx-xl-n5{margin-right:-3rem!important}.mb-xl-n5,.my-xl-n5{margin-bottom:-3rem!important}.ml-xl-n5,.mx-xl-n5{margin-left:-3rem!important}.m-xl-auto{margin:auto!important}.mt-xl-auto,.my-xl-auto{margin-top:auto!important}.mr-xl-auto,.mx-xl-auto{margin-right:auto!important}.mb-xl-auto,.my-xl-auto{margin-bottom:auto!important}.ml-xl-auto,.mx-xl-auto{margin-left:auto!important}} 7 | /*# sourceMappingURL=bootstrap-grid.min.css.map */ -------------------------------------------------------------------------------- /static/vendor/bootstrap/css/bootstrap-reboot.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/) 3 | * Copyright 2011-2019 The Bootstrap Authors 4 | * Copyright 2011-2019 Twitter, Inc. 
5 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 6 | * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md) 7 | */ 8 | *, 9 | *::before, 10 | *::after { 11 | box-sizing: border-box; 12 | } 13 | 14 | html { 15 | font-family: sans-serif; 16 | line-height: 1.15; 17 | -webkit-text-size-adjust: 100%; 18 | -webkit-tap-highlight-color: rgba(0, 0, 0, 0); 19 | } 20 | 21 | article, aside, figcaption, figure, footer, header, hgroup, main, nav, section { 22 | display: block; 23 | } 24 | 25 | body { 26 | margin: 0; 27 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; 28 | font-size: 1rem; 29 | font-weight: 400; 30 | line-height: 1.5; 31 | color: #212529; 32 | text-align: left; 33 | background-color: #fff; 34 | } 35 | 36 | [tabindex="-1"]:focus { 37 | outline: 0 !important; 38 | } 39 | 40 | hr { 41 | box-sizing: content-box; 42 | height: 0; 43 | overflow: visible; 44 | } 45 | 46 | h1, h2, h3, h4, h5, h6 { 47 | margin-top: 0; 48 | margin-bottom: 0.5rem; 49 | } 50 | 51 | p { 52 | margin-top: 0; 53 | margin-bottom: 1rem; 54 | } 55 | 56 | abbr[title], 57 | abbr[data-original-title] { 58 | text-decoration: underline; 59 | -webkit-text-decoration: underline dotted; 60 | text-decoration: underline dotted; 61 | cursor: help; 62 | border-bottom: 0; 63 | -webkit-text-decoration-skip-ink: none; 64 | text-decoration-skip-ink: none; 65 | } 66 | 67 | address { 68 | margin-bottom: 1rem; 69 | font-style: normal; 70 | line-height: inherit; 71 | } 72 | 73 | ol, 74 | ul, 75 | dl { 76 | margin-top: 0; 77 | margin-bottom: 1rem; 78 | } 79 | 80 | ol ol, 81 | ul ul, 82 | ol ul, 83 | ul ol { 84 | margin-bottom: 0; 85 | } 86 | 87 | dt { 88 | font-weight: 700; 89 | } 90 | 91 | dd { 92 | margin-bottom: .5rem; 93 | margin-left: 0; 94 | } 95 | 96 | blockquote { 97 | margin: 0 0 1rem; 98 | } 99 | 100 | b, 101 | strong { 102 | font-weight: bolder; 103 | } 104 | 105 | small { 106 | font-size: 80%; 107 | } 108 | 109 | sub, 110 | sup { 111 | position: relative; 112 | font-size: 75%; 113 | line-height: 0; 114 | vertical-align: baseline; 115 | } 116 | 117 | sub { 118 | bottom: -.25em; 119 | } 120 | 121 | sup { 122 | top: -.5em; 123 | } 124 | 125 | a { 126 | color: #007bff; 127 | text-decoration: none; 128 | background-color: transparent; 129 | } 130 | 131 | a:hover { 132 | color: #0056b3; 133 | text-decoration: underline; 134 | } 135 | 136 | a:not([href]):not([tabindex]) { 137 | color: inherit; 138 | text-decoration: none; 139 | } 140 | 141 | a:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus { 142 | color: inherit; 143 | text-decoration: none; 144 | } 145 | 146 | a:not([href]):not([tabindex]):focus { 147 | outline: 0; 148 | } 149 | 150 | pre, 151 | code, 152 | kbd, 153 | samp { 154 | font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; 155 | font-size: 1em; 156 | } 157 | 158 | pre { 159 | margin-top: 0; 160 | margin-bottom: 1rem; 161 | overflow: auto; 162 | } 163 | 164 | figure { 165 | margin: 0 0 1rem; 166 | } 167 | 168 | img { 169 | vertical-align: middle; 170 | border-style: none; 171 | } 172 | 173 | svg { 174 | overflow: hidden; 175 | vertical-align: middle; 176 | } 177 | 178 | table { 179 | border-collapse: collapse; 180 | } 181 | 182 | caption { 183 | padding-top: 0.75rem; 184 | padding-bottom: 0.75rem; 185 | color: #6c757d; 186 | 
text-align: left; 187 | caption-side: bottom; 188 | } 189 | 190 | th { 191 | text-align: inherit; 192 | } 193 | 194 | label { 195 | display: inline-block; 196 | margin-bottom: 0.5rem; 197 | } 198 | 199 | button { 200 | border-radius: 0; 201 | } 202 | 203 | button:focus { 204 | outline: 1px dotted; 205 | outline: 5px auto -webkit-focus-ring-color; 206 | } 207 | 208 | input, 209 | button, 210 | select, 211 | optgroup, 212 | textarea { 213 | margin: 0; 214 | font-family: inherit; 215 | font-size: inherit; 216 | line-height: inherit; 217 | } 218 | 219 | button, 220 | input { 221 | overflow: visible; 222 | } 223 | 224 | button, 225 | select { 226 | text-transform: none; 227 | } 228 | 229 | select { 230 | word-wrap: normal; 231 | } 232 | 233 | button, 234 | [type="button"], 235 | [type="reset"], 236 | [type="submit"] { 237 | -webkit-appearance: button; 238 | } 239 | 240 | button:not(:disabled), 241 | [type="button"]:not(:disabled), 242 | [type="reset"]:not(:disabled), 243 | [type="submit"]:not(:disabled) { 244 | cursor: pointer; 245 | } 246 | 247 | button::-moz-focus-inner, 248 | [type="button"]::-moz-focus-inner, 249 | [type="reset"]::-moz-focus-inner, 250 | [type="submit"]::-moz-focus-inner { 251 | padding: 0; 252 | border-style: none; 253 | } 254 | 255 | input[type="radio"], 256 | input[type="checkbox"] { 257 | box-sizing: border-box; 258 | padding: 0; 259 | } 260 | 261 | input[type="date"], 262 | input[type="time"], 263 | input[type="datetime-local"], 264 | input[type="month"] { 265 | -webkit-appearance: listbox; 266 | } 267 | 268 | textarea { 269 | overflow: auto; 270 | resize: vertical; 271 | } 272 | 273 | fieldset { 274 | min-width: 0; 275 | padding: 0; 276 | margin: 0; 277 | border: 0; 278 | } 279 | 280 | legend { 281 | display: block; 282 | width: 100%; 283 | max-width: 100%; 284 | padding: 0; 285 | margin-bottom: .5rem; 286 | font-size: 1.5rem; 287 | line-height: inherit; 288 | color: inherit; 289 | white-space: normal; 290 | } 291 | 292 | progress { 293 | vertical-align: baseline; 294 | } 295 | 296 | [type="number"]::-webkit-inner-spin-button, 297 | [type="number"]::-webkit-outer-spin-button { 298 | height: auto; 299 | } 300 | 301 | [type="search"] { 302 | outline-offset: -2px; 303 | -webkit-appearance: none; 304 | } 305 | 306 | [type="search"]::-webkit-search-decoration { 307 | -webkit-appearance: none; 308 | } 309 | 310 | ::-webkit-file-upload-button { 311 | font: inherit; 312 | -webkit-appearance: button; 313 | } 314 | 315 | output { 316 | display: inline-block; 317 | } 318 | 319 | summary { 320 | display: list-item; 321 | cursor: pointer; 322 | } 323 | 324 | template { 325 | display: none; 326 | } 327 | 328 | [hidden] { 329 | display: none !important; 330 | } 331 | /*# sourceMappingURL=bootstrap-reboot.css.map */ -------------------------------------------------------------------------------- /static/vendor/bootstrap/css/bootstrap-reboot.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/) 3 | * Copyright 2011-2019 The Bootstrap Authors 4 | * Copyright 2011-2019 Twitter, Inc. 
5 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 6 | * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md) 7 | */*,::after,::before{box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:transparent}article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:left;background-color:#fff}[tabindex="-1"]:focus{outline:0!important}hr{box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[data-original-title],abbr[title]{text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}dl,ol,ul{margin-top:0;margin-bottom:1rem}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#007bff;text-decoration:none;background-color:transparent}a:hover{color:#0056b3;text-decoration:underline}a:not([href]):not([tabindex]){color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus,a:not([href]):not([tabindex]):hover{color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus{outline:0}code,kbd,pre,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto}figure{margin:0 0 1rem}img{vertical-align:middle;border-style:none}svg{overflow:hidden;vertical-align:middle}table{border-collapse:collapse}caption{padding-top:.75rem;padding-bottom:.75rem;color:#6c757d;text-align:left;caption-side:bottom}th{text-align:inherit}label{display:inline-block;margin-bottom:.5rem}button{border-radius:0}button:focus{outline:1px dotted;outline:5px auto 
-webkit-focus-ring-color}button,input,optgroup,select,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,input{overflow:visible}button,select{text-transform:none}select{word-wrap:normal}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled),button:not(:disabled){cursor:pointer}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{padding:0;border-style:none}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=date],input[type=datetime-local],input[type=month],input[type=time]{-webkit-appearance:listbox}textarea{overflow:auto;resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;max-width:100%;padding:0;margin-bottom:.5rem;font-size:1.5rem;line-height:inherit;color:inherit;white-space:normal}progress{vertical-align:baseline}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:none}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}summary{display:list-item;cursor:pointer}template{display:none}[hidden]{display:none!important} 8 | /*# sourceMappingURL=bootstrap-reboot.min.css.map */ -------------------------------------------------------------------------------- /static/vendor/bootstrap/css/bootstrap-reboot.min.css.map: -------------------------------------------------------------------------------- 1 | {"version":3,"sources":["../../scss/bootstrap-reboot.scss","../../scss/_reboot.scss","dist/css/bootstrap-reboot.css","../../scss/vendor/_rfs.scss","bootstrap-reboot.css","../../scss/mixins/_hover.scss"],"names":[],"mappings":"AAAA;;;;;;ACkBA,ECTA,QADA,SDaE,WAAA,WAGF,KACE,YAAA,WACA,YAAA,KACA,yBAAA,KACA,4BAAA,YAMF,QAAA,MAAA,WAAA,OAAA,OAAA,OAAA,OAAA,KAAA,IAAA,QACE,QAAA,MAUF,KACE,OAAA,EACA,YAAA,aAAA,CAAA,kBAAA,CAAA,UAAA,CAAA,MAAA,CAAA,gBAAA,CAAA,KAAA,CAAA,WAAA,CAAA,UAAA,CAAA,mBAAA,CAAA,gBAAA,CAAA,iBAAA,CAAA,mBEgFI,UAAA,KF9EJ,YAAA,IACA,YAAA,IACA,MAAA,QACA,WAAA,KACA,iBAAA,KGlBF,sBH2BE,QAAA,YASF,GACE,WAAA,YACA,OAAA,EACA,SAAA,QAaF,GAAA,GAAA,GAAA,GAAA,GAAA,GACE,WAAA,EACA,cAAA,MAOF,EACE,WAAA,EACA,cAAA,KC1CF,0BDqDA,YAEE,gBAAA,UACA,wBAAA,UAAA,OAAA,gBAAA,UAAA,OACA,OAAA,KACA,cAAA,EACA,iCAAA,KAAA,yBAAA,KAGF,QACE,cAAA,KACA,WAAA,OACA,YAAA,QC/CF,GDkDA,GCnDA,GDsDE,WAAA,EACA,cAAA,KAGF,MClDA,MACA,MAFA,MDuDE,cAAA,EAGF,GACE,YAAA,IAGF,GACE,cAAA,MACA,YAAA,EAGF,WACE,OAAA,EAAA,EAAA,KAGF,ECnDA,ODqDE,YAAA,OAGF,MEpFI,UAAA,IF6FJ,ICxDA,ID0DE,SAAA,SE/FE,UAAA,IFiGF,YAAA,EACA,eAAA,SAGF,IAAM,OAAA,OACN,IAAM,IAAA,MAON,EACE,MAAA,QACA,gBAAA,KACA,iBAAA,YI5KA,QJ+KE,MAAA,QACA,gBAAA,UAUJ,8BACE,MAAA,QACA,gBAAA,KIxLA,oCAAA,oCJ2LE,MAAA,QACA,gBAAA,KANJ,oCAUI,QAAA,EC1DJ,KACA,IDkEA,ICjEA,KDqEE,YAAA,cAAA,CAAA,KAAA,CAAA,MAAA,CAAA,QAAA,CAAA,iBAAA,CAAA,aAAA,CAAA,UErJE,UAAA,IFyJJ,IAEE,WAAA,EAEA,cAAA,KAEA,SAAA,KAQF,OAEE,OAAA,EAAA,EAAA,KAQF,IACE,eAAA,OACA,aAAA,KAGF,IAGE,SAAA,OACA,eAAA,OAQF,MACE,gBAAA,SAGF,QACE,YAAA,OACA,eAAA,OACA,MAAA,QACA,WAAA,KACA,aAAA,OAGF,GAGE,WAAA,QAQF,MAEE,QAAA,aACA,cAAA,MAMF,OAEE,cAAA,EAOF,aACE,QAAA,IAAA,OACA,QAAA,IAAA,KAAA,yBCrGF,ODwGA,MCtGA,SADA,OAEA,SD0GE,OAAA,EACA,YAAA,QEtPE,UAAA,QFwPF,YAAA,QAGF,OCxGA,MD0GE,SAAA,QAGF,OCxGA,OD0GE,eAAA,KAMF,OACE,UAAA,OCxGF,cACA,aACA,cD6GA,OAIE,mBAAA,OC5GF,6BACA,4BACA,6BD+GE,sBAKI,OAAA,QC/GN,gCACA,+BACA,gCDmHA,yBAIE,QAAA,E
ACA,aAAA,KClHF,qBDqHA,kBAEE,WAAA,WACA,QAAA,EAIF,iBCrHA,2BACA,kBAFA,iBD+HE,mBAAA,QAGF,SACE,SAAA,KAEA,OAAA,SAGF,SAME,UAAA,EAEA,QAAA,EACA,OAAA,EACA,OAAA,EAKF,OACE,QAAA,MACA,MAAA,KACA,UAAA,KACA,QAAA,EACA,cAAA,MElSI,UAAA,OFoSJ,YAAA,QACA,MAAA,QACA,YAAA,OAGF,SACE,eAAA,SGpIF,yCFGA,yCDuIE,OAAA,KGrIF,cH6IE,eAAA,KACA,mBAAA,KGzIF,yCHiJE,mBAAA,KAQF,6BACE,KAAA,QACA,mBAAA,OAOF,OACE,QAAA,aAGF,QACE,QAAA,UACA,OAAA,QAGF,SACE,QAAA,KGtJF,SH4JE,QAAA","sourcesContent":["/*!\n * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/)\n * Copyright 2011-2019 The Bootstrap Authors\n * Copyright 2011-2019 Twitter, Inc.\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)\n * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md)\n */\n\n@import \"functions\";\n@import \"variables\";\n@import \"mixins\";\n@import \"reboot\";\n","// stylelint-disable at-rule-no-vendor-prefix, declaration-no-important, selector-no-qualifying-type, property-no-vendor-prefix\n\n// Reboot\n//\n// Normalization of HTML elements, manually forked from Normalize.css to remove\n// styles targeting irrelevant browsers while applying new styles.\n//\n// Normalize is licensed MIT. https://github.com/necolas/normalize.css\n\n\n// Document\n//\n// 1. Change from `box-sizing: content-box` so that `width` is not affected by `padding` or `border`.\n// 2. Change the default font family in all browsers.\n// 3. Correct the line height in all browsers.\n// 4. Prevent adjustments of font size after orientation changes in IE on Windows Phone and in iOS.\n// 5. Change the default tap highlight to be completely transparent in iOS.\n\n*,\n*::before,\n*::after {\n box-sizing: border-box; // 1\n}\n\nhtml {\n font-family: sans-serif; // 2\n line-height: 1.15; // 3\n -webkit-text-size-adjust: 100%; // 4\n -webkit-tap-highlight-color: rgba($black, 0); // 5\n}\n\n// Shim for \"new\" HTML5 structural elements to display correctly (IE10, older browsers)\n// TODO: remove in v5\n// stylelint-disable-next-line selector-list-comma-newline-after\narticle, aside, figcaption, figure, footer, header, hgroup, main, nav, section {\n display: block;\n}\n\n// Body\n//\n// 1. Remove the margin in all browsers.\n// 2. As a best practice, apply a default `background-color`.\n// 3. Set an explicit initial text-align value so that we can later use\n// the `inherit` value on things like `` elements.\n\nbody {\n margin: 0; // 1\n font-family: $font-family-base;\n @include font-size($font-size-base);\n font-weight: $font-weight-base;\n line-height: $line-height-base;\n color: $body-color;\n text-align: left; // 3\n background-color: $body-bg; // 2\n}\n\n// Suppress the focus outline on elements that cannot be accessed via keyboard.\n// This prevents an unwanted focus outline from appearing around elements that\n// might still respond to pointer events.\n//\n// Credit: https://github.com/suitcss/base\n[tabindex=\"-1\"]:focus {\n outline: 0 !important;\n}\n\n\n// Content grouping\n//\n// 1. Add the correct box sizing in Firefox.\n// 2. Show the overflow in Edge and IE.\n\nhr {\n box-sizing: content-box; // 1\n height: 0; // 1\n overflow: visible; // 2\n}\n\n\n//\n// Typography\n//\n\n// Remove top margins from headings\n//\n// By default, `

<h1>`-`<h6>

` all receive top and bottom margins. We nuke the top\n// margin for easier control within type scales as it avoids margin collapsing.\n// stylelint-disable-next-line selector-list-comma-newline-after\nh1, h2, h3, h4, h5, h6 {\n margin-top: 0;\n margin-bottom: $headings-margin-bottom;\n}\n\n// Reset margins on paragraphs\n//\n// Similarly, the top margin on `
<p>
`s get reset. However, we also reset the\n// bottom margin to use `rem` units instead of `em`.\np {\n margin-top: 0;\n margin-bottom: $paragraph-margin-bottom;\n}\n\n// Abbreviations\n//\n// 1. Duplicate behavior to the data-* attribute for our tooltip plugin\n// 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari.\n// 3. Add explicit cursor to indicate changed behavior.\n// 4. Remove the bottom border in Firefox 39-.\n// 5. Prevent the text-decoration to be skipped.\n\nabbr[title],\nabbr[data-original-title] { // 1\n text-decoration: underline; // 2\n text-decoration: underline dotted; // 2\n cursor: help; // 3\n border-bottom: 0; // 4\n text-decoration-skip-ink: none; // 5\n}\n\naddress {\n margin-bottom: 1rem;\n font-style: normal;\n line-height: inherit;\n}\n\nol,\nul,\ndl {\n margin-top: 0;\n margin-bottom: 1rem;\n}\n\nol ol,\nul ul,\nol ul,\nul ol {\n margin-bottom: 0;\n}\n\ndt {\n font-weight: $dt-font-weight;\n}\n\ndd {\n margin-bottom: .5rem;\n margin-left: 0; // Undo browser default\n}\n\nblockquote {\n margin: 0 0 1rem;\n}\n\nb,\nstrong {\n font-weight: $font-weight-bolder; // Add the correct font weight in Chrome, Edge, and Safari\n}\n\nsmall {\n @include font-size(80%); // Add the correct font size in all browsers\n}\n\n//\n// Prevent `sub` and `sup` elements from affecting the line height in\n// all browsers.\n//\n\nsub,\nsup {\n position: relative;\n @include font-size(75%);\n line-height: 0;\n vertical-align: baseline;\n}\n\nsub { bottom: -.25em; }\nsup { top: -.5em; }\n\n\n//\n// Links\n//\n\na {\n color: $link-color;\n text-decoration: $link-decoration;\n background-color: transparent; // Remove the gray background on active links in IE 10.\n\n @include hover {\n color: $link-hover-color;\n text-decoration: $link-hover-decoration;\n }\n}\n\n// And undo these styles for placeholder links/named anchors (without href)\n// which have not been made explicitly keyboard-focusable (without tabindex).\n// It would be more straightforward to just use a[href] in previous block, but that\n// causes specificity issues in many other styles that are too complex to fix.\n// See https://github.com/twbs/bootstrap/issues/19402\n\na:not([href]):not([tabindex]) {\n color: inherit;\n text-decoration: none;\n\n @include hover-focus {\n color: inherit;\n text-decoration: none;\n }\n\n &:focus {\n outline: 0;\n }\n}\n\n\n//\n// Code\n//\n\npre,\ncode,\nkbd,\nsamp {\n font-family: $font-family-monospace;\n @include font-size(1em); // Correct the odd `em` font sizing in all browsers.\n}\n\npre {\n // Remove browser default top margin\n margin-top: 0;\n // Reset browser default of `1em` to use `rem`s\n margin-bottom: 1rem;\n // Don't allow content to break outside\n overflow: auto;\n}\n\n\n//\n// Figures\n//\n\nfigure {\n // Apply a consistent margin strategy (matches our type styles).\n margin: 0 0 1rem;\n}\n\n\n//\n// Images and content\n//\n\nimg {\n vertical-align: middle;\n border-style: none; // Remove the border on images inside links in IE 10-.\n}\n\nsvg {\n // Workaround for the SVG overflow bug in IE10/11 is still required.\n // See https://github.com/twbs/bootstrap/issues/26878\n overflow: hidden;\n vertical-align: middle;\n}\n\n\n//\n// Tables\n//\n\ntable {\n border-collapse: collapse; // Prevent double borders\n}\n\ncaption {\n padding-top: $table-cell-padding;\n padding-bottom: $table-cell-padding;\n color: $table-caption-color;\n text-align: left;\n caption-side: bottom;\n}\n\nth {\n // Matches default `` alignment by inheriting from the ``, or the\n // closest 
parent with a set `text-align`.\n text-align: inherit;\n}\n\n\n//\n// Forms\n//\n\nlabel {\n // Allow labels to use `margin` for spacing.\n display: inline-block;\n margin-bottom: $label-margin-bottom;\n}\n\n// Remove the default `border-radius` that macOS Chrome adds.\n//\n// Details at https://github.com/twbs/bootstrap/issues/24093\nbutton {\n // stylelint-disable-next-line property-blacklist\n border-radius: 0;\n}\n\n// Work around a Firefox/IE bug where the transparent `button` background\n// results in a loss of the default `button` focus styles.\n//\n// Credit: https://github.com/suitcss/base/\nbutton:focus {\n outline: 1px dotted;\n outline: 5px auto -webkit-focus-ring-color;\n}\n\ninput,\nbutton,\nselect,\noptgroup,\ntextarea {\n margin: 0; // Remove the margin in Firefox and Safari\n font-family: inherit;\n @include font-size(inherit);\n line-height: inherit;\n}\n\nbutton,\ninput {\n overflow: visible; // Show the overflow in Edge\n}\n\nbutton,\nselect {\n text-transform: none; // Remove the inheritance of text transform in Firefox\n}\n\n// Remove the inheritance of word-wrap in Safari.\n//\n// Details at https://github.com/twbs/bootstrap/issues/24990\nselect {\n word-wrap: normal;\n}\n\n\n// 1. Prevent a WebKit bug where (2) destroys native `audio` and `video`\n// controls in Android 4.\n// 2. Correct the inability to style clickable types in iOS and Safari.\nbutton,\n[type=\"button\"], // 1\n[type=\"reset\"],\n[type=\"submit\"] {\n -webkit-appearance: button; // 2\n}\n\n// Opinionated: add \"hand\" cursor to non-disabled button elements.\n@if $enable-pointer-cursor-for-buttons {\n button,\n [type=\"button\"],\n [type=\"reset\"],\n [type=\"submit\"] {\n &:not(:disabled) {\n cursor: pointer;\n }\n }\n}\n\n// Remove inner border and padding from Firefox, but don't restore the outline like Normalize.\nbutton::-moz-focus-inner,\n[type=\"button\"]::-moz-focus-inner,\n[type=\"reset\"]::-moz-focus-inner,\n[type=\"submit\"]::-moz-focus-inner {\n padding: 0;\n border-style: none;\n}\n\ninput[type=\"radio\"],\ninput[type=\"checkbox\"] {\n box-sizing: border-box; // 1. Add the correct box sizing in IE 10-\n padding: 0; // 2. Remove the padding in IE 10-\n}\n\n\ninput[type=\"date\"],\ninput[type=\"time\"],\ninput[type=\"datetime-local\"],\ninput[type=\"month\"] {\n // Remove the default appearance of temporal inputs to avoid a Mobile Safari\n // bug where setting a custom line-height prevents text from being vertically\n // centered within the input.\n // See https://bugs.webkit.org/show_bug.cgi?id=139848\n // and https://github.com/twbs/bootstrap/issues/11266\n -webkit-appearance: listbox;\n}\n\ntextarea {\n overflow: auto; // Remove the default vertical scrollbar in IE.\n // Textareas should really only resize vertically so they don't break their (horizontal) containers.\n resize: vertical;\n}\n\nfieldset {\n // Browsers set a default `min-width: min-content;` on fieldsets,\n // unlike e.g. `
<div>
`s, which have `min-width: 0;` by default.\n // So we reset that to ensure fieldsets behave more like a standard block element.\n // See https://github.com/twbs/bootstrap/issues/12359\n // and https://html.spec.whatwg.org/multipage/#the-fieldset-and-legend-elements\n min-width: 0;\n // Reset the default outline behavior of fieldsets so they don't affect page layout.\n padding: 0;\n margin: 0;\n border: 0;\n}\n\n// 1. Correct the text wrapping in Edge and IE.\n// 2. Correct the color inheritance from `fieldset` elements in IE.\nlegend {\n display: block;\n width: 100%;\n max-width: 100%; // 1\n padding: 0;\n margin-bottom: .5rem;\n @include font-size(1.5rem);\n line-height: inherit;\n color: inherit; // 2\n white-space: normal; // 1\n}\n\nprogress {\n vertical-align: baseline; // Add the correct vertical alignment in Chrome, Firefox, and Opera.\n}\n\n// Correct the cursor style of increment and decrement buttons in Chrome.\n[type=\"number\"]::-webkit-inner-spin-button,\n[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n[type=\"search\"] {\n // This overrides the extra rounded corners on search inputs in iOS so that our\n // `.form-control` class can properly style them. Note that this cannot simply\n // be added to `.form-control` as it's not specific enough. For details, see\n // https://github.com/twbs/bootstrap/issues/11586.\n outline-offset: -2px; // 2. Correct the outline style in Safari.\n -webkit-appearance: none;\n}\n\n//\n// Remove the inner padding in Chrome and Safari on macOS.\n//\n\n[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n//\n// 1. Correct the inability to style clickable types in iOS and Safari.\n// 2. Change font properties to `inherit` in Safari.\n//\n\n::-webkit-file-upload-button {\n font: inherit; // 2\n -webkit-appearance: button; // 1\n}\n\n//\n// Correct element displays\n//\n\noutput {\n display: inline-block;\n}\n\nsummary {\n display: list-item; // Add the correct display in all browsers\n cursor: pointer;\n}\n\ntemplate {\n display: none; // Add the correct display in IE\n}\n\n// Always hide an element with the `hidden` HTML attribute (from PureCSS).\n// Needed for proper display in IE 10-.\n[hidden] {\n display: none !important;\n}\n","/*!\n * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/)\n * Copyright 2011-2019 The Bootstrap Authors\n * Copyright 2011-2019 Twitter, Inc.\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)\n * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md)\n */\n*,\n*::before,\n*::after {\n box-sizing: border-box;\n}\n\nhtml {\n font-family: sans-serif;\n line-height: 1.15;\n -webkit-text-size-adjust: 100%;\n -webkit-tap-highlight-color: rgba(0, 0, 0, 0);\n}\n\narticle, aside, figcaption, figure, footer, header, hgroup, main, nav, section {\n display: block;\n}\n\nbody {\n margin: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", Roboto, \"Helvetica Neue\", Arial, \"Noto Sans\", sans-serif, \"Apple Color Emoji\", \"Segoe UI Emoji\", \"Segoe UI Symbol\", \"Noto Color Emoji\";\n font-size: 1rem;\n font-weight: 400;\n line-height: 1.5;\n color: #212529;\n text-align: left;\n background-color: #fff;\n}\n\n[tabindex=\"-1\"]:focus {\n outline: 0 !important;\n}\n\nhr {\n box-sizing: content-box;\n height: 0;\n overflow: visible;\n}\n\nh1, h2, h3, h4, h5, h6 {\n margin-top: 0;\n margin-bottom: 0.5rem;\n}\n\np {\n margin-top: 0;\n margin-bottom: 
1rem;\n}\n\nabbr[title],\nabbr[data-original-title] {\n text-decoration: underline;\n -webkit-text-decoration: underline dotted;\n text-decoration: underline dotted;\n cursor: help;\n border-bottom: 0;\n -webkit-text-decoration-skip-ink: none;\n text-decoration-skip-ink: none;\n}\n\naddress {\n margin-bottom: 1rem;\n font-style: normal;\n line-height: inherit;\n}\n\nol,\nul,\ndl {\n margin-top: 0;\n margin-bottom: 1rem;\n}\n\nol ol,\nul ul,\nol ul,\nul ol {\n margin-bottom: 0;\n}\n\ndt {\n font-weight: 700;\n}\n\ndd {\n margin-bottom: .5rem;\n margin-left: 0;\n}\n\nblockquote {\n margin: 0 0 1rem;\n}\n\nb,\nstrong {\n font-weight: bolder;\n}\n\nsmall {\n font-size: 80%;\n}\n\nsub,\nsup {\n position: relative;\n font-size: 75%;\n line-height: 0;\n vertical-align: baseline;\n}\n\nsub {\n bottom: -.25em;\n}\n\nsup {\n top: -.5em;\n}\n\na {\n color: #007bff;\n text-decoration: none;\n background-color: transparent;\n}\n\na:hover {\n color: #0056b3;\n text-decoration: underline;\n}\n\na:not([href]):not([tabindex]) {\n color: inherit;\n text-decoration: none;\n}\n\na:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus {\n color: inherit;\n text-decoration: none;\n}\n\na:not([href]):not([tabindex]):focus {\n outline: 0;\n}\n\npre,\ncode,\nkbd,\nsamp {\n font-family: SFMono-Regular, Menlo, Monaco, Consolas, \"Liberation Mono\", \"Courier New\", monospace;\n font-size: 1em;\n}\n\npre {\n margin-top: 0;\n margin-bottom: 1rem;\n overflow: auto;\n}\n\nfigure {\n margin: 0 0 1rem;\n}\n\nimg {\n vertical-align: middle;\n border-style: none;\n}\n\nsvg {\n overflow: hidden;\n vertical-align: middle;\n}\n\ntable {\n border-collapse: collapse;\n}\n\ncaption {\n padding-top: 0.75rem;\n padding-bottom: 0.75rem;\n color: #6c757d;\n text-align: left;\n caption-side: bottom;\n}\n\nth {\n text-align: inherit;\n}\n\nlabel {\n display: inline-block;\n margin-bottom: 0.5rem;\n}\n\nbutton {\n border-radius: 0;\n}\n\nbutton:focus {\n outline: 1px dotted;\n outline: 5px auto -webkit-focus-ring-color;\n}\n\ninput,\nbutton,\nselect,\noptgroup,\ntextarea {\n margin: 0;\n font-family: inherit;\n font-size: inherit;\n line-height: inherit;\n}\n\nbutton,\ninput {\n overflow: visible;\n}\n\nbutton,\nselect {\n text-transform: none;\n}\n\nselect {\n word-wrap: normal;\n}\n\nbutton,\n[type=\"button\"],\n[type=\"reset\"],\n[type=\"submit\"] {\n -webkit-appearance: button;\n}\n\nbutton:not(:disabled),\n[type=\"button\"]:not(:disabled),\n[type=\"reset\"]:not(:disabled),\n[type=\"submit\"]:not(:disabled) {\n cursor: pointer;\n}\n\nbutton::-moz-focus-inner,\n[type=\"button\"]::-moz-focus-inner,\n[type=\"reset\"]::-moz-focus-inner,\n[type=\"submit\"]::-moz-focus-inner {\n padding: 0;\n border-style: none;\n}\n\ninput[type=\"radio\"],\ninput[type=\"checkbox\"] {\n box-sizing: border-box;\n padding: 0;\n}\n\ninput[type=\"date\"],\ninput[type=\"time\"],\ninput[type=\"datetime-local\"],\ninput[type=\"month\"] {\n -webkit-appearance: listbox;\n}\n\ntextarea {\n overflow: auto;\n resize: vertical;\n}\n\nfieldset {\n min-width: 0;\n padding: 0;\n margin: 0;\n border: 0;\n}\n\nlegend {\n display: block;\n width: 100%;\n max-width: 100%;\n padding: 0;\n margin-bottom: .5rem;\n font-size: 1.5rem;\n line-height: inherit;\n color: inherit;\n white-space: normal;\n}\n\nprogress {\n vertical-align: baseline;\n}\n\n[type=\"number\"]::-webkit-inner-spin-button,\n[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n[type=\"search\"] {\n outline-offset: -2px;\n -webkit-appearance: 
none;\n}\n\n[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n::-webkit-file-upload-button {\n font: inherit;\n -webkit-appearance: button;\n}\n\noutput {\n display: inline-block;\n}\n\nsummary {\n display: list-item;\n cursor: pointer;\n}\n\ntemplate {\n display: none;\n}\n\n[hidden] {\n display: none !important;\n}\n/*# sourceMappingURL=bootstrap-reboot.css.map */","// stylelint-disable property-blacklist, scss/dollar-variable-default\n\n// SCSS RFS mixin\n//\n// Automated font-resizing\n//\n// See https://github.com/twbs/rfs\n\n// Configuration\n\n// Base font size\n$rfs-base-font-size: 1.25rem !default;\n$rfs-font-size-unit: rem !default;\n\n// Breakpoint at where font-size starts decreasing if screen width is smaller\n$rfs-breakpoint: 1200px !default;\n$rfs-breakpoint-unit: px !default;\n\n// Resize font-size based on screen height and width\n$rfs-two-dimensional: false !default;\n\n// Factor of decrease\n$rfs-factor: 10 !default;\n\n@if type-of($rfs-factor) != \"number\" or $rfs-factor <= 1 {\n @error \"`#{$rfs-factor}` is not a valid $rfs-factor, it must be greater than 1.\";\n}\n\n// Generate enable or disable classes. Possibilities: false, \"enable\" or \"disable\"\n$rfs-class: false !default;\n\n// 1 rem = $rfs-rem-value px\n$rfs-rem-value: 16 !default;\n\n// Safari iframe resize bug: https://github.com/twbs/rfs/issues/14\n$rfs-safari-iframe-resize-bug-fix: false !default;\n\n// Disable RFS by setting $enable-responsive-font-sizes to false\n$enable-responsive-font-sizes: true !default;\n\n// Cache $rfs-base-font-size unit\n$rfs-base-font-size-unit: unit($rfs-base-font-size);\n\n// Remove px-unit from $rfs-base-font-size for calculations\n@if $rfs-base-font-size-unit == \"px\" {\n $rfs-base-font-size: $rfs-base-font-size / ($rfs-base-font-size * 0 + 1);\n}\n@else if $rfs-base-font-size-unit == \"rem\" {\n $rfs-base-font-size: $rfs-base-font-size / ($rfs-base-font-size * 0 + 1 / $rfs-rem-value);\n}\n\n// Cache $rfs-breakpoint unit to prevent multiple calls\n$rfs-breakpoint-unit-cache: unit($rfs-breakpoint);\n\n// Remove unit from $rfs-breakpoint for calculations\n@if $rfs-breakpoint-unit-cache == \"px\" {\n $rfs-breakpoint: $rfs-breakpoint / ($rfs-breakpoint * 0 + 1);\n}\n@else if $rfs-breakpoint-unit-cache == \"rem\" or $rfs-breakpoint-unit-cache == \"em\" {\n $rfs-breakpoint: $rfs-breakpoint / ($rfs-breakpoint * 0 + 1 / $rfs-rem-value);\n}\n\n// Responsive font-size mixin\n@mixin rfs($fs, $important: false) {\n // Cache $fs unit\n $fs-unit: if(type-of($fs) == \"number\", unit($fs), false);\n\n // Add !important suffix if needed\n $rfs-suffix: if($important, \" !important\", \"\");\n\n // If $fs isn't a number (like inherit) or $fs has a unit (not px or rem, like 1.5em) or $ is 0, just print the value\n @if not $fs-unit or $fs-unit != \"\" and $fs-unit != \"px\" and $fs-unit != \"rem\" or $fs == 0 {\n font-size: #{$fs}#{$rfs-suffix};\n }\n @else {\n // Variables for storing static and fluid rescaling\n $rfs-static: null;\n $rfs-fluid: null;\n\n // Remove px-unit from $fs for calculations\n @if $fs-unit == \"px\" {\n $fs: $fs / ($fs * 0 + 1);\n }\n @else if $fs-unit == \"rem\" {\n $fs: $fs / ($fs * 0 + 1 / $rfs-rem-value);\n }\n\n // Set default font-size\n @if $rfs-font-size-unit == rem {\n $rfs-static: #{$fs / $rfs-rem-value}rem#{$rfs-suffix};\n }\n @else if $rfs-font-size-unit == px {\n $rfs-static: #{$fs}px#{$rfs-suffix};\n }\n @else {\n @error \"`#{$rfs-font-size-unit}` is not a valid unit for $rfs-font-size-unit. 
Use `px` or `rem`.\";\n }\n\n // Only add media query if font-size is bigger as the minimum font-size\n // If $rfs-factor == 1, no rescaling will take place\n @if $fs > $rfs-base-font-size and $enable-responsive-font-sizes {\n $min-width: null;\n $variable-unit: null;\n\n // Calculate minimum font-size for given font-size\n $fs-min: $rfs-base-font-size + ($fs - $rfs-base-font-size) / $rfs-factor;\n\n // Calculate difference between given font-size and minimum font-size for given font-size\n $fs-diff: $fs - $fs-min;\n\n // Base font-size formatting\n // No need to check if the unit is valid, because we did that before\n $min-width: if($rfs-font-size-unit == rem, #{$fs-min / $rfs-rem-value}rem, #{$fs-min}px);\n\n // If two-dimensional, use smallest of screen width and height\n $variable-unit: if($rfs-two-dimensional, vmin, vw);\n\n // Calculate the variable width between 0 and $rfs-breakpoint\n $variable-width: #{$fs-diff * 100 / $rfs-breakpoint}#{$variable-unit};\n\n // Set the calculated font-size.\n $rfs-fluid: calc(#{$min-width} + #{$variable-width}) #{$rfs-suffix};\n }\n\n // Rendering\n @if $rfs-fluid == null {\n // Only render static font-size if no fluid font-size is available\n font-size: $rfs-static;\n }\n @else {\n $mq-value: null;\n\n // RFS breakpoint formatting\n @if $rfs-breakpoint-unit == em or $rfs-breakpoint-unit == rem {\n $mq-value: #{$rfs-breakpoint / $rfs-rem-value}#{$rfs-breakpoint-unit};\n }\n @else if $rfs-breakpoint-unit == px {\n $mq-value: #{$rfs-breakpoint}px;\n }\n @else {\n @error \"`#{$rfs-breakpoint-unit}` is not a valid unit for $rfs-breakpoint-unit. Use `px`, `em` or `rem`.\";\n }\n\n @if $rfs-class == \"disable\" {\n // Adding an extra class increases specificity,\n // which prevents the media query to override the font size\n &,\n .disable-responsive-font-size &,\n &.disable-responsive-font-size {\n font-size: $rfs-static;\n }\n }\n @else {\n font-size: $rfs-static;\n }\n\n @if $rfs-two-dimensional {\n @media (max-width: #{$mq-value}), (max-height: #{$mq-value}) {\n @if $rfs-class == \"enable\" {\n .enable-responsive-font-size &,\n &.enable-responsive-font-size {\n font-size: $rfs-fluid;\n }\n }\n @else {\n font-size: $rfs-fluid;\n }\n\n @if $rfs-safari-iframe-resize-bug-fix {\n // stylelint-disable-next-line length-zero-no-unit\n min-width: 0vw;\n }\n }\n }\n @else {\n @media (max-width: #{$mq-value}) {\n @if $rfs-class == \"enable\" {\n .enable-responsive-font-size &,\n &.enable-responsive-font-size {\n font-size: $rfs-fluid;\n }\n }\n @else {\n font-size: $rfs-fluid;\n }\n\n @if $rfs-safari-iframe-resize-bug-fix {\n // stylelint-disable-next-line length-zero-no-unit\n min-width: 0vw;\n }\n }\n }\n }\n }\n}\n\n// The font-size & responsive-font-size mixin uses RFS to rescale font sizes\n@mixin font-size($fs, $important: false) {\n @include rfs($fs, $important);\n}\n\n@mixin responsive-font-size($fs, $important: false) {\n @include rfs($fs, $important);\n}\n","/*!\n * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/)\n * Copyright 2011-2019 The Bootstrap Authors\n * Copyright 2011-2019 Twitter, Inc.\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)\n * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md)\n */\n*,\n*::before,\n*::after {\n box-sizing: border-box;\n}\n\nhtml {\n font-family: sans-serif;\n line-height: 1.15;\n -webkit-text-size-adjust: 100%;\n -webkit-tap-highlight-color: rgba(0, 0, 0, 0);\n}\n\narticle, aside, figcaption, figure, footer, header, hgroup, 
main, nav, section {\n display: block;\n}\n\nbody {\n margin: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", Roboto, \"Helvetica Neue\", Arial, \"Noto Sans\", sans-serif, \"Apple Color Emoji\", \"Segoe UI Emoji\", \"Segoe UI Symbol\", \"Noto Color Emoji\";\n font-size: 1rem;\n font-weight: 400;\n line-height: 1.5;\n color: #212529;\n text-align: left;\n background-color: #fff;\n}\n\n[tabindex=\"-1\"]:focus {\n outline: 0 !important;\n}\n\nhr {\n box-sizing: content-box;\n height: 0;\n overflow: visible;\n}\n\nh1, h2, h3, h4, h5, h6 {\n margin-top: 0;\n margin-bottom: 0.5rem;\n}\n\np {\n margin-top: 0;\n margin-bottom: 1rem;\n}\n\nabbr[title],\nabbr[data-original-title] {\n text-decoration: underline;\n text-decoration: underline dotted;\n cursor: help;\n border-bottom: 0;\n text-decoration-skip-ink: none;\n}\n\naddress {\n margin-bottom: 1rem;\n font-style: normal;\n line-height: inherit;\n}\n\nol,\nul,\ndl {\n margin-top: 0;\n margin-bottom: 1rem;\n}\n\nol ol,\nul ul,\nol ul,\nul ol {\n margin-bottom: 0;\n}\n\ndt {\n font-weight: 700;\n}\n\ndd {\n margin-bottom: .5rem;\n margin-left: 0;\n}\n\nblockquote {\n margin: 0 0 1rem;\n}\n\nb,\nstrong {\n font-weight: bolder;\n}\n\nsmall {\n font-size: 80%;\n}\n\nsub,\nsup {\n position: relative;\n font-size: 75%;\n line-height: 0;\n vertical-align: baseline;\n}\n\nsub {\n bottom: -.25em;\n}\n\nsup {\n top: -.5em;\n}\n\na {\n color: #007bff;\n text-decoration: none;\n background-color: transparent;\n}\n\na:hover {\n color: #0056b3;\n text-decoration: underline;\n}\n\na:not([href]):not([tabindex]) {\n color: inherit;\n text-decoration: none;\n}\n\na:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus {\n color: inherit;\n text-decoration: none;\n}\n\na:not([href]):not([tabindex]):focus {\n outline: 0;\n}\n\npre,\ncode,\nkbd,\nsamp {\n font-family: SFMono-Regular, Menlo, Monaco, Consolas, \"Liberation Mono\", \"Courier New\", monospace;\n font-size: 1em;\n}\n\npre {\n margin-top: 0;\n margin-bottom: 1rem;\n overflow: auto;\n}\n\nfigure {\n margin: 0 0 1rem;\n}\n\nimg {\n vertical-align: middle;\n border-style: none;\n}\n\nsvg {\n overflow: hidden;\n vertical-align: middle;\n}\n\ntable {\n border-collapse: collapse;\n}\n\ncaption {\n padding-top: 0.75rem;\n padding-bottom: 0.75rem;\n color: #6c757d;\n text-align: left;\n caption-side: bottom;\n}\n\nth {\n text-align: inherit;\n}\n\nlabel {\n display: inline-block;\n margin-bottom: 0.5rem;\n}\n\nbutton {\n border-radius: 0;\n}\n\nbutton:focus {\n outline: 1px dotted;\n outline: 5px auto -webkit-focus-ring-color;\n}\n\ninput,\nbutton,\nselect,\noptgroup,\ntextarea {\n margin: 0;\n font-family: inherit;\n font-size: inherit;\n line-height: inherit;\n}\n\nbutton,\ninput {\n overflow: visible;\n}\n\nbutton,\nselect {\n text-transform: none;\n}\n\nselect {\n word-wrap: normal;\n}\n\nbutton,\n[type=\"button\"],\n[type=\"reset\"],\n[type=\"submit\"] {\n -webkit-appearance: button;\n}\n\nbutton:not(:disabled),\n[type=\"button\"]:not(:disabled),\n[type=\"reset\"]:not(:disabled),\n[type=\"submit\"]:not(:disabled) {\n cursor: pointer;\n}\n\nbutton::-moz-focus-inner,\n[type=\"button\"]::-moz-focus-inner,\n[type=\"reset\"]::-moz-focus-inner,\n[type=\"submit\"]::-moz-focus-inner {\n padding: 0;\n border-style: none;\n}\n\ninput[type=\"radio\"],\ninput[type=\"checkbox\"] {\n box-sizing: border-box;\n padding: 0;\n}\n\ninput[type=\"date\"],\ninput[type=\"time\"],\ninput[type=\"datetime-local\"],\ninput[type=\"month\"] {\n -webkit-appearance: listbox;\n}\n\ntextarea {\n overflow: 
auto;\n resize: vertical;\n}\n\nfieldset {\n min-width: 0;\n padding: 0;\n margin: 0;\n border: 0;\n}\n\nlegend {\n display: block;\n width: 100%;\n max-width: 100%;\n padding: 0;\n margin-bottom: .5rem;\n font-size: 1.5rem;\n line-height: inherit;\n color: inherit;\n white-space: normal;\n}\n\nprogress {\n vertical-align: baseline;\n}\n\n[type=\"number\"]::-webkit-inner-spin-button,\n[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n[type=\"search\"] {\n outline-offset: -2px;\n -webkit-appearance: none;\n}\n\n[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n::-webkit-file-upload-button {\n font: inherit;\n -webkit-appearance: button;\n}\n\noutput {\n display: inline-block;\n}\n\nsummary {\n display: list-item;\n cursor: pointer;\n}\n\ntemplate {\n display: none;\n}\n\n[hidden] {\n display: none !important;\n}\n\n/*# sourceMappingURL=bootstrap-reboot.css.map */","// Hover mixin and `$enable-hover-media-query` are deprecated.\n//\n// Originally added during our alphas and maintained during betas, this mixin was\n// designed to prevent `:hover` stickiness on iOS-an issue where hover styles\n// would persist after initial touch.\n//\n// For backward compatibility, we've kept these mixins and updated them to\n// always return their regular pseudo-classes instead of a shimmed media query.\n//\n// Issue: https://github.com/twbs/bootstrap/issues/25195\n\n@mixin hover {\n &:hover { @content; }\n}\n\n@mixin hover-focus {\n &:hover,\n &:focus {\n @content;\n }\n}\n\n@mixin plain-hover-focus {\n &,\n &:hover,\n &:focus {\n @content;\n }\n}\n\n@mixin hover-focus-active {\n &:hover,\n &:focus,\n &:active {\n @content;\n }\n}\n"]} -------------------------------------------------------------------------------- /static/vendor/jquery-easing/jquery.easing.compatibility.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Easing Compatibility v1 - http://gsgd.co.uk/sandbox/jquery/easing 3 | * 4 | * Adds compatibility for applications that use the pre 1.2 easing names 5 | * 6 | * Copyright (c) 2007 George Smith 7 | * Licensed under the MIT License: 8 | * http://www.opensource.org/licenses/mit-license.php 9 | */ 10 | 11 | (function($){ 12 | $.extend( $.easing, 13 | { 14 | easeIn: function (x, t, b, c, d) { 15 | return $.easing.easeInQuad(x, t, b, c, d); 16 | }, 17 | easeOut: function (x, t, b, c, d) { 18 | return $.easing.easeOutQuad(x, t, b, c, d); 19 | }, 20 | easeInOut: function (x, t, b, c, d) { 21 | return $.easing.easeInOutQuad(x, t, b, c, d); 22 | }, 23 | expoin: function(x, t, b, c, d) { 24 | return $.easing.easeInExpo(x, t, b, c, d); 25 | }, 26 | expoout: function(x, t, b, c, d) { 27 | return $.easing.easeOutExpo(x, t, b, c, d); 28 | }, 29 | expoinout: function(x, t, b, c, d) { 30 | return $.easing.easeInOutExpo(x, t, b, c, d); 31 | }, 32 | bouncein: function(x, t, b, c, d) { 33 | return $.easing.easeInBounce(x, t, b, c, d); 34 | }, 35 | bounceout: function(x, t, b, c, d) { 36 | return $.easing.easeOutBounce(x, t, b, c, d); 37 | }, 38 | bounceinout: function(x, t, b, c, d) { 39 | return $.easing.easeInOutBounce(x, t, b, c, d); 40 | }, 41 | elasin: function(x, t, b, c, d) { 42 | return $.easing.easeInElastic(x, t, b, c, d); 43 | }, 44 | elasout: function(x, t, b, c, d) { 45 | return $.easing.easeOutElastic(x, t, b, c, d); 46 | }, 47 | elasinout: function(x, t, b, c, d) { 48 | return $.easing.easeInOutElastic(x, t, b, c, d); 49 | }, 50 | backin: function(x, t, b, c, d) { 51 | return $.easing.easeInBack(x, t, 
b, c, d); 52 | }, 53 | backout: function(x, t, b, c, d) { 54 | return $.easing.easeOutBack(x, t, b, c, d); 55 | }, 56 | backinout: function(x, t, b, c, d) { 57 | return $.easing.easeInOutBack(x, t, b, c, d); 58 | } 59 | });})(jQuery); 60 | -------------------------------------------------------------------------------- /static/vendor/jquery-easing/jquery.easing.js: -------------------------------------------------------------------------------- 1 | /* 2 | * jQuery Easing v1.4.1 - http://gsgd.co.uk/sandbox/jquery/easing/ 3 | * Open source under the BSD License. 4 | * Copyright © 2008 George McGinley Smith 5 | * All rights reserved. 6 | * https://raw.github.com/gdsmith/jquery-easing/master/LICENSE 7 | */ 8 | 9 | (function (factory) { 10 | if (typeof define === "function" && define.amd) { 11 | define(['jquery'], function ($) { 12 | return factory($); 13 | }); 14 | } else if (typeof module === "object" && typeof module.exports === "object") { 15 | exports = factory(require('jquery')); 16 | } else { 17 | factory(jQuery); 18 | } 19 | })(function($){ 20 | 21 | // Preserve the original jQuery "swing" easing as "jswing" 22 | $.easing.jswing = $.easing.swing; 23 | 24 | var pow = Math.pow, 25 | sqrt = Math.sqrt, 26 | sin = Math.sin, 27 | cos = Math.cos, 28 | PI = Math.PI, 29 | c1 = 1.70158, 30 | c2 = c1 * 1.525, 31 | c3 = c1 + 1, 32 | c4 = ( 2 * PI ) / 3, 33 | c5 = ( 2 * PI ) / 4.5; 34 | 35 | // x is the fraction of animation progress, in the range 0..1 36 | function bounceOut(x) { 37 | var n1 = 7.5625, 38 | d1 = 2.75; 39 | if ( x < 1/d1 ) { 40 | return n1*x*x; 41 | } else if ( x < 2/d1 ) { 42 | return n1*(x-=(1.5/d1))*x + 0.75; 43 | } else if ( x < 2.5/d1 ) { 44 | return n1*(x-=(2.25/d1))*x + 0.9375; 45 | } else { 46 | return n1*(x-=(2.625/d1))*x + 0.984375; 47 | } 48 | } 49 | 50 | $.extend( $.easing, 51 | { 52 | def: 'easeOutQuad', 53 | swing: function (x) { 54 | return $.easing[$.easing.def](x); 55 | }, 56 | easeInQuad: function (x) { 57 | return x * x; 58 | }, 59 | easeOutQuad: function (x) { 60 | return 1 - ( 1 - x ) * ( 1 - x ); 61 | }, 62 | easeInOutQuad: function (x) { 63 | return x < 0.5 ? 64 | 2 * x * x : 65 | 1 - pow( -2 * x + 2, 2 ) / 2; 66 | }, 67 | easeInCubic: function (x) { 68 | return x * x * x; 69 | }, 70 | easeOutCubic: function (x) { 71 | return 1 - pow( 1 - x, 3 ); 72 | }, 73 | easeInOutCubic: function (x) { 74 | return x < 0.5 ? 75 | 4 * x * x * x : 76 | 1 - pow( -2 * x + 2, 3 ) / 2; 77 | }, 78 | easeInQuart: function (x) { 79 | return x * x * x * x; 80 | }, 81 | easeOutQuart: function (x) { 82 | return 1 - pow( 1 - x, 4 ); 83 | }, 84 | easeInOutQuart: function (x) { 85 | return x < 0.5 ? 86 | 8 * x * x * x * x : 87 | 1 - pow( -2 * x + 2, 4 ) / 2; 88 | }, 89 | easeInQuint: function (x) { 90 | return x * x * x * x * x; 91 | }, 92 | easeOutQuint: function (x) { 93 | return 1 - pow( 1 - x, 5 ); 94 | }, 95 | easeInOutQuint: function (x) { 96 | return x < 0.5 ? 97 | 16 * x * x * x * x * x : 98 | 1 - pow( -2 * x + 2, 5 ) / 2; 99 | }, 100 | easeInSine: function (x) { 101 | return 1 - cos( x * PI/2 ); 102 | }, 103 | easeOutSine: function (x) { 104 | return sin( x * PI/2 ); 105 | }, 106 | easeInOutSine: function (x) { 107 | return -( cos( PI * x ) - 1 ) / 2; 108 | }, 109 | easeInExpo: function (x) { 110 | return x === 0 ? 0 : pow( 2, 10 * x - 10 ); 111 | }, 112 | easeOutExpo: function (x) { 113 | return x === 1 ? 1 : 1 - pow( 2, -10 * x ); 114 | }, 115 | easeInOutExpo: function (x) { 116 | return x === 0 ? 0 : x === 1 ? 1 : x < 0.5 ? 
117 | pow( 2, 20 * x - 10 ) / 2 : 118 | ( 2 - pow( 2, -20 * x + 10 ) ) / 2; 119 | }, 120 | easeInCirc: function (x) { 121 | return 1 - sqrt( 1 - pow( x, 2 ) ); 122 | }, 123 | easeOutCirc: function (x) { 124 | return sqrt( 1 - pow( x - 1, 2 ) ); 125 | }, 126 | easeInOutCirc: function (x) { 127 | return x < 0.5 ? 128 | ( 1 - sqrt( 1 - pow( 2 * x, 2 ) ) ) / 2 : 129 | ( sqrt( 1 - pow( -2 * x + 2, 2 ) ) + 1 ) / 2; 130 | }, 131 | easeInElastic: function (x) { 132 | return x === 0 ? 0 : x === 1 ? 1 : 133 | -pow( 2, 10 * x - 10 ) * sin( ( x * 10 - 10.75 ) * c4 ); 134 | }, 135 | easeOutElastic: function (x) { 136 | return x === 0 ? 0 : x === 1 ? 1 : 137 | pow( 2, -10 * x ) * sin( ( x * 10 - 0.75 ) * c4 ) + 1; 138 | }, 139 | easeInOutElastic: function (x) { 140 | return x === 0 ? 0 : x === 1 ? 1 : x < 0.5 ? 141 | -( pow( 2, 20 * x - 10 ) * sin( ( 20 * x - 11.125 ) * c5 )) / 2 : 142 | pow( 2, -20 * x + 10 ) * sin( ( 20 * x - 11.125 ) * c5 ) / 2 + 1; 143 | }, 144 | easeInBack: function (x) { 145 | return c3 * x * x * x - c1 * x * x; 146 | }, 147 | easeOutBack: function (x) { 148 | return 1 + c3 * pow( x - 1, 3 ) + c1 * pow( x - 1, 2 ); 149 | }, 150 | easeInOutBack: function (x) { 151 | return x < 0.5 ? 152 | ( pow( 2 * x, 2 ) * ( ( c2 + 1 ) * 2 * x - c2 ) ) / 2 : 153 | ( pow( 2 * x - 2, 2 ) *( ( c2 + 1 ) * ( x * 2 - 2 ) + c2 ) + 2 ) / 2; 154 | }, 155 | easeInBounce: function (x) { 156 | return 1 - bounceOut( 1 - x ); 157 | }, 158 | easeOutBounce: bounceOut, 159 | easeInOutBounce: function (x) { 160 | return x < 0.5 ? 161 | ( 1 - bounceOut( 1 - 2 * x ) ) / 2 : 162 | ( 1 + bounceOut( 2 * x - 1 ) ) / 2; 163 | } 164 | }); 165 | 166 | }); 167 | -------------------------------------------------------------------------------- /static/vendor/jquery-easing/jquery.easing.min.js: -------------------------------------------------------------------------------- 1 | (function(factory){if(typeof define==="function"&&define.amd){define(["jquery"],function($){return factory($)})}else if(typeof module==="object"&&typeof module.exports==="object"){exports=factory(require("jquery"))}else{factory(jQuery)}})(function($){$.easing.jswing=$.easing.swing;var pow=Math.pow,sqrt=Math.sqrt,sin=Math.sin,cos=Math.cos,PI=Math.PI,c1=1.70158,c2=c1*1.525,c3=c1+1,c4=2*PI/3,c5=2*PI/4.5;function bounceOut(x){var n1=7.5625,d1=2.75;if(x<1/d1){return n1*x*x}else if(x<2/d1){return n1*(x-=1.5/d1)*x+.75}else if(x<2.5/d1){return n1*(x-=2.25/d1)*x+.9375}else{return n1*(x-=2.625/d1)*x+.984375}}$.extend($.easing,{def:"easeOutQuad",swing:function(x){return $.easing[$.easing.def](x)},easeInQuad:function(x){return x*x},easeOutQuad:function(x){return 1-(1-x)*(1-x)},easeInOutQuad:function(x){return x<.5?2*x*x:1-pow(-2*x+2,2)/2},easeInCubic:function(x){return x*x*x},easeOutCubic:function(x){return 1-pow(1-x,3)},easeInOutCubic:function(x){return x<.5?4*x*x*x:1-pow(-2*x+2,3)/2},easeInQuart:function(x){return x*x*x*x},easeOutQuart:function(x){return 1-pow(1-x,4)},easeInOutQuart:function(x){return x<.5?8*x*x*x*x:1-pow(-2*x+2,4)/2},easeInQuint:function(x){return x*x*x*x*x},easeOutQuint:function(x){return 1-pow(1-x,5)},easeInOutQuint:function(x){return x<.5?16*x*x*x*x*x:1-pow(-2*x+2,5)/2},easeInSine:function(x){return 1-cos(x*PI/2)},easeOutSine:function(x){return sin(x*PI/2)},easeInOutSine:function(x){return-(cos(PI*x)-1)/2},easeInExpo:function(x){return x===0?0:pow(2,10*x-10)},easeOutExpo:function(x){return x===1?1:1-pow(2,-10*x)},easeInOutExpo:function(x){return x===0?0:x===1?1:x<.5?pow(2,20*x-10)/2:(2-pow(2,-20*x+10))/2},easeInCirc:function(x){return 
1-sqrt(1-pow(x,2))},easeOutCirc:function(x){return sqrt(1-pow(x-1,2))},easeInOutCirc:function(x){return x<.5?(1-sqrt(1-pow(2*x,2)))/2:(sqrt(1-pow(-2*x+2,2))+1)/2},easeInElastic:function(x){return x===0?0:x===1?1:-pow(2,10*x-10)*sin((x*10-10.75)*c4)},easeOutElastic:function(x){return x===0?0:x===1?1:pow(2,-10*x)*sin((x*10-.75)*c4)+1},easeInOutElastic:function(x){return x===0?0:x===1?1:x<.5?-(pow(2,20*x-10)*sin((20*x-11.125)*c5))/2:pow(2,-20*x+10)*sin((20*x-11.125)*c5)/2+1},easeInBack:function(x){return c3*x*x*x-c1*x*x},easeOutBack:function(x){return 1+c3*pow(x-1,3)+c1*pow(x-1,2)},easeInOutBack:function(x){return x<.5?pow(2*x,2)*((c2+1)*2*x-c2)/2:(pow(2*x-2,2)*((c2+1)*(x*2-2)+c2)+2)/2},easeInBounce:function(x){return 1-bounceOut(1-x)},easeOutBounce:bounceOut,easeInOutBounce:function(x){return x<.5?(1-bounceOut(1-2*x))/2:(1+bounceOut(2*x-1))/2}})}); -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Pytorch - Text recognition tool 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 51 | 52 |
53 |
54 |

PyTorch Text Recognition Tool

55 |

CRAFT- and CRNN-based tool.

56 | 57 |
58 |
59 | 60 | 61 |
62 | 63 | 64 |
65 |
66 | 67 | 68 |
69 | 70 |
71 |

Drag an image

72 |
73 |
74 |
75 | your image 76 |
77 | 78 |
79 |
80 |
81 |
82 | 83 |

{% if label %} {{ label }} {% endif %}

84 |
85 | 86 | 87 |
88 |
89 |
90 |
91 |

Project description

92 |

Two pretrained neural networks work together: the first detects boxes that contain text, and the second recognizes the word inside each box. This makes it possible to feed in complex, high-resolution documents and extract the text from them (see the usage sketch after the links below).

93 |
    94 |
• I decided to beautify this project with a page template
  • 95 |
  • CRAFT Paper
  • 96 |
  • CRNN Paper
  • 97 |
  • Example
  • 98 |
99 |
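A minimal usage sketch of how the two pretrained models can be chained; the actual wiring is in app.py (not shown in this listing), and the input path, the axis-aligned cropping, and the coordinate handling below are illustrative assumptions rather than the project's exact code:

from PIL import Image
from text_reco.models.craft.craft_reader import CraftReader
from text_reco.models.crnn.crnn_run import CRNNReader

craft = CraftReader('example.jpg')      # hypothetical input image path
boxes, _ = craft.boxes_detect()         # CRAFT proposes text regions as 4x2 corner arrays
crnn = CRNNReader()                     # loads the pretrained CRNN and its CTC label converter
for box in boxes:
    xs, ys = box[:, 0].astype(int), box[:, 1].astype(int)
    crop = craft.image[max(ys.min(), 0):ys.max(), max(xs.min(), 0):xs.max()]  # rough axis-aligned crop
    patch = Image.fromarray(crop.astype('uint8')).convert('L')                # grayscale, as CRNN expects
    tensor = crnn.transformer(patch).unsqueeze(0)                             # resizeNormalize((100, 32))
    print(crnn.get_predictions(tensor))

Each printed result is the CTC-decoded string for one detected box; in the web app the per-box results would be collected and rendered in the template instead of printed.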
100 |
101 |
102 |
103 | 104 |
105 | 106 | 107 | 108 |
109 | 110 |
111 |
112 |
113 |
114 |

Drop me a line

115 | 116 | 117 | 118 | 119 |
120 |
121 |
122 |
123 | 124 | 125 |
126 |
127 |

Copyright ©

128 |
129 | 130 |
131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /templates/index_old.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | File upload input 6 | 7 | 8 | 9 | 10 | 11 |
12 |
13 | 14 | 15 |
16 | 17 |
18 |

Drag and drop a file, or select "add Image"

19 |
20 |
21 |
22 | your image 23 |
24 | 25 |
26 |
27 |
28 | 29 | {% if label %} {{ label }} {% endif %} 30 | 31 |
32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /text_reco.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 1.0 2 | Name: text-reco 3 | Version: 0.2a0 4 | Summary: UNKNOWN 5 | Home-page: UNKNOWN 6 | Author: UNKNOWN 7 | Author-email: UNKNOWN 8 | License: UNKNOWN 9 | Description: UNKNOWN 10 | Platform: UNKNOWN 11 | -------------------------------------------------------------------------------- /text_reco.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | README.md 2 | setup.py 3 | base/__init__.py 4 | base/watchdog.py 5 | text_reco/__init__.py 6 | text_reco.egg-info/PKG-INFO 7 | text_reco.egg-info/SOURCES.txt 8 | text_reco.egg-info/dependency_links.txt 9 | text_reco.egg-info/top_level.txt 10 | text_reco/boxdetect/__init__.py 11 | text_reco/boxdetect/box_detection.py 12 | text_reco/models/__init__.py 13 | text_reco/models/craft/__init__.py 14 | text_reco/models/craft/craft.py 15 | text_reco/models/craft/craft_reader.py 16 | text_reco/models/craft/craft_utils.py 17 | text_reco/models/craft/imgproc.py 18 | text_reco/models/craft/basenet/__init__.py 19 | text_reco/models/craft/basenet/vgg16_bn.py 20 | text_reco/models/crnn/__init__.py 21 | text_reco/models/crnn/crnn.py 22 | text_reco/models/crnn/crnn_run.py 23 | text_reco/models/crnn/preprocess.py 24 | text_reco/models/crnn/utils.py -------------------------------------------------------------------------------- /text_reco.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /text_reco.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | base 2 | text_reco 3 | -------------------------------------------------------------------------------- /text_reco/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/__init__.py -------------------------------------------------------------------------------- /text_reco/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/boxdetect/.box_detection.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/boxdetect/.box_detection.py.swp -------------------------------------------------------------------------------- /text_reco/boxdetect/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/boxdetect/__init__.py -------------------------------------------------------------------------------- /text_reco/boxdetect/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/boxdetect/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/boxdetect/__pycache__/box_detection.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/boxdetect/__pycache__/box_detection.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/boxdetect/box_detection.py: -------------------------------------------------------------------------------- 1 | # Simple logic: CRAFT boxes on input - preprocess - sliced image regions on output 2 | import json 3 | import cv2 4 | import numpy as np 5 | 6 | class BoxDetect(): 7 | 8 | def __init__(self, boxes): 9 | self.boxes = boxes 10 | self.n_boxes = len(self.boxes) 11 | 12 | def preprocess(self, image): 13 | # Debug helper: draw each detected box on the image 14 | print("Detected boxes:") 15 | print(self.boxes) 16 | for el in self.boxes: 17 | image = cv2.rectangle(image, (el[0], el[1]), (el[2], el[3]), (0, 255, 0), 2) 18 | return image 19 | 20 | @staticmethod 21 | def load_box(path): 22 | with open(path, 'r') as outfile: 23 | file_ = json.load(outfile) 24 | return file_ 25 | 26 | @staticmethod 27 | def preprocess_box(file_, img): 28 | for el in file_.keys(): 29 | x, y, w, h = cv2.boundingRect(np.array(file_[el])) 30 | roi = img[y:y+h, x:x+w] 31 | cv2.imshow('image', roi) 32 | cv2.waitKey(0) 33 | 34 | -------------------------------------------------------------------------------- /text_reco/cleaning/.image_saver.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/cleaning/.image_saver.py.swp -------------------------------------------------------------------------------- /text_reco/dist/text-reco-0.1.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/dist/text-reco-0.1.tar.gz -------------------------------------------------------------------------------- /text_reco/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/__init__.py -------------------------------------------------------------------------------- /text_reco/models/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swi -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swj: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swj -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swk -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swl -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swm -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swn -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swo -------------------------------------------------------------------------------- /text_reco/models/craft/.craft_reader.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/.craft_reader.py.swp -------------------------------------------------------------------------------- /text_reco/models/craft/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__init__.py -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/craft.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/craft.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/craft_reader.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/craft_reader.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/craft_utils.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/craft_utils.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/file_utils.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/file_utils.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/__pycache__/imgproc.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/__pycache__/imgproc.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/basenet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/basenet/__init__.py -------------------------------------------------------------------------------- /text_reco/models/craft/basenet/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/basenet/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/basenet/__pycache__/vgg16_bn.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/basenet/__pycache__/vgg16_bn.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/craft/basenet/vgg16_bn.py: -------------------------------------------------------------------------------- 1 | # https://github.com/clovaai/CRAFT-pytorch/blob/master/basenet/vgg16_bn.py 2 | 3 | 4 | # Imports 5 | 6 | from collections import namedtuple 7 | 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.init as init 11 | from torchvision import models 12 | from torchvision.models.vgg import model_urls 13 | 14 | def init_weights(modules): 15 | # https://prateekvjoshi.com/2016/03/29/understanding-xavier-initialization-in-deep-neural-networks/ 16 | for m in modules: 17 | if isinstance(m, nn.Conv2d): 18 | init.xavier_uniform_(m.weight.data) 19 | if m.bias is not None: 20 | m.bias.data.zero_() 21 | 22 | elif isinstance(m, nn.BatchNorm2d): 23 | m.weight.data.fill_(1) 24 | m.bias.data.zero_() 25 | 26 | elif isinstance(m, nn.Linear): 27 | m.weight.data.normal_(0, 0.01) 28 | m.bias.data.zero_() 29 | 30 | 31 | class vgg16_bn(torch.nn.Module): 32 | 33 | def __init__(self, pretrained = True, freeze = True): 34 | 
super(vgg16_bn, self).__init__() 35 | model_urls['vgg16_bn'] = model_urls['vgg16_bn'].replace('https://', 'http://') 36 | vgg_pretrained_features = models.vgg16_bn(pretrained=pretrained).features 37 | self.slice1 = torch.nn.Sequential() 38 | self.slice2 = torch.nn.Sequential() 39 | self.slice3 = torch.nn.Sequential() 40 | self.slice4 = torch.nn.Sequential() 41 | self.slice5 = torch.nn.Sequential() 42 | 43 | for x in range(12): 44 | self.slice1.add_module(str(x), vgg_pretrained_features[x]) 45 | for x in range(12, 19): 46 | self.slice2.add_module(str(x), vgg_pretrained_features[x]) 47 | for x in range(19, 29): 48 | self.slice3.add_module(str(x), vgg_pretrained_features[x]) 49 | for x in range(29, 39): 50 | self.slice4.add_module(str(x), vgg_pretrained_features[x]) 51 | 52 | self.slice5 = torch.nn.Sequential( 53 | nn.MaxPool2d(3, 1, 1), 54 | nn.Conv2d(512, 1024, kernel_size = 3, padding = 6, dilation = 6), 55 | nn.Conv2d(1024, 1024, 1)) 56 | 57 | if not pretrained: 58 | init_weights(self.slice1.modules()) 59 | init_weights(self.slice2.modules()) 60 | init_weights(self.slice3.modules()) 61 | init_weights(self.slice4.modules()) 62 | 63 | init_weights(self.slice5.modules()) 64 | 65 | 66 | if freeze: 67 | for param in self.slice1.parameters(): 68 | param.requires_grad = False 69 | 70 | 71 | # Define forward pass 72 | 73 | 74 | def forward(self, x): 75 | h = self.slice1(x) 76 | h_relu2_2 = h 77 | h = self.slice2(h) 78 | h_relu3_2 = h 79 | h = self.slice3(h) 80 | h_relu4_3 = h 81 | h = self.slice4(h) 82 | h_relu5_3 = h 83 | h = self.slice5(h) 84 | h_fc7 = h 85 | vgg_outputs = namedtuple("VggOutputs", ['fc7', 'relu5_3', 'relu4_3', 'relu3_2', 'relu2_2']) 86 | out = vgg_outputs(h_fc7 ,h_relu5_3, h_relu4_3, h_relu3_2, h_relu2_2) 87 | return out 88 | 89 | -------------------------------------------------------------------------------- /text_reco/models/craft/craft.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from text_reco.models.craft.basenet.vgg16_bn import vgg16_bn, init_weights 5 | 6 | class DoubleConv(nn.Module): 7 | def __init__(self, in_channel, mid_channel, out_channel): 8 | super(DoubleConv, self).__init__() 9 | 10 | self.conv = nn.Sequential( 11 | nn.Conv2d(in_channel + mid_channel, mid_channel, kernel_size = 1), 12 | nn.BatchNorm2d(mid_channel), 13 | nn.ReLU(inplace=True), 14 | nn.Conv2d(mid_channel, out_channel, kernel_size = 3, padding = 1), 15 | nn.BatchNorm2d(out_channel), 16 | nn.ReLU(inplace=True)) 17 | 18 | def forward(self, x): 19 | x = self.conv(x) 20 | return x 21 | 22 | class CRAFT(nn.Module): 23 | def __init__(self, pretrained=False, freeze=False): 24 | super(CRAFT, self).__init__() 25 | self.basenet = vgg16_bn(pretrained, freeze) 26 | self.upconv1 = DoubleConv(1024, 512, 256) 27 | self.upconv2 = DoubleConv(512, 256, 128) 28 | self.upconv3 = DoubleConv(256, 128, 64) 29 | self.upconv4 = DoubleConv(128, 64, 32) 30 | 31 | n_classes = 2 32 | self.conv_cls = nn.Sequential( 33 | nn.Conv2d(32, 32, 3, 1), 34 | nn.ReLU(inplace=True), 35 | nn.Conv2d(32, 32, 3, 1), 36 | nn.ReLU(inplace=True), 37 | nn.Conv2d(32, 16, 3, 1), 38 | nn.ReLU(inplace=True), 39 | nn.Conv2d(16, 16, 1), 40 | nn.ReLU(inplace=True), 41 | nn.Conv2d(16, n_classes, kernel_size=1),) 42 | 43 | init_weights(self.upconv1.modules()) 44 | init_weights(self.upconv2.modules()) 45 | init_weights(self.upconv3.modules()) 46 | init_weights(self.upconv4.modules()) 47 | init_weights(self.conv_cls.modules()) 48 | 49 
| def forward(self, x): 50 | sources = self.basenet(x) 51 | y = torch.cat([sources[0], sources[1]], dim=1) 52 | y = self.upconv1(y) 53 | 54 | y = F.interpolate(y, size = sources[2].size()[2:], mode = 'bilinear', align_corners=False) 55 | y = torch.cat([y, sources[2]], dim=1) 56 | y = self.upconv2(y) 57 | 58 | y = F.interpolate(y, size = sources[3].size()[2:], mode = 'bilinear', align_corners=False) 59 | y = torch.cat([y, sources[3]], dim=1) 60 | y = self.upconv3(y) 61 | 62 | y = F.interpolate(y, size = sources[4].size()[2:], mode = 'bilinear', align_corners=False) 63 | y = torch.cat([y, sources[4]], dim=1) 64 | feature = self.upconv4(y) 65 | y = self.conv_cls(feature) 66 | 67 | return y.permute(0, 2, 3, 1), feature 68 | 69 | -------------------------------------------------------------------------------- /text_reco/models/craft/craft_reader.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.backends.cudnn as cudnn 4 | from torch.autograd import Variable 5 | import text_reco.models.craft.craft_utils as craft_utils 6 | import text_reco.models.craft.imgproc as img_proc 7 | from text_reco.models.craft.craft import CRAFT 8 | from text_reco.boxdetect.box_detection import BoxDetect 9 | from text_reco.models.crnn.crnn_run import CRNNReader 10 | 11 | from PIL import Image 12 | 13 | import cv2 14 | from skimage import io 15 | import numpy as np 16 | from text_reco.models.craft.imgproc import ImageConvert 17 | import json 18 | import zipfile 19 | from collections import OrderedDict 20 | from skimage import io 21 | from skimage.transform import rescale, resize, downscale_local_mean 22 | 23 | class CraftReader(ImageConvert): 24 | def __init__(self, image): 25 | super(CraftReader, self).__init__(image) 26 | self.model_path = 'text_reco/models/craft/pretrain/craft_mlt_25k.pth' 27 | self.net = CRAFT() 28 | self.net.load_state_dict(self.copyStateDict(torch.load(self.model_path, map_location='cpu'))) 29 | self.net.eval() 30 | self.mag_ratio = 1 31 | self.square_size = 1280 32 | 33 | @staticmethod 34 | def copyStateDict(state_dict): 35 | if list(state_dict.keys())[0].startswith("module"): 36 | start_idx = 1 37 | else: 38 | start_idx = 0 39 | new_state_dict = OrderedDict() 40 | for k, v in state_dict.items(): 41 | name = ".".join(k.split(".")[start_idx:]) 42 | new_state_dict[name] = v 43 | return new_state_dict 44 | 45 | @staticmethod 46 | def str2bool(v): 47 | return v.lower() in ("yes", "y", "t", "1") 48 | 49 | def image_preprocess(self, image): 50 | image = self.normalizeMeanVariance(image) 51 | image = torch.from_numpy(image).permute(2, 0, 1) 52 | image = Variable(image.unsqueeze(0)) 53 | return image 54 | 55 | def boxes_detect(self): 56 | img_resized, target_ratio, size_heatmap = self.resize_aspect_ratio(self.image) 57 | ratio_h = ratio_w = 1 / target_ratio 58 | x = self.image_preprocess(img_resized) 59 | y, _ = self.net(x) 60 | score_text = y[0, :, :, 0].cpu().data.numpy() 61 | score_link = y[0, :, :, 1].cpu().data.numpy() 62 | boxes = craft_utils.getDetBoxes(textmap=score_text, linkmap=score_link, text_threshold=0.7, link_threshold=0.4, low_text=0.4) 63 | print("Number of boxes: {}".format(len(boxes))) 64 | boxes = craft_utils.adjustResultCoordinates(boxes, ratio_w, ratio_h) 65 | return boxes, img_resized 66 | 67 | -------------------------------------------------------------------------------- /text_reco/models/craft/craft_utils.py:
-------------------------------------------------------------------------------- 1 | """ 2 | https://raw.githubusercontent.com/clovaai/CRAFT-pytorch/master/craft_utils.py 3 | """ 4 | import numpy as np 5 | import math 6 | import cv2 7 | def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text): 8 | linkmap = linkmap.copy() 9 | textmap = textmap.copy() 10 | img_h, img_w = textmap.shape 11 | 12 | ret, text_score = cv2.threshold(textmap, low_text, 1, 0) 13 | ret, link_score = cv2.threshold(linkmap, link_threshold, 1, 0) 14 | text_score_comb = np.clip(text_score + link_score, 0, 1) 15 | nLabels, labels, stats, centroids = cv2.connectedComponentsWithStats(text_score_comb.astype(np.uint8), connectivity=4) 16 | det = [] 17 | mapper = [] 18 | for k in range(1,nLabels): 19 | size = stats[k, cv2.CC_STAT_AREA] 20 | if size < 10: continue 21 | if np.max(textmap[labels==k]) < text_threshold: continue 22 | segmap = np.zeros(textmap.shape, dtype=np.uint8) 23 | segmap[labels==k] = 255 24 | segmap[np.logical_and(link_score==1, text_score==0)] = 0 # remove link area 25 | x, y = stats[k, cv2.CC_STAT_LEFT], stats[k, cv2.CC_STAT_TOP] 26 | w, h = stats[k, cv2.CC_STAT_WIDTH], stats[k, cv2.CC_STAT_HEIGHT] 27 | niter = int(math.sqrt(size * min(w, h) / (w * h)) * 2) 28 | sx, ex, sy, ey = x - niter, x + w + niter + 1, y - niter, y + h + niter + 1 29 | if sx < 0 : sx = 0 30 | if sy < 0 : sy = 0 31 | if ex >= img_w: ex = img_w 32 | if ey >= img_h: ey = img_h 33 | kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(1 + niter, 1 + niter)) 34 | segmap[sy:ey, sx:ex] = cv2.dilate(segmap[sy:ey, sx:ex], kernel) 35 | np_contours = np.roll(np.array(np.where(segmap!=0)),1,axis=0).transpose().reshape(-1,2) 36 | rectangle = cv2.minAreaRect(np_contours) 37 | box = cv2.boxPoints(rectangle) 38 | w, h = np.linalg.norm(box[0] - box[1]), np.linalg.norm(box[1] - box[2]) 39 | box_ratio = max(w, h) / (min(w, h) + 1e-5) 40 | if abs(1 - box_ratio) <= 0.1: 41 | l, r = min(np_contours[:,0]), max(np_contours[:,0]) 42 | t, b = min(np_contours[:,1]), max(np_contours[:,1]) 43 | box = np.array([[l, t], [r, t], [r, b], [l, b]], dtype=np.float32) 44 | startidx = box.sum(axis=1).argmin() 45 | box = np.roll(box, 4-startidx, 0) 46 | box = np.array(box) 47 | det.append(box) 48 | mapper.append(k) 49 | return det, labels, mapper 50 | 51 | def getDetBoxes(textmap, linkmap, text_threshold, link_threshold, low_text): 52 | boxes, labels, mapper = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text) 53 | return boxes 54 | 55 | def adjustResultCoordinates(polys, ratio_w, ratio_h, ratio_net = 2): 56 | if len(polys) > 0: 57 | polys = np.array(polys) 58 | for k in range(len(polys)): 59 | if polys[k] is not None: 60 | polys[k] *= (ratio_w * ratio_net, ratio_h * ratio_net) 61 | return polys 62 | -------------------------------------------------------------------------------- /text_reco/models/craft/imgproc.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from skimage import io 3 | import cv2 4 | class ImageConvert(): 5 | def __init__(self, img_array, interpolation =cv2.INTER_LINEAR , square_size = 1280, mag_ratio=1): 6 | self.image = io.imread(img_array) 7 | self.image = self.image[:, :, :3] 8 | self.image = np.array(self.image) 9 | print("Shape of processed file {}".format(len(self.image))) 10 | self.mean = (0.485, 0.456, 0.406) 11 | self.variance = (0.229, 0.224, 0.225) 12 | self.square_size = square_size 13 | self.interpolation = interpolation 14 | 
self.mag_ratio = mag_ratio 15 | 16 | def normalizeMeanVariance(self,image): 17 | image = image.copy().astype(np.float32) 18 | image -= np.array([self.mean[0] * 255.0, self.mean[1] * 255.0, self.mean[2] * 255.0], dtype = np.float32) 19 | image /= np.array([self.variance[0] * 255.0, self.variance[1] * 255.0, self.variance[2] * 255.0], dtype = np.float32) 20 | return image 21 | 22 | def resize_aspect_ratio(self, image): 23 | height, width, channel = image.shape 24 | target_size = self.mag_ratio * max(height, width) 25 | if target_size > self.square_size: 26 | target_size = self.square_size 27 | 28 | ratio = target_size / max(height, width) 29 | target_h, target_w = int(height * ratio), int(width * ratio) 30 | proc = cv2.resize(self.image, (target_w, target_h), interpolation = cv2.INTER_LINEAR) 31 | 32 | target_h32, target_w32 = target_h, target_w 33 | if target_h % 32 != 0: 34 | target_h32 = target_h + (32 - target_h % 32) 35 | if target_w % 32 != 0: 36 | target_w32 = target_w + (32 - target_w % 32) 37 | 38 | resized = np.zeros((target_h32, target_w32, channel), dtype = np.float32) 39 | resized[0:target_h, 0:target_w, :] = proc 40 | target_h, target_w = target_h32, target_w32 41 | size_heatmap = (int(target_w/2), int(target_h/2)) 42 | return resized, ratio, size_heatmap 43 | 44 | def cvt2HeatmapImg(img): 45 | img = (np.clip(img, 0, 1) * 255).astype(np.uint8) 46 | img = cv2.applyColorMap(img, cv2.COLORMAP_JET) 47 | return img 48 | -------------------------------------------------------------------------------- /text_reco/models/craft/pretrain/craft_mlt_25k.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/craft/pretrain/craft_mlt_25k.pth -------------------------------------------------------------------------------- /text_reco/models/crnn/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__init__.py -------------------------------------------------------------------------------- /text_reco/models/crnn/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/crnn/__pycache__/crnn.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__pycache__/crnn.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/crnn/__pycache__/crnn_run.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__pycache__/crnn_run.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/crnn/__pycache__/preprocess.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__pycache__/preprocess.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/crnn/__pycache__/utils.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/__pycache__/utils.cpython-37.pyc -------------------------------------------------------------------------------- /text_reco/models/crnn/crnn.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | 3 | class BidirectionalLSTM(nn.Module): 4 | def __init__(self, _in, hidden, out): 5 | super(BidirectionalLSTM, self).__init__() 6 | self.rnn = nn.LSTM(_in, hidden, bidirectional=True) 7 | self.embedding = nn.Linear(hidden * 2, out) 8 | 9 | def forward(self, x): 10 | recurrent, _ = self.rnn(x) 11 | T, b, h = recurrent.size() 12 | t_rec = recurrent.view(T * b, h) 13 | output = self.embedding(t_rec) 14 | output = output.view(T, b, -1) 15 | return output 16 | 17 | class CRNN(nn.Module): 18 | def __init__(self, imgh, nc, nclass, nh, n_rnn=2, leakyReLU = False): 19 | super(CRNN, self).__init__() 20 | ks = [3, 3, 3, 3, 3, 3, 2] 21 | ps = [1, 1, 1, 1, 1, 1, 0] 22 | ss = [1, 1, 1, 1, 1, 1, 1] 23 | nm = [64, 128, 256, 256, 512, 512, 512] 24 | cnn = nn.Sequential() 25 | def convRelu(i, batchNormalization=False): 26 | nIn = nc if i == 0 else nm[i - 1] 27 | nOut = nm[i] 28 | cnn.add_module('conv{0}'.format(i), 29 | nn.Conv2d(nIn, nOut, ks[i], ss[i], ps[i])) 30 | if batchNormalization: 31 | cnn.add_module('batchnorm{0}'.format(i), nn.BatchNorm2d(nOut)) 32 | if leakyReLU: 33 | cnn.add_module('relu{0}'.format(i), 34 | nn.LeakyReLU(0.2, inplace=True)) 35 | else: 36 | cnn.add_module('relu{0}'.format(i), nn.ReLU(True)) 37 | convRelu(0) 38 | cnn.add_module('pooling{0}'.format(0), nn.MaxPool2d(2, 2)) # 64x16x64 39 | convRelu(1) 40 | cnn.add_module('pooling{0}'.format(1), nn.MaxPool2d(2, 2)) # 128x8x32 41 | convRelu(2, True) 42 | convRelu(3) 43 | cnn.add_module('pooling{0}'.format(2), 44 | nn.MaxPool2d((2, 2), (2, 1), (0, 1))) # 256x4x16 45 | convRelu(4, True) 46 | convRelu(5) 47 | cnn.add_module('pooling{0}'.format(3), 48 | nn.MaxPool2d((2, 2), (2, 1), (0, 1))) # 512x2x16 49 | convRelu(6, True) # 512x1x16 50 | self.cnn = cnn 51 | self.rnn = nn.Sequential( 52 | BidirectionalLSTM(512, nh, nh), 53 | BidirectionalLSTM(nh, nh, nclass)) 54 | 55 | def forward(self, input): 56 | conv = self.cnn(input) 57 | b, c, h, w = conv.size() 58 | assert h == 1, "the height of conv must be 1" 59 | conv = conv.squeeze(2) 60 | conv = conv.permute(2, 0, 1) # [w, b, c] 61 | output = self.rnn(conv) 62 | return output 63 | -------------------------------------------------------------------------------- /text_reco/models/crnn/crnn_run.py: -------------------------------------------------------------------------------- 1 | from PIL import Image 2 | from skimage import io 3 | import text_reco.models.crnn.crnn as crnn 4 | 5 | import torch 6 | from torch.autograd import Variable 7 | import text_reco.models.crnn.utils as utils 8 | import text_reco.models.crnn.preprocess as preprocess 9 | 10 | class CRNNReader(): 11 | def __init__(self, model_path= 'text_reco/models/crnn/pretrain/crnn.pth'): 12 | self.model_path = model_path 13 | self.model = crnn.CRNN(32, 1,37, 256) 14 | self.model = self.model.float() 15 | 
self.model.load_state_dict(torch.load(self.model_path, map_location='cpu')) 16 | self.model.eval() 17 | self.alphabet = '0123456789abcdefghijklmnopqrstuvwxyz' 18 | self.transformer = preprocess.resizeNormalize((100, 32)) 19 | self.converter = utils.strLabelConverter(self.alphabet) 20 | 21 | def get_predictions(self, img): 22 | self.model = self.model.float() 23 | img = img.float() 24 | predictions = self.model(img) 25 | _, predictions = predictions.max(2) 26 | predictions = predictions.transpose(1, 0).contiguous().view(-1) 27 | pred_size = Variable(torch.IntTensor([predictions.size(0)])) 28 | results = self.converter.decode(predictions.data, pred_size.data, raw=False) 29 | return results 30 | -------------------------------------------------------------------------------- /text_reco/models/crnn/preprocess.py: -------------------------------------------------------------------------------- 1 | import skimage 2 | 3 | import random 4 | import torch 5 | from torch.utils.data import Dataset 6 | from torch.utils.data import sampler 7 | import torchvision.transforms as transforms 8 | import lmdb 9 | import six 10 | import sys 11 | from PIL import Image 12 | import numpy as np 13 | 14 | class lmdbDataset(Dataset): 15 | def __init__(self, root=None, transform=None, target_transform=None): 16 | self.env = lmdb.open( 17 | root, 18 | max_readers=1, 19 | readonly=True, 20 | lock=False, 21 | readahead=False, 22 | meminit=False) 23 | 24 | if not self.env: 25 | print('cannot create lmdb from %s' % (root)) 26 | sys.exit(0) 27 | 28 | with self.env.begin(write=False) as txn: 29 | nSamples = int(txn.get('num-samples')) 30 | self.nSamples = nSamples 31 | self.transform = transform 32 | self.target_transform = target_transform 33 | 34 | def __len__(self): 35 | return self.nSamples 36 | 37 | def __getitem__(self, index): 38 | assert index <= len(self), 'index range error' 39 | index += 1 40 | with self.env.begin(write=False) as txn: 41 | img_key = 'image-%09d' % index 42 | imgbuf = txn.get(img_key) 43 | buf = six.BytesIO() 44 | buf.write(imgbuf) 45 | buf.seek(0) 46 | try: 47 | img = Image.open(buf).convert('L') 48 | except IOError: 49 | print('Corrupted image for %d' % index) 50 | return self[index + 1] 51 | 52 | if self.transform is not None: 53 | img = self.transform(img) 54 | label_key = 'label-%09d' % index 55 | label = str(txn.get(label_key)) 56 | if self.target_transform is not None: 57 | label = self.target_transform(label) 58 | return (img, label) 59 | 60 | class resizeNormalize(object): 61 | def __init__(self, size, interpolation=Image.BILINEAR): 62 | self.size = size 63 | self.interpolation = interpolation 64 | self.toTensor = transforms.ToTensor() 65 | 66 | def __call__(self, img): 67 | img = img.resize(self.size, self.interpolation) 68 | img = self.toTensor(img) 69 | img.sub_(0.5).div_(0.5) 70 | return img 71 | 72 | class randomSequentialSampler(sampler.Sampler): 73 | def __init__(self, data_source, batch_size): 74 | self.num_samples = len(data_source) 75 | self.batch_size = batch_size 76 | 77 | def __iter__(self): 78 | n_batch = len(self) // self.batch_size 79 | tail = len(self) % self.batch_size 80 | index = torch.LongTensor(len(self)).fill_(0) 81 | for i in range(n_batch): 82 | random_start = random.randint(0, len(self) - self.batch_size) 83 | batch_index = random_start + torch.range(0, self.batch_size - 1) 84 | index[i * self.batch_size:(i + 1) * self.batch_size] = batch_index 85 | if tail: 86 | random_start = random.randint(0, len(self) - self.batch_size) 87 | tail_index = random_start + torch.range(0, tail - 1) 88
| index[(i + 1) * self.batch_size:] = tail_index 89 | 90 | return iter(index) 91 | 92 | def __len__(self): 93 | return self.num_samples 94 | 95 | 96 | class alignCollate(object): 97 | 98 | def __init__(self, imgH=32, imgW=100, keep_ratio=False, min_ratio=1): 99 | self.imgH = imgH 100 | self.imgW = imgW 101 | self.keep_ratio = keep_ratio 102 | self.min_ratio = min_ratio 103 | 104 | def __call__(self, batch): 105 | images, labels = zip(*batch) 106 | 107 | imgH = self.imgH 108 | imgW = self.imgW 109 | if self.keep_ratio: 110 | ratios = [] 111 | for image in images: 112 | w, h = image.size 113 | ratios.append(w / float(h)) 114 | ratios.sort() 115 | max_ratio = ratios[-1] 116 | imgW = int(np.floor(max_ratio * imgH)) 117 | imgW = max(imgH * self.min_ratio, imgW) # assure imgH >= imgW 118 | 119 | transform = resizeNormalize((imgW, imgH)) 120 | images = [transform(image) for image in images] 121 | images = torch.cat([t.unsqueeze(0) for t in images], 0) 122 | 123 | return images, labels 124 | -------------------------------------------------------------------------------- /text_reco/models/crnn/pretrain/crnn.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s3nh/text-detector/c2274f6b11057ecae47512dcc762c875e2f79611/text_reco/models/crnn/pretrain/crnn.pth -------------------------------------------------------------------------------- /text_reco/models/crnn/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torch.autograd import Variable 4 | import collections 5 | 6 | 7 | class strLabelConverter(object): 8 | """Convert between str and label. 9 | 10 | NOTE: 11 | Insert `blank` to the alphabet for CTC. 12 | 13 | Args: 14 | alphabet (str): set of the possible characters. 15 | ignore_case (bool, default=True): whether or not to ignore all of the case. 16 | """ 17 | 18 | def __init__(self, alphabet, ignore_case=True): 19 | self._ignore_case = ignore_case 20 | if self._ignore_case: 21 | alphabet = alphabet.lower() 22 | self.alphabet = alphabet + '-' # for `-1` index 23 | 24 | self.dict = {} 25 | for i, char in enumerate(alphabet): 26 | # NOTE: 0 is reserved for 'blank' required by wrap_ctc 27 | self.dict[char] = i + 1 28 | 29 | def encode(self, text): 30 | """Support batch or single str. 31 | 32 | Args: 33 | text (str or list of str): texts to convert. 34 | 35 | Returns: 36 | torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. 37 | torch.IntTensor [n]: length of each text. 38 | """ 39 | if isinstance(text, str): 40 | text = [ 41 | self.dict[char.lower() if self._ignore_case else char] 42 | for char in text 43 | ] 44 | length = [len(text)] 45 | elif isinstance(text, collections.Iterable): 46 | length = [len(s) for s in text] 47 | text = ''.join(text) 48 | text, _ = self.encode(text) 49 | return (torch.IntTensor(text), torch.IntTensor(length)) 50 | 51 | def decode(self, t, length, raw=False): 52 | """Decode encoded texts back into strs. 53 | 54 | Args: 55 | torch.IntTensor [length_0 + length_1 + ... length_{n - 1}]: encoded texts. 56 | torch.IntTensor [n]: length of each text. 57 | 58 | Raises: 59 | AssertionError: when the texts and its length does not match. 60 | 61 | Returns: 62 | text (str or list of str): texts to convert. 
63 | """ 64 | if length.numel() == 1: 65 | length = length[0] 66 | assert t.numel() == length, "text with length: {} does not match declared length: {}".format(t.numel(), length) 67 | if raw: 68 | return ''.join([self.alphabet[i - 1] for i in t]) 69 | else: 70 | char_list = [] 71 | for i in range(length): 72 | if t[i] != 0 and (not (i > 0 and t[i - 1] == t[i])): 73 | char_list.append(self.alphabet[t[i] - 1]) 74 | return ''.join(char_list) 75 | else: 76 | # batch mode 77 | assert t.numel() == length.sum(), "texts with length: {} does not match declared length: {}".format(t.numel(), length.sum()) 78 | texts = [] 79 | index = 0 80 | for i in range(length.numel()): 81 | l = length[i] 82 | texts.append( 83 | self.decode( 84 | t[index:index + l], torch.IntTensor([l]), raw=raw)) 85 | index += l 86 | return texts 87 | 88 | 89 | class averager(object): 90 | """Compute average for `torch.Variable` and `torch.Tensor`. """ 91 | 92 | def __init__(self): 93 | self.reset() 94 | 95 | def add(self, v): 96 | if isinstance(v, Variable): 97 | count = v.data.numel() 98 | v = v.data.sum() 99 | elif isinstance(v, torch.Tensor): 100 | count = v.numel() 101 | v = v.sum() 102 | 103 | self.n_count += count 104 | self.sum += v 105 | 106 | def reset(self): 107 | self.n_count = 0 108 | self.sum = 0 109 | 110 | def val(self): 111 | res = 0 112 | if self.n_count != 0: 113 | res = self.sum / float(self.n_count) 114 | return res 115 | 116 | def oneHot(v, v_length, nc): 117 | batchSize = v_length.size(0) 118 | maxLength = v_length.max() 119 | v_onehot = torch.FloatTensor(batchSize, maxLength, nc).fill_(0) 120 | acc = 0 121 | for i in range(batchSize): 122 | length = v_length[i] 123 | label = v[acc:acc + length].view(-1, 1).long() 124 | v_onehot[i, :length].scatter_(1, label, 1.0) 125 | acc += length 126 | return v_onehot 127 | 128 | 129 | def loadData(v, data): 130 | v.data.resize_(data.size()).copy_(data) 131 | 132 | 133 | def prettyPrint(v): 134 | print('Size {0}, Type: {1}'.format(str(v.size()), v.data.type())) 135 | print('| Max: %f | Min: %f | Mean: %f' % (v.max().data[0], v.min().data[0], 136 | v.mean().data[0])) 137 | 138 | 139 | def assureRatio(img): 140 | """Ensure imgH <= imgW.""" 141 | b, c, h, w = img.size() 142 | if h > w: 143 | main = nn.UpsamplingBilinear2d(size=(h, h), scale_factor=None) 144 | img = main(img) 145 | return img 146 | --------------------------------------------------------------------------------