├── .gitignore ├── L1 ├── data_linear.csv ├── data_square.csv └── l1.py ├── L12 ├── LICENSE ├── README.md ├── data.py ├── dataPrepare.ipynb ├── main.py ├── model.py └── trainUnet.ipynb ├── L15 └── Automatic Image Captioning.ipynb ├── L2 ├── dataset.csv └── l2.py ├── L3 ├── bitwise.xlsx ├── l3.py ├── latex.txt └── latex2.txt ├── L4 ├── dataset.csv ├── l4.py └── xor.csv ├── L5 └── l5.py ├── L7 ├── mnist.html └── mnist.ipynb ├── L8 ├── drive.py ├── model.h5 ├── self-driving-car.ipynb └── utils.py ├── L9 ├── dataset.zip ├── feature-extractor.ipynb └── fine-tune.ipynb └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | *.png 106 | L2/matrix.txt 107 | L2/answer.txt 108 | *.gif 109 | *.jpg 110 | *.txt 111 | *.jpeg 112 | L8/dataset/driving_log.csv 113 | L8/model.py 114 | L9/fine_tune.py 115 | L9/feature-extractor.py 116 | L9/data_augmentation.py 117 | L9/data augmentation.ipynb 118 | L10/Vectorization.ipynb 119 | *.tif 120 | *.h5 121 | *.pkl 122 | -------------------------------------------------------------------------------- /L1/data_linear.csv: -------------------------------------------------------------------------------- 1 | Diện tích,Giá 2 | 30,448.524 3 | 32.4138,509.248 4 | 34.8276,535.104 5 | 37.2414,551.432 6 | 39.6552,623.418 7 | 42.069,625.992 8 | 44.4828,655.248 9 | 46.8966,701.377 10 | 49.3103,748.918 11 | 51.7241,757.881 12 | 54.1379,831.004 13 | 56.5517,855.409 14 | 58.9655,866.707 15 | 61.3793,902.545 16 | 63.7931,952.261 17 | 66.2069,995.531 18 | 
# -*- coding: utf-8 -*-
"""
Linear regression on housing data (area in m^2 -> price), fitted with
batch gradient descent on the full dataset.

Created on Mon Feb 18 22:06:34 2019

@author: DELL
"""

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the dataset: column 0 is the area, column 1 is the price.
data = pd.read_csv('data_linear.csv').values
N = data.shape[0]
x = data[:, 0].reshape(-1, 1)
y = data[:, 1].reshape(-1, 1)
plt.scatter(x, y)
plt.xlabel('mét vuông')
plt.ylabel('giá')

# Prepend a bias column of ones so the model is y = w[0] + w[1] * area.
x = np.hstack((np.ones((N, 1)), x))

# Initial weights: intercept 0, slope 1.
w = np.array([0., 1.]).reshape(-1, 1)

numOfIteration = 100
cost = np.zeros((numOfIteration, 1))
learning_rate = 0.000001
# BUG FIX: the loop previously ran `range(1, numOfIteration)`, which
# skipped one of the numOfIteration gradient steps and left cost[0]
# permanently zero. Start at 0 so all 100 updates are performed.
for i in range(numOfIteration):
    r = np.dot(x, w) - y                 # residuals for every sample
    cost[i] = 0.5 * np.sum(r * r)        # half sum-of-squared-errors
    # Gradient step for the intercept and the slope.
    w[0] -= learning_rate * np.sum(r)
    # correct the shape dimension
    w[1] -= learning_rate * np.sum(np.multiply(r, x[:, 1].reshape(-1, 1)))
    print(cost[i])

# Draw the fitted line between the smallest and largest area.
predict = np.dot(x, w)
plt.plot((x[0][1], x[N - 1][1]), (predict[0], predict[N - 1]), 'r')
plt.show()

# Predict the price of a 50 m^2 house with the fitted weights.
# (Previously the hard-coded 50 was repeated instead of using x1.)
x1 = 50
y1 = w[0] + w[1] * x1
print('Giá nhà cho 50m^2 là : ', y1)
22 | -------------------------------------------------------------------------------- /L12/README.md: -------------------------------------------------------------------------------- 1 | # Implementation of deep learning framework -- Unet, using Keras 2 | 3 | The architecture was inspired by [U-Net: Convolutional Networks for Biomedical Image Segmentation](http://lmb.informatik.uni-freiburg.de/people/ronneber/u-net/). 4 | 5 | --- 6 | 7 | ## Overview 8 | 9 | ### Data 10 | 11 | The original dataset is from [isbi challenge](http://brainiac2.mit.edu/isbi_challenge/), and I've downloaded it and done the pre-processing. 12 | 13 | You can find it in folder data/membrane. 14 | 15 | ### Data augmentation 16 | 17 | The data for training contains 30 512*512 images, which are far not enough to feed a deep learning neural network. I use a module called ImageDataGenerator in keras.preprocessing.image to do data augmentation. 18 | 19 | See dataPrepare.ipynb and data.py for detail. 20 | 21 | 22 | ### Model 23 | 24 | ![img/u-net-architecture.png](img/u-net-architecture.png) 25 | 26 | This deep neural network is implemented with Keras functional API, which makes it extremely easy to experiment with different interesting architectures. 27 | 28 | Output from the network is a 512*512 which represents mask that should be learned. Sigmoid activation function 29 | makes sure that mask pixels are in \[0, 1\] range. 30 | 31 | ### Training 32 | 33 | The model is trained for 5 epochs. 34 | 35 | After 5 epochs, calculated accuracy is about 0.97. 36 | 37 | Loss function for the training is basically just a binary crossentropy. 38 | 39 | 40 | --- 41 | 42 | ## How to use 43 | 44 | ### Dependencies 45 | 46 | This tutorial depends on the following libraries: 47 | 48 | * Tensorflow 49 | * Keras >= 1.0 50 | 51 | Also, this code should be compatible with Python versions 2.7-3.5. 
52 | 53 | ### Run main.py 54 | 55 | You will see the predicted results of test image in data/membrane/test 56 | 57 | ### Or follow notebook trainUnet 58 | 59 | 60 | 61 | ### Results 62 | 63 | Use the trained model to do segmentation on test images, the result is satisfactory. 64 | 65 | ![img/0test.png](img/0test.png) 66 | 67 | ![img/0label.png](img/0label.png) 68 | 69 | 70 | ## About Keras 71 | 72 | Keras is a minimalist, highly modular neural networks library, written in Python and capable of running on top of either TensorFlow or Theano. It was developed with a focus on enabling fast experimentation. Being able to go from idea to result with the least possible delay is key to doing good research. 73 | 74 | Use Keras if you need a deep learning library that: 75 | 76 | allows for easy and fast prototyping (through total modularity, minimalism, and extensibility). 77 | supports both convolutional networks and recurrent networks, as well as combinations of the two. 78 | supports arbitrary connectivity schemes (including multi-input and multi-output training). 79 | runs seamlessly on CPU and GPU. 80 | Read the documentation [Keras.io](http://keras.io/) 81 | 82 | Keras is compatible with: Python 2.7-3.5. 
"""Data pipeline for the U-Net membrane-segmentation tutorial.

Provides paired image/mask generators built on Keras' ImageDataGenerator,
normalisation / one-hot helpers, and utilities to load whole datasets
into numpy arrays and to save predicted masks back to disk.
"""
from __future__ import print_function
from keras.preprocessing.image import ImageDataGenerator
import numpy as np
import os
import glob
import skimage.io as io
import skimage.transform as trans

# Per-class RGB colours (CamVid-style palette) used by labelVisualize to
# paint a class-index map as a colour image.
Sky = [128,128,128]
Building = [128,0,0]
Pole = [192,192,128]
Road = [128,64,128]
Pavement = [60,40,222]
Tree = [128,128,0]
SignSymbol = [192,128,128]
Fence = [64,64,128]
Car = [64,0,128]
Pedestrian = [64,64,0]
Bicyclist = [0,128,192]
Unlabelled = [0,0,0]

# Row i of COLOR_DICT is the colour for class index i.
COLOR_DICT = np.array([Sky, Building, Pole, Road, Pavement,
                          Tree, SignSymbol, Fence, Car, Pedestrian, Bicyclist, Unlabelled])


def adjustData(img,mask,flag_multi_class,num_class):
    """Normalise an (image, mask) pair.

    Multi-class: scale img to [0,1], drop the mask's channel axis so it
    holds class indices, one-hot encode it into num_class channels, then
    flatten the spatial dims.  Binary: scale both to [0,1] (only if img
    still holds raw 0-255 values) and threshold the mask at 0.5.
    Returns the (img, mask) tuple.
    """
    if(flag_multi_class):
        img = img / 255
        # Drop the trailing channel axis; mask then holds class indices
        # (4-D means a batch axis is present, 3-D means a single image).
        mask = mask[:,:,:,0] if(len(mask.shape) == 4) else mask[:,:,0]
        new_mask = np.zeros(mask.shape + (num_class,))
        for i in range(num_class):
            #for one pixel in the image, find the class in mask and convert it into one-hot vector
            #index = np.where(mask == i)
            #index_mask = (index[0],index[1],index[2],np.zeros(len(index[0]),dtype = np.int64) + i) if (len(mask.shape) == 4) else (index[0],index[1],np.zeros(len(index[0]),dtype = np.int64) + i)
            #new_mask[index_mask] = 1
            new_mask[mask == i,i] = 1
        # Flatten H and W into one axis: (batch, H*W, num_class) when a
        # batch axis exists, otherwise (H*W, num_class).
        new_mask = np.reshape(new_mask,(new_mask.shape[0],new_mask.shape[1]*new_mask.shape[2],new_mask.shape[3])) if flag_multi_class else np.reshape(new_mask,(new_mask.shape[0]*new_mask.shape[1],new_mask.shape[2]))
        mask = new_mask
    elif(np.max(img) > 1):
        img = img / 255
        mask = mask /255
        # Binarise the mask around 0.5.
        mask[mask > 0.5] = 1
        mask[mask <= 0.5] = 0
    return (img,mask)



def trainGenerator(batch_size,train_path,image_folder,mask_folder,aug_dict,image_color_mode = "grayscale",
                    mask_color_mode = "grayscale",image_save_prefix  = "image",mask_save_prefix  = "mask",
                    flag_multi_class = False,num_class = 2,save_to_dir = None,target_size = (256,256),seed = 1):
    '''
    Infinite generator that yields augmented (image, mask) batches.

    Two ImageDataGenerators (one for images, one for masks) are built
    from the same aug_dict and driven with the same seed, so every
    random transformation applied to an image is applied identically to
    its mask.  Set save_to_dir to a path to dump the augmented pairs to
    disk for visual inspection.
    '''
    image_datagen = ImageDataGenerator(**aug_dict)
    mask_datagen = ImageDataGenerator(**aug_dict)
    image_generator = image_datagen.flow_from_directory(
        train_path,
        classes = [image_folder],
        class_mode = None,
        color_mode = image_color_mode,
        target_size = target_size,
        batch_size = batch_size,
        save_to_dir = save_to_dir,
        save_prefix  = image_save_prefix,
        seed = seed)
    mask_generator = mask_datagen.flow_from_directory(
        train_path,
        classes = [mask_folder],
        class_mode = None,
        color_mode = mask_color_mode,
        target_size = target_size,
        batch_size = batch_size,
        save_to_dir = save_to_dir,
        save_prefix  = mask_save_prefix,
        seed = seed)
    # zip keeps the two streams in lockstep (same seed => same transforms).
    train_generator = zip(image_generator, mask_generator)
    for (img,mask) in train_generator:
        img,mask = adjustData(img,mask,flag_multi_class,num_class)
        yield (img,mask)



def testGenerator(test_path,num_image = 30,target_size = (256,256),flag_multi_class = False,as_gray = True):
    """Yield test images 0.png .. (num_image-1).png from test_path.

    Each image is scaled to [0,1], resized to target_size and reshaped
    to (1, H, W, 1) so it can be fed straight to model.predict.
    """
    for i in range(num_image):
        img = io.imread(os.path.join(test_path,"%d.png"%i),as_gray = as_gray)
        img = img / 255
        img = trans.resize(img,target_size)
        # Add a channel axis for single-channel models.
        img = np.reshape(img,img.shape+(1,)) if (not flag_multi_class) else img
        # Add the batch axis.
        img = np.reshape(img,(1,)+img.shape)
        yield img


def geneTrainNpy(image_path,mask_path,flag_multi_class = False,num_class = 2,image_prefix = "image",mask_prefix = "mask",image_as_gray = True,mask_as_gray = True):
    """Load every <image_prefix>*.png under image_path with its matching
    mask (same filename with image_prefix replaced by mask_prefix and
    image_path by mask_path), normalise the pairs via adjustData, and
    return them as two stacked numpy arrays (image_arr, mask_arr).
    """
    image_name_arr = glob.glob(os.path.join(image_path,"%s*.png"%image_prefix))
    image_arr = []
    mask_arr = []
    for index,item in enumerate(image_name_arr):
        img = io.imread(item,as_gray = image_as_gray)
        img = np.reshape(img,img.shape + (1,)) if image_as_gray else img
        # Derive the mask path from the image path by string substitution.
        mask = io.imread(item.replace(image_path,mask_path).replace(image_prefix,mask_prefix),as_gray = mask_as_gray)
        mask = np.reshape(mask,mask.shape + (1,)) if mask_as_gray else mask
        img,mask = adjustData(img,mask,flag_multi_class,num_class)
        image_arr.append(img)
        mask_arr.append(mask)
    image_arr = np.array(image_arr)
    mask_arr = np.array(mask_arr)
    return image_arr,mask_arr


def labelVisualize(num_class,color_dict,img):
    """Paint a class-index map as an RGB image in [0,1] using color_dict
    (row i of color_dict is the colour for class i)."""
    img = img[:,:,0] if len(img.shape) == 3 else img
    img_out = np.zeros(img.shape + (3,))
    for i in range(num_class):
        img_out[img == i,:] = color_dict[i]
    return img_out / 255



def saveResult(save_path,npyfile,flag_multi_class = False,num_class = 2):
    """Write each predicted mask in npyfile to save_path as
    <index>_predict.png; multi-class predictions are colourised first."""
    for i,item in enumerate(npyfile):
        img = labelVisualize(num_class,COLOR_DICT,item) if flag_multi_class else item[:,:,0]
        io.imsave(os.path.join(save_path,"%d_predict.png"%i),img)
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 13 | " from ._conv import register_converters as _register_converters\n", 14 | "Using TensorFlow backend.\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "from data import *" 20 | ] 21 | }, 22 | { 23 | "cell_type": "markdown", 24 | "metadata": {}, 25 | "source": [ 26 | "# data augmentation \n", 27 | "\n", 28 | "In deep learning tasks, a lot of data is need to train DNN model, when the dataset is not big enough, data augmentation should be applied.\n", 29 | "\n", 30 | "keras.preprocessing.image.ImageDataGenerator is a data generator, which can feed the DNN with data like : (data,label), it can also do data augmentation at the same time.\n", 31 | "\n", 32 | "It is very convenient for us to use keras.preprocessing.image.ImageDataGenerator to do data augmentation by implement image rotation, shift, rescale and so on... see [keras documentation](https://keras.io/preprocessing/image/) for detail.\n", 33 | "\n", 34 | "For image segmentation tasks, the image and mask must be transformed **together!!**" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": {}, 40 | "source": [ 41 | "## define your data generator\n", 42 | "\n", 43 | "If you want to visualize your data augmentation result, set save_to_dir = your path" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 6, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "#if you don't want to do data augmentation, set data_gen_args as an empty dict.\n", 53 | "#data_gen_args = dict()\n", 54 | "\n", 55 | "data_gen_args = dict(rotation_range=0.2,\n", 56 | " width_shift_range=0.05,\n", 57 | " height_shift_range=0.05,\n", 58 | " shear_range=0.05,\n", 59 | " zoom_range=0.05,\n", 60 | " horizontal_flip=True,\n", 61 | " fill_mode='nearest')\n", 62 | "myGenerator = trainGenerator(20,'data/membrane/train','image','label',data_gen_args,save_to_dir = \"data/membrane/train/aug\")" 63 | ] 64 | }, 65 | { 66 | 
"cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "## visualize your data augmentation result" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 8, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "#you will see 60 transformed images and their masks in data/membrane/train/aug\n", 79 | "num_batch = 3\n", 80 | "for i,batch in enumerate(myGenerator):\n", 81 | " if(i >= num_batch):\n", 82 | " break" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "metadata": {}, 88 | "source": [ 89 | "## create .npy data\n", 90 | "\n", 91 | "If your computer has enough memory, you can create npy files containing all your images and masks, and feed your DNN with them." 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 9, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "image_arr,mask_arr = geneTrainNpy(\"data/membrane/train/aug/\",\"data/membrane/train/aug/\")\n", 101 | "#np.save(\"data/image_arr.npy\",image_arr)\n", 102 | "#np.save(\"data/mask_arr.npy\",mask_arr)" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.6.2" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } 128 | -------------------------------------------------------------------------------- /L12/main.py: -------------------------------------------------------------------------------- 1 | from model import * 2 | from data import * 3 | 4 | #os.environ["CUDA_VISIBLE_DEVICES"] = "0" 5 | 6 | 7 | data_gen_args = dict(rotation_range=0.2, 8 | width_shift_range=0.05, 9 | height_shift_range=0.05, 10 | shear_range=0.05, 
"""U-Net model definition (Keras functional API) for binary
image segmentation, per Ronneberger et al. 2015."""
import numpy as np
import os
import skimage.io as io
import skimage.transform as trans
from keras.models import *
from keras.layers import *
from keras.optimizers import *
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras import backend as keras


def unet(pretrained_weights = None,input_size = (256,256,1)):
    """Build and compile a U-Net.

    Parameters
    ----------
    pretrained_weights : str or None
        Optional path to an HDF5 weights file to load into the model.
    input_size : tuple
        Input tensor shape (H, W, channels); default single-channel 256x256.

    Returns
    -------
    A compiled Keras Model whose sigmoid output is a (H, W, 1) mask.
    """
    inputs = Input(input_size)
    # ---- Contracting path: two 3x3 convs then 2x2 max-pool per level.
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    drop4 = Dropout(0.5)(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)

    # ---- Bottleneck.
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)

    # ---- Expansive path: upsample + 2x2 conv, concatenate the skip
    # connection from the matching encoder level, then two 3x3 convs.
    up6 = Conv2D(512, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    merge6 = concatenate([drop4,up6], axis = 3)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)

    up7 = Conv2D(256, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3,up7], axis = 3)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)

    up8 = Conv2D(128, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2,up8], axis = 3)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)

    up9 = Conv2D(64, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1,up9], axis = 3)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    # Final 1x1 conv: per-pixel sigmoid probability of the foreground class.
    conv10 = Conv2D(1, 1, activation = 'sigmoid')(conv9)

    # BUG FIX: use the Keras 2 keyword names `inputs`/`outputs`.  The old
    # `input=`/`output=` form raised the UserWarning visible in the
    # trainUnet notebook output and is removed in later Keras releases.
    model = Model(inputs = inputs, outputs = conv10)

    model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])

    #model.summary()

    if(pretrained_weights):
        model.load_weights(pretrained_weights)

    return model
"__________________________________________________________________________________________________\n", 44 | "conv2d_25 (Conv2D) (None, 256, 256, 64) 640 input_2[0][0] \n", 45 | "__________________________________________________________________________________________________\n", 46 | "conv2d_26 (Conv2D) (None, 256, 256, 64) 36928 conv2d_25[0][0] \n", 47 | "__________________________________________________________________________________________________\n", 48 | "max_pooling2d_5 (MaxPooling2D) (None, 128, 128, 64) 0 conv2d_26[0][0] \n", 49 | "__________________________________________________________________________________________________\n", 50 | "conv2d_27 (Conv2D) (None, 128, 128, 128 73856 max_pooling2d_5[0][0] \n", 51 | "__________________________________________________________________________________________________\n", 52 | "conv2d_28 (Conv2D) (None, 128, 128, 128 147584 conv2d_27[0][0] \n", 53 | "__________________________________________________________________________________________________\n", 54 | "max_pooling2d_6 (MaxPooling2D) (None, 64, 64, 128) 0 conv2d_28[0][0] \n", 55 | "__________________________________________________________________________________________________\n", 56 | "conv2d_29 (Conv2D) (None, 64, 64, 256) 295168 max_pooling2d_6[0][0] \n", 57 | "__________________________________________________________________________________________________\n", 58 | "conv2d_30 (Conv2D) (None, 64, 64, 256) 590080 conv2d_29[0][0] \n", 59 | "__________________________________________________________________________________________________\n", 60 | "max_pooling2d_7 (MaxPooling2D) (None, 32, 32, 256) 0 conv2d_30[0][0] \n", 61 | "__________________________________________________________________________________________________\n", 62 | "conv2d_31 (Conv2D) (None, 32, 32, 512) 1180160 max_pooling2d_7[0][0] \n", 63 | "__________________________________________________________________________________________________\n", 64 | "conv2d_32 (Conv2D) (None, 32, 
32, 512) 2359808 conv2d_31[0][0] \n", 65 | "__________________________________________________________________________________________________\n", 66 | "dropout_3 (Dropout) (None, 32, 32, 512) 0 conv2d_32[0][0] \n", 67 | "__________________________________________________________________________________________________\n", 68 | "max_pooling2d_8 (MaxPooling2D) (None, 16, 16, 512) 0 dropout_3[0][0] \n", 69 | "__________________________________________________________________________________________________\n", 70 | "conv2d_33 (Conv2D) (None, 16, 16, 1024) 4719616 max_pooling2d_8[0][0] \n", 71 | "__________________________________________________________________________________________________\n", 72 | "conv2d_34 (Conv2D) (None, 16, 16, 1024) 9438208 conv2d_33[0][0] \n", 73 | "__________________________________________________________________________________________________\n", 74 | "dropout_4 (Dropout) (None, 16, 16, 1024) 0 conv2d_34[0][0] \n", 75 | "__________________________________________________________________________________________________\n", 76 | "up_sampling2d_5 (UpSampling2D) (None, 32, 32, 1024) 0 dropout_4[0][0] \n", 77 | "__________________________________________________________________________________________________\n", 78 | "conv2d_35 (Conv2D) (None, 32, 32, 512) 2097664 up_sampling2d_5[0][0] \n", 79 | "__________________________________________________________________________________________________\n", 80 | "concatenate_5 (Concatenate) (None, 32, 32, 1024) 0 dropout_3[0][0] \n", 81 | " conv2d_35[0][0] \n", 82 | "__________________________________________________________________________________________________\n", 83 | "conv2d_36 (Conv2D) (None, 32, 32, 512) 4719104 concatenate_5[0][0] \n", 84 | "__________________________________________________________________________________________________\n", 85 | "conv2d_37 (Conv2D) (None, 32, 32, 512) 2359808 conv2d_36[0][0] \n", 86 | 
"__________________________________________________________________________________________________\n", 87 | "up_sampling2d_6 (UpSampling2D) (None, 64, 64, 512) 0 conv2d_37[0][0] \n", 88 | "__________________________________________________________________________________________________\n", 89 | "conv2d_38 (Conv2D) (None, 64, 64, 256) 524544 up_sampling2d_6[0][0] \n", 90 | "__________________________________________________________________________________________________\n", 91 | "concatenate_6 (Concatenate) (None, 64, 64, 512) 0 conv2d_30[0][0] \n", 92 | " conv2d_38[0][0] \n", 93 | "__________________________________________________________________________________________________\n", 94 | "conv2d_39 (Conv2D) (None, 64, 64, 256) 1179904 concatenate_6[0][0] \n", 95 | "__________________________________________________________________________________________________\n", 96 | "conv2d_40 (Conv2D) (None, 64, 64, 256) 590080 conv2d_39[0][0] \n", 97 | "__________________________________________________________________________________________________\n", 98 | "up_sampling2d_7 (UpSampling2D) (None, 128, 128, 256 0 conv2d_40[0][0] \n", 99 | "__________________________________________________________________________________________________\n", 100 | "conv2d_41 (Conv2D) (None, 128, 128, 128 131200 up_sampling2d_7[0][0] \n", 101 | "__________________________________________________________________________________________________\n", 102 | "concatenate_7 (Concatenate) (None, 128, 128, 256 0 conv2d_28[0][0] \n", 103 | " conv2d_41[0][0] \n", 104 | "__________________________________________________________________________________________________\n", 105 | "conv2d_42 (Conv2D) (None, 128, 128, 128 295040 concatenate_7[0][0] \n", 106 | "__________________________________________________________________________________________________\n", 107 | "conv2d_43 (Conv2D) (None, 128, 128, 128 147584 conv2d_42[0][0] \n", 108 | 
"__________________________________________________________________________________________________\n", 109 | "up_sampling2d_8 (UpSampling2D) (None, 256, 256, 128 0 conv2d_43[0][0] \n", 110 | "__________________________________________________________________________________________________\n", 111 | "conv2d_44 (Conv2D) (None, 256, 256, 64) 32832 up_sampling2d_8[0][0] \n", 112 | "__________________________________________________________________________________________________\n", 113 | "concatenate_8 (Concatenate) (None, 256, 256, 128 0 conv2d_26[0][0] \n", 114 | " conv2d_44[0][0] \n", 115 | "__________________________________________________________________________________________________\n", 116 | "conv2d_45 (Conv2D) (None, 256, 256, 64) 73792 concatenate_8[0][0] \n", 117 | "__________________________________________________________________________________________________\n", 118 | "conv2d_46 (Conv2D) (None, 256, 256, 64) 36928 conv2d_45[0][0] \n", 119 | "__________________________________________________________________________________________________\n", 120 | "conv2d_47 (Conv2D) (None, 256, 256, 2) 1154 conv2d_46[0][0] \n", 121 | "__________________________________________________________________________________________________\n", 122 | "conv2d_48 (Conv2D) (None, 256, 256, 1) 3 conv2d_47[0][0] \n", 123 | "==================================================================================================\n", 124 | "Total params: 31,031,685\n", 125 | "Trainable params: 31,031,685\n", 126 | "Non-trainable params: 0\n", 127 | "__________________________________________________________________________________________________\n" 128 | ] 129 | }, 130 | { 131 | "name": "stderr", 132 | "output_type": "stream", 133 | "text": [ 134 | "F:\\DL_Tutorial\\L12\\model.py:55: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"co...)`\n", 135 | " model = Model(input = inputs, output = conv10)\n" 136 | ] 137 | } 138 | ], 139 
| "source": [ 140 | "data_gen_args = dict(rotation_range=0.2,\n", 141 | " width_shift_range=0.05,\n", 142 | " height_shift_range=0.05,\n", 143 | " shear_range=0.05,\n", 144 | " zoom_range=0.05,\n", 145 | " horizontal_flip=True,\n", 146 | " fill_mode='nearest')\n", 147 | "myGene = trainGenerator(2,'data/membrane/train','image','label',data_gen_args,save_to_dir = None)\n", 148 | "model = unet()\n", 149 | "model.summary()\n", 150 | "model_checkpoint = ModelCheckpoint('unet_membrane.hdf5', monitor='loss',verbose=1, save_best_only=True)\n", 151 | "#model.fit_generator(myGene,steps_per_epoch=200,epochs=5,callbacks=[model_checkpoint])" 152 | ] 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "metadata": {}, 157 | "source": [ 158 | "### Train with npy file" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "#imgs_train,imgs_mask_train = geneTrainNpy(\"data/membrane/train/aug/\",\"data/membrane/train/aug/\")\n", 168 | "#model.fit(imgs_train, imgs_mask_train, batch_size=2, nb_epoch=10, verbose=1,validation_split=0.2, shuffle=True, callbacks=[model_checkpoint])" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "### test your model and save predicted results" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "metadata": {}, 182 | "outputs": [], 183 | "source": [ 184 | "testGene = testGenerator(\"data/membrane/test\")\n", 185 | "model = unet()\n", 186 | "model.load_weights(\"unet_membrane.hdf5\")\n", 187 | "results = model.predict_generator(testGene,30,verbose=1)\n", 188 | "saveResult(\"data/membrane/test\",results)" 189 | ] 190 | } 191 | ], 192 | "metadata": { 193 | "kernelspec": { 194 | "display_name": "Python 3", 195 | "language": "python", 196 | "name": "python3" 197 | }, 198 | "language_info": { 199 | "codemirror_mode": { 200 | "name": "ipython", 201 | "version": 3 202 | }, 203 | 
"file_extension": ".py", 204 | "mimetype": "text/x-python", 205 | "name": "python", 206 | "nbconvert_exporter": "python", 207 | "pygments_lexer": "ipython3", 208 | "version": "3.6.5" 209 | } 210 | }, 211 | "nbformat": 4, 212 | "nbformat_minor": 2 213 | } 214 | -------------------------------------------------------------------------------- /L2/dataset.csv: -------------------------------------------------------------------------------- 1 | Lương,Thời gian làm việc,Cho vay 2 | 10,1,1 3 | 5,2,1 4 | 6,1.8,1 5 | 7,1,1 6 | 8,2,1 7 | 9,0.5,1 8 | 4,3,1 9 | 5,2.5,1 10 | 8,1,1 11 | 4,2.5,1 12 | 8,0.1,0 13 | 7,0.15,0 14 | 4,1,0 15 | 5,0.8,0 16 | 7,0.3,0 17 | 4,1,0 18 | 5,0.5,0 19 | 6,0.3,0 20 | 7,0.2,0 21 | 8,0.15,0 22 | -------------------------------------------------------------------------------- /L2/l2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue Feb 26 13:49:07 2019 4 | 5 | @author: DELL 6 | """ 7 | 8 | import numpy as np 9 | import pandas as pd 10 | import matplotlib.pyplot as plt 11 | 12 | # Hàm sigmoid 13 | def sigmoid(x): 14 | return 1 / (1 + np.exp(-x)) 15 | 16 | # Load data từ file csv 17 | data = pd.read_csv('dataset.csv').values 18 | N, d = data.shape 19 | x = data[:, 0:d-1].reshape(-1, d-1) 20 | y = data[:, 2].reshape(-1, 1) 21 | 22 | # Vẽ data bằng scatter 23 | x_cho_vay = x[y[:,0]==1] 24 | x_tu_choi = x[y[:,0]==0] 25 | 26 | plt.scatter(x_cho_vay[:, 0], x_cho_vay[:, 1], c='red', edgecolors='none', s=30, label='cho vay') 27 | plt.scatter(x_tu_choi[:, 0], x_tu_choi[:, 1], c='blue', edgecolors='none', s=30, label='từ chối') 28 | plt.legend(loc=1) 29 | plt.xlabel('mức lương (triệu)') 30 | plt.ylabel('kinh nghiệm (năm)') 31 | 32 | # Thêm cột 1 vào dữ liệu x 33 | x = np.hstack((np.ones((N, 1)), x)) 34 | 35 | w = np.array([0.,0.1,0.1]).reshape(-1,1) 36 | 37 | # Số lần lặp bước 2 38 | numOfIteration = 1000 39 | cost = np.zeros((numOfIteration,1)) 40 | learning_rate = 0.01 
41 | 42 | for i in range(1, numOfIteration): 43 | 44 | # Tính giá trị dự đoán 45 | y_predict = sigmoid(np.dot(x, w)) 46 | cost[i] = -np.sum(np.multiply(y, np.log(y_predict)) + np.multiply(1-y, np.log(1-y_predict))) 47 | # Gradient descent 48 | w = w - learning_rate * np.dot(x.T, y_predict-y) 49 | print(cost[i]) 50 | 51 | # Vẽ đường phân cách. 52 | t = 0.5 53 | plt.plot((4, 10),(-(w[0]+4*w[1]+ np.log(1/t-1))/w[2], -(w[0] + 10*w[1]+ np.log(1/t-1))/w[2]), 'g') 54 | plt.show() -------------------------------------------------------------------------------- /L3/bitwise.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nttuan8/DL_Tutorial/b0a5dfb2898c17a7e8cb0b85035ef09d0b2eed4d/L3/bitwise.xlsx -------------------------------------------------------------------------------- /L3/l3.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue Mar 5 10:56:14 2019 4 | 5 | @author: DELL 6 | """ 7 | 8 | import numpy as np 9 | import pandas as pd 10 | import matplotlib.pyplot as plt 11 | 12 | # Hàm sigmoid 13 | def sigmoid(x): 14 | return 1 / (1 + np.exp(-x)) 15 | 16 | # Toán tử AND 17 | plt.scatter([1], [1], c='red', edgecolors='none', s=30, label='cho vay') 18 | plt.scatter([0, 0, 1], [0, 1, 0], c='blue', edgecolors='none', s=30, label='từ chối') 19 | plt.plot([0, 1.5], [1.5, 0], 'g') 20 | 21 | # Toán tử OR 22 | plt.scatter([0, 1, 1], [1, 0, 1], c='red', edgecolors='none', s=30, label='cho vay') 23 | plt.scatter([0], [0], c='blue', edgecolors='none', s=30, label='từ chối') 24 | plt.plot([-0.5, 1.5], [1, -1], 'g') 25 | plt.xlabel('x1') 26 | plt.ylabel('x2') 27 | 28 | # Toán tử XOR 29 | plt.scatter([1, 0], [0, 1], c='red', edgecolors='none', s=30, label='cho vay') 30 | plt.scatter([1, 0], [1, 0], c='blue', edgecolors='none', s=30, label='từ chối') 31 | plt.xlabel('x1') 32 | plt.ylabel('x2') 33 | 
-------------------------------------------------------------------------------- /L3/latex.txt: -------------------------------------------------------------------------------- 1 | z^{(1)} = 2 | \begin{bmatrix} 3 | &z_1^{(1)} \\ 4 | &z_2^{(1)} \\ 5 | &z_3^{(1)} 6 | \end{bmatrix} 7 | = 8 | \begin{bmatrix} 9 | &a_1^{(0)} * w_{11}^{(1)} + a_2^{(0)} * w_{21}^{(1)} + a_3^{(0)} * w_{31}^{(1)} + b_1^{(1)} \\ 10 | &a_1^{(0)} * w_{12}^{(1)} + a_2^{(0)} * w_{22}^{(1)} + a_3^{(0)} * w_{32}^{(1)} + b_2^{(1)} \\ 11 | &a_1^{(0)} * w_{13}^{(1)} + a_2^{(0)} * w_{23}^{(1)} + a_3^{(0)} * w_{33}^{(1)} + b_3^{(1)} \\ 12 | \end{bmatrix}\newline\newline 13 | = (W^{(1)})^T * a^{(0)} + b^{(1)} 14 | \newline\newline 15 | a^{(1)} = \sigma(z^{(1)}) 16 | \newline \newline 17 | z^{(2)} = (W^{(2)})^T * a^{(1)} + b^{(2)}, 18 | a^{(2)} = \sigma(z^{(2)}) -------------------------------------------------------------------------------- /L3/latex2.txt: -------------------------------------------------------------------------------- 1 | X = 2 | \begin{bmatrix} 3 | &x_1^{[1]} &x_2^{[1]} &... &x_d^{[1]}\\ 4 | &x_1^{[2]} &x_2^{[2]} &... &x_d^{[2]}\\ 5 | &... &... &... &... \\ 6 | &x_1^{[n]} &x_2^{[n]} &... &x_d^{[n]} 7 | \end{bmatrix} 8 | = 9 | \begin{bmatrix} 10 | &-(x^{[1]})^T-\\ 11 | &-(x^{[2]})^T-\\ 12 | &...\\ 13 | &-(x^{[n]})^T-\\ 14 | \end{bmatrix} 15 | 16 | 17 | Z^{(i)}= 18 | \begin{bmatrix} 19 | &z^{(i)[1]}_1 &z^{(i)[1]}_2 &... &z^{(i)[1]}_{l^{(i)}} \\ 20 | &z^{(i)[2]}_1 &z^{(i)[2]}_2 &... &z^{(i)[2]}_{l^{(i)}} \\ 21 | &...\\ 22 | &z^{(i)[n]}_1 &z^{(i)[n]}_2 &... 
&z^{(i)[n]}_{l^{(i)}} \\ 23 | \end{bmatrix} 24 | = 25 | \begin{bmatrix} 26 | &-(z^{(i)[1]})^T-\\ 27 | &-(z^{(i)[2]})^T-\\ 28 | &...\\ 29 | &-(z^{(i)[n]})^T-\\ 30 | \end{bmatrix} 31 | 32 | 33 | Z^{(1)} = 34 | \begin{bmatrix} 35 | &(z^{(1)[1]})^T\\ 36 | &(z^{(1)[2]})^T\\ 37 | &...\\ 38 | &(z^{(1)[n]})^T\\ 39 | \end{bmatrix} 40 | = \begin{bmatrix} 41 | &(x^{[1]})^T * w^{(1)} + (b^{(1)})^T\\ 42 | &(x^{[2]})^T * w^{(1)} + (b^{(1)})^T\\ 43 | &...\\ 44 | &(x^{[n]})^T * w^{(1)} + (b^{(1)})^T\\ 45 | \end{bmatrix} = X * W^{(1)} + 46 | \begin{bmatrix} 47 | &(b^{(1)})^T\\ 48 | &(b^{(1)})^T\\ 49 | &...\\ 50 | &(b^{(1)})^T\\ 51 | \end{bmatrix}\newline \newline 52 | = X * W^{(1)} + b^{(1)} 53 | 54 | 55 | \newline \newline 56 | A^{(1)} = \sigma(Z^{(1)}) \newline \newline 57 | Z^{(2)} = A^{(1)} * W^{(2)}, A^{(2)} = \sigma({Z^{(2)}})\newline \newline 58 | Z^{(3)} = A^{(2)} * W^{(3)}\newline \newline 59 | \hat{Y} = A^{(3)} = \sigma({Z^{(3)}}) 60 | 61 | 62 | -------------------------------------------------------------------------------- /L4/dataset.csv: -------------------------------------------------------------------------------- 1 | Lương,Thời gian làm việc,Cho vay 2 | 10,1,1 3 | 5,2,1 4 | 6,1.8,1 5 | 7,1,1 6 | 8,2,1 7 | 9,0.5,1 8 | 4,3,1 9 | 5,2.5,1 10 | 8,1,1 11 | 4,2.5,1 12 | 8,0.1,0 13 | 7,0.15,0 14 | 4,1,0 15 | 5,0.8,0 16 | 7,0.3,0 17 | 4,1,0 18 | 5,0.5,0 19 | 6,0.3,0 20 | 7,0.2,0 21 | 8,0.15,0 22 | -------------------------------------------------------------------------------- /L4/l4.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu Mar 14 14:37:33 2019 4 | 5 | @author: DELL 6 | """ 7 | 8 | # Thêm thư viện 9 | import numpy as np 10 | import pandas as pd 11 | 12 | # Hàm sigmoid 13 | def sigmoid(x): 14 | return 1/(1+np.exp(-x)) 15 | 16 | 17 | # Đạo hàm hàm sigmoid 18 | def sigmoid_derivative(x): 19 | return x*(1-x) 20 | 21 | 22 | # Lớp neural network 23 | class NeuralNetwork: 24 | def 
__init__(self, layers, alpha=0.1): 25 | # Mô hình layer ví dụ [2,2,1] 26 | self.layers = layers 27 | 28 | # Hệ số learning rate 29 | self.alpha = alpha 30 | 31 | # Tham số W, b 32 | self.W = [] 33 | self.b = [] 34 | 35 | # Khởi tạo các tham số ở mỗi layer 36 | for i in range(0, len(layers)-1): 37 | w_ = np.random.randn(layers[i], layers[i+1]) 38 | b_ = np.zeros((layers[i+1], 1)) 39 | self.W.append(w_/layers[i]) 40 | self.b.append(b_) 41 | 42 | 43 | # Tóm tắt mô hình neural network 44 | def __repr__(self): 45 | return "Neural network [{}]".format("-".join(str(l) for l in self.layers)) 46 | 47 | 48 | # Train mô hình với dữ liệu 49 | def fit_partial(self, x, y): 50 | A = [x] 51 | 52 | # quá trình feedforward 53 | out = A[-1] 54 | for i in range(0, len(self.layers) - 1): 55 | out = sigmoid(np.dot(out, self.W[i]) + (self.b[i].T)) 56 | A.append(out) 57 | 58 | # quá trình backpropagation 59 | y = y.reshape(-1, 1) 60 | dA = [-(y/A[-1] - (1-y)/(1-A[-1]))] 61 | dW = [] 62 | db = [] 63 | for i in reversed(range(0, len(self.layers)-1)): 64 | dw_ = np.dot((A[i]).T, dA[-1] * sigmoid_derivative(A[i+1])) 65 | db_ = (np.sum(dA[-1] * sigmoid_derivative(A[i+1]), 0)).reshape(-1,1) 66 | dA_ = np.dot(dA[-1] * sigmoid_derivative(A[i+1]), self.W[i].T) 67 | dW.append(dw_) 68 | db.append(db_) 69 | dA.append(dA_) 70 | 71 | # Đảo ngược dW, db 72 | dW = dW[::-1] 73 | db = db[::-1] 74 | 75 | # Gradient descent 76 | for i in range(0, len(self.layers)-1): 77 | self.W[i] = self.W[i] - self.alpha * dW[i] 78 | self.b[i] = self.b[i] - self.alpha * db[i] 79 | 80 | def fit(self, X, y, epochs=20, verbose=10): 81 | for epoch in range(0, epochs): 82 | self.fit_partial(X, y) 83 | if epoch % verbose == 0: 84 | loss = self.calculate_loss(X, y) 85 | print("Epoch {}, loss {}".format(epoch, loss)) 86 | 87 | # Dự đoán 88 | def predict(self, X): 89 | for i in range(0, len(self.layers) - 1): 90 | X = sigmoid(np.dot(X, self.W[i]) + (self.b[i].T)) 91 | return X 92 | 93 | # Tính loss function 94 | def 
calculate_loss(self, X, y): 95 | y_predict = self.predict(X) 96 | #return np.sum((y_predict-y)**2)/2 97 | return -(np.sum(y*np.log(y_predict) + (1-y)*np.log(1-y_predict))) 98 | 99 | # Dataset bài 2 100 | data = pd.read_csv('dataset.csv').values 101 | N, d = data.shape 102 | X = data[:, 0:d-1].reshape(-1, d-1) 103 | y = data[:, 2].reshape(-1, 1) 104 | 105 | p = NeuralNetwork([X.shape[1], 2, 1], 0.1) 106 | p.fit(X, y, 10000, 100) -------------------------------------------------------------------------------- /L4/xor.csv: -------------------------------------------------------------------------------- 1 | x1,x2,x1 XOR x2 2 | 1,1,0 3 | 1,0,1 4 | 0,1,1 5 | 0,0,0 6 | -------------------------------------------------------------------------------- /L5/l5.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sat Mar 23 18:00:00 2019 4 | 5 | @author: DELL 6 | """ 7 | 8 | import cv2 9 | 10 | img = cv2.imread("gray.jpg", 0) 11 | 12 | gray = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) 13 | 14 | cv2.imshow('gray.jpg', gray) -------------------------------------------------------------------------------- /L7/mnist.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "mnist.ipynb", 7 | "version": "0.3.2", 8 | "provenance": [], 9 | "collapsed_sections": [] 10 | }, 11 | "kernelspec": { 12 | "name": "python3", 13 | "display_name": "Python 3" 14 | }, 15 | "accelerator": "GPU" 16 | }, 17 | "cells": [ 18 | { 19 | "metadata": { 20 | "id": "hIArylcR3KOZ", 21 | "colab_type": "code", 22 | "outputId": "ee3b6b52-83f5-4273-a586-c95d5981a10b", 23 | "colab": { 24 | "base_uri": "https://localhost:8080/", 25 | "height": 34 26 | } 27 | }, 28 | "cell_type": "code", 29 | "source": [ 30 | "# 1. 
Thêm các thư viện cần thiết\n", 31 | "import numpy as np\n", 32 | "import matplotlib.pyplot as plt\n", 33 | "from keras.models import Sequential\n", 34 | "from keras.layers import Dense, Dropout, Activation, Flatten\n", 35 | "from keras.layers import Conv2D, MaxPooling2D\n", 36 | "from keras.utils import np_utils\n", 37 | "from keras.datasets import mnist" 38 | ], 39 | "execution_count": 1, 40 | "outputs": [ 41 | { 42 | "output_type": "stream", 43 | "text": [ 44 | "Using TensorFlow backend.\n" 45 | ], 46 | "name": "stderr" 47 | } 48 | ] 49 | }, 50 | { 51 | "metadata": { 52 | "id": "MZMLgpcY3b7G", 53 | "colab_type": "text" 54 | }, 55 | "cell_type": "markdown", 56 | "source": [ 57 | "Load dữ liệu từ MNIST dataset, bao gồm 60.000 training set và 10.000 test set. Sau đó chia bộ traning set thành 2: 50.000 cho training set và 10.000 dữ liệu cho validation set." 58 | ] 59 | }, 60 | { 61 | "metadata": { 62 | "id": "IYP0FcDeRnJO", 63 | "colab_type": "code", 64 | "outputId": "c907c963-c9ae-4215-87c5-4cf1b114f645", 65 | "colab": { 66 | "base_uri": "https://localhost:8080/", 67 | "height": 34 68 | } 69 | }, 70 | "cell_type": "code", 71 | "source": [ 72 | "# 2. Load dữ liệu MNIST\n", 73 | "(X_train, y_train), (X_test, y_test) = mnist.load_data()\n", 74 | "X_val, y_val = X_train[50000:60000,:], y_train[50000:60000]\n", 75 | "X_train, y_train = X_train[:50000,:], y_train[:50000]\n", 76 | "print(X_train.shape)" 77 | ], 78 | "execution_count": 2, 79 | "outputs": [ 80 | { 81 | "output_type": "stream", 82 | "text": [ 83 | "(50000, 28, 28)\n" 84 | ], 85 | "name": "stdout" 86 | } 87 | ] 88 | }, 89 | { 90 | "metadata": { 91 | "id": "jIEIibrB4LDb", 92 | "colab_type": "text" 93 | }, 94 | "cell_type": "markdown", 95 | "source": [ 96 | "Dữ liệu input cho mô hình convolutional neural network là 1 tensor 4 chiều (N, W, H, D), trong bài này là ảnh xám nên W = H = 28, D = 1, N là số lượng ảnh cho mỗi lần training. 
Do dữ liệu ảnh ở trên có kích thước là (N, 28, 28) tức là (N, W, H) nên rần reshape lại thành kích thước N * 28 * 28 * 1 để giống kích thước mà keras yêu cầu." 97 | ] 98 | }, 99 | { 100 | "metadata": { 101 | "id": "cfFUzBFcRot6", 102 | "colab_type": "code", 103 | "colab": {} 104 | }, 105 | "cell_type": "code", 106 | "source": [ 107 | "# 3. Reshape lại dữ liệu cho đúng kích thước mà keras yêu cầu\n", 108 | "X_train = X_train.reshape(X_train.shape[0], 28, 28, 1)\n", 109 | "X_val = X_val.reshape(X_val.shape[0], 28, 28, 1)\n", 110 | "X_test = X_test.reshape(X_test.shape[0], 28, 28, 1)" 111 | ], 112 | "execution_count": 3, 113 | "outputs": [] 114 | }, 115 | { 116 | "metadata": { 117 | "id": "FifpZK_b7KvJ", 118 | "colab_type": "text" 119 | }, 120 | "cell_type": "markdown", 121 | "source": [ 122 | "Bước này chuyển đổi one-hot encoding label Y của ảnh ví dụ số 5 thành vector [0, 0, 0, 0, 0, 1, 0, 0, 0, 0]" 123 | ] 124 | }, 125 | { 126 | "metadata": { 127 | "id": "OHwO1pw6RsA9", 128 | "colab_type": "code", 129 | "outputId": "5f9165db-c4ba-4778-aca2-ac43a1e75794", 130 | "colab": { 131 | "base_uri": "https://localhost:8080/", 132 | "height": 50 133 | } 134 | }, 135 | "cell_type": "code", 136 | "source": [ 137 | "# 4. One hot encoding label (Y)\n", 138 | "Y_train = np_utils.to_categorical(y_train, 10)\n", 139 | "Y_val = np_utils.to_categorical(y_val, 10)\n", 140 | "Y_test = np_utils.to_categorical(y_test, 10)\n", 141 | "print('Dữ liệu y ban đầu ', y_train[0])\n", 142 | "print('Dữ liệu y sau one-hot encoding ',Y_train[0])" 143 | ], 144 | "execution_count": 4, 145 | "outputs": [ 146 | { 147 | "output_type": "stream", 148 | "text": [ 149 | "Dữ liệu y ban đầu 5\n", 150 | "Dữ liệu y sau one-hot encoding [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]\n" 151 | ], 152 | "name": "stdout" 153 | } 154 | ] 155 | }, 156 | { 157 | "metadata": { 158 | "id": "LuoSHJ1m7uUQ", 159 | "colab_type": "text" 160 | }, 161 | "cell_type": "markdown", 162 | "source": [ 163 | "Bước này định nghĩa model:\n", 164 | "1. 
Model = Sequential() để nói cho keras là ta sẽ xếp các layer lên nhau để tạo model. Ví dụ input -> CONV -> POOL -> CONV -> POOL -> FLATTEN -> FC -> OUTPUT\n", 165 | "2. Ở layer đầu tiên cần chỉ rõ input_shape của ảnh, input_shape = (W, H, D), ta dùng ảnh xám kích thước (28,28) nên input_shape = (28, 28, 1)\n", 166 | "3. Khi thêm Convolutional Layer ta cần chỉ rõ các tham số: K (số lượng layer), kernel size (W, H), hàm activation sử dụng. cấu trúc: model.add(Conv2D(K, (W, H), activation='tên_hàm_activation'))\n", 167 | "4. Khi thêm Maxpooling Layer cần chỉ rõ size của kernel, model.add(MaxPooling2D(pool_size=(W, H)))\n", 168 | "5. Bước Flatten chuyển từ tensor sang vector chỉ cần thêm flatten layer.\n", 169 | "6. Để thêm Fully Connected Layer (FC) cần chỉ rõ số lượng node trong layer và hàm activation sử dụng trong layer, cấu trúc: model.add(Dense(số_lượng_node activation='tên_hàm activation'))\n" 170 | ] 171 | }, 172 | { 173 | "metadata": { 174 | "id": "YhER3pYeRtjC", 175 | "colab_type": "code", 176 | "outputId": "bf4676d3-d716-4300-e789-aa66b6270574", 177 | "colab": { 178 | "base_uri": "https://localhost:8080/", 179 | "height": 87 180 | } 181 | }, 182 | "cell_type": "code", 183 | "source": [ 184 | "# 5. 
Định nghĩa model\n", 185 | "model = Sequential()\n", 186 | " \n", 187 | "# Thêm Convolutional layer với 32 kernel, kích thước kernel 3*3\n", 188 | "# dùng hàm sigmoid làm activation và chỉ rõ input_shape cho layer đầu tiên\n", 189 | "model.add(Conv2D(32, (3, 3), activation='sigmoid', input_shape=(28,28,1)))\n", 190 | "\n", 191 | "# Thêm Convolutional layer\n", 192 | "model.add(Conv2D(32, (3, 3), activation='sigmoid'))\n", 193 | "\n", 194 | "# Thêm Max pooling layer\n", 195 | "model.add(MaxPooling2D(pool_size=(2,2)))\n", 196 | "\n", 197 | "# Flatten layer chuyển từ tensor sang vector\n", 198 | "model.add(Flatten())\n", 199 | "\n", 200 | "# Thêm Fully Connected layer với 128 nodes và dùng hàm sigmoid\n", 201 | "model.add(Dense(128, activation='sigmoid'))\n", 202 | "\n", 203 | "# Output layer với 10 node và dùng softmax function để chuyển sang xác xuất.\n", 204 | "model.add(Dense(10, activation='softmax'))" 205 | ], 206 | "execution_count": 5, 207 | "outputs": [ 208 | { 209 | "output_type": "stream", 210 | "text": [ 211 | "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", 212 | "Instructions for updating:\n", 213 | "Colocations handled automatically by placer.\n" 214 | ], 215 | "name": "stdout" 216 | } 217 | ] 218 | }, 219 | { 220 | "metadata": { 221 | "id": "T37UmXrzRvfT", 222 | "colab_type": "code", 223 | "colab": {} 224 | }, 225 | "cell_type": "code", 226 | "source": [ 227 | "# 6. 
Compile model, chỉ rõ hàm loss_function nào được sử dụng, phương thức \n", 228 | "# đùng để tối ưu hàm loss function.\n", 229 | "model.compile(loss='categorical_crossentropy',\n", 230 | " optimizer='adam',\n", 231 | " metrics=['accuracy'])" 232 | ], 233 | "execution_count": 6, 234 | "outputs": [] 235 | }, 236 | { 237 | "metadata": { 238 | "id": "u8oF7cHeRwoM", 239 | "colab_type": "code", 240 | "outputId": "7db6801e-cd87-4330-9b93-3513b87b55e3", 241 | "colab": { 242 | "base_uri": "https://localhost:8080/", 243 | "height": 440 244 | } 245 | }, 246 | "cell_type": "code", 247 | "source": [ 248 | "# 7. Thực hiện train model với data\n", 249 | "H = model.fit(X_train, Y_train, validation_data=(X_val, Y_val),\n", 250 | " batch_size=32, epochs=10, verbose=1)" 251 | ], 252 | "execution_count": 7, 253 | "outputs": [ 254 | { 255 | "output_type": "stream", 256 | "text": [ 257 | "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", 258 | "Instructions for updating:\n", 259 | "Use tf.cast instead.\n", 260 | "Train on 50000 samples, validate on 10000 samples\n", 261 | "Epoch 1/10\n", 262 | "50000/50000 [==============================] - 18s 351us/step - loss: 0.3167 - acc: 0.9132 - val_loss: 0.0724 - val_acc: 0.9822\n", 263 | "Epoch 2/10\n", 264 | "50000/50000 [==============================] - 16s 317us/step - loss: 0.0637 - acc: 0.9818 - val_loss: 0.0555 - val_acc: 0.9836\n", 265 | "Epoch 3/10\n", 266 | "50000/50000 [==============================] - 16s 315us/step - loss: 0.0403 - acc: 0.9882 - val_loss: 0.0458 - val_acc: 0.9872\n", 267 | "Epoch 4/10\n", 268 | "50000/50000 [==============================] - 16s 314us/step - loss: 0.0278 - acc: 0.9919 - val_loss: 0.0455 - val_acc: 0.9871\n", 269 | "Epoch 5/10\n", 270 | "50000/50000 [==============================] - 16s 313us/step - loss: 0.0183 - acc: 0.9949 - val_loss: 
0.0409 - val_acc: 0.9870\n", 271 | "Epoch 6/10\n", 272 | "50000/50000 [==============================] - 16s 312us/step - loss: 0.0132 - acc: 0.9962 - val_loss: 0.0450 - val_acc: 0.9868\n", 273 | "Epoch 7/10\n", 274 | "50000/50000 [==============================] - 16s 313us/step - loss: 0.0097 - acc: 0.9974 - val_loss: 0.0415 - val_acc: 0.9891\n", 275 | "Epoch 8/10\n", 276 | "50000/50000 [==============================] - 15s 309us/step - loss: 0.0060 - acc: 0.9988 - val_loss: 0.0414 - val_acc: 0.9897\n", 277 | "Epoch 9/10\n", 278 | "50000/50000 [==============================] - 15s 308us/step - loss: 0.0052 - acc: 0.9987 - val_loss: 0.0442 - val_acc: 0.9889\n", 279 | "Epoch 10/10\n", 280 | "50000/50000 [==============================] - 15s 307us/step - loss: 0.0030 - acc: 0.9995 - val_loss: 0.0456 - val_acc: 0.9899\n" 281 | ], 282 | "name": "stdout" 283 | } 284 | ] 285 | }, 286 | { 287 | "metadata": { 288 | "id": "NkITk_WEYc2R", 289 | "colab_type": "code", 290 | "outputId": "efa7428c-1001-4904-af44-35a26a9f047d", 291 | "colab": { 292 | "base_uri": "https://localhost:8080/", 293 | "height": 393 294 | } 295 | }, 296 | "cell_type": "code", 297 | "source": [ 298 | "# 8. 
Vẽ đồ thị loss, accuracy của traning set và validation set\n", 299 | "fig = plt.figure()\n", 300 | "numOfEpoch = 10\n", 301 | "plt.plot(np.arange(0, numOfEpoch), H.history['loss'], label='training loss')\n", 302 | "plt.plot(np.arange(0, numOfEpoch), H.history['val_loss'], label='validation loss')\n", 303 | "plt.plot(np.arange(0, numOfEpoch), H.history['acc'], label='accuracy')\n", 304 | "plt.plot(np.arange(0, numOfEpoch), H.history['val_acc'], label='validation accuracy')\n", 305 | "plt.title('Accuracy and Loss')\n", 306 | "plt.xlabel('Epoch')\n", 307 | "plt.ylabel('Loss|Accuracy')\n", 308 | "plt.legend()" 309 | ], 310 | "execution_count": 8, 311 | "outputs": [ 312 | { 313 | "output_type": "execute_result", 314 | "data": { 315 | "text/plain": [ 316 | "" 317 | ] 318 | }, 319 | "metadata": { 320 | "tags": [] 321 | }, 322 | "execution_count": 8 323 | }, 324 | { 325 | "output_type": "display_data", 326 | "data": { 327 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAe8AAAFnCAYAAACPasF4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd8U+X+B/DPGU060gktU0CKUAQR\nEBEsUlYpFLwOEHpdKCoOVLyogFUvDsBxHQiIE8Et92LlpyIiW0CmoEABERSkrLbQnTTrnN8fGU3a\npARskqb9vF/mlXOeM/LNQ83nnCfjCKqqqiAiIqKQIQa7ACIiIjo/DG8iIqIQw/AmIiIKMQxvIiKi\nEMPwJiIiCjEMbyIiohDD8CYKgKysLPzjH/8Idhn1Rnp6OrZu3Vqjfdq0aZg/f34QKiIKLQxvIj87\nePAgoqOj0bJlS+zatSvY5RBRA8DwJvKzr776CsOGDcPIkSOxdOlSt2VLly5FRkYGMjIy8Pjjj8Nk\nMnlt37p1K9LT053bus7PnTsXTz31FEaPHo1FixZBURQ8++yzyMjIwKBBg/D444/DbDYDAM6ePYv7\n7rsPgwcPxrXXXouNGzdi3bp1GDlypFttN954I1atWlXj+bz55pvIyMjAkCFDcO+996K0tNRZw3PP\nPYeJEydi8ODBGD16NPLz8wEAe/fuxYgRI5CRkYFZs2ZdUD8uX74cI0eOxLBhw3D77bfjr7/+AmA7\nOBo7dixGjBiBoUOH4pNPPqm1nahBUInIbywWizp48GC1rKxM1ev16oABA1Sj0aiqqqoeO3ZM7dOn\nj3rq1ClVURR14sSJ6nvvvee1fcuWLeqQIUOc+3adnzNnjtqvXz/1zJkzqqqq6vfff6+OHDlSNZlM\namVlpTp8+HB16dKlqqqqanZ2tvryyy+rqqqqubm5au/evVWj0aj27t1b3b9/v6qqqnr8+HH1iiuu\ncNbqsGfPHrVv375qWVmZarVa1TvuuEN98803nTX07d
tXzcvLUxVFUSdMmKDOnz9fVVVVHTVqlPrF\nF1+oqqqq3333nZqSkqJu2bKlRn9NnTrVuT9XjnqOHDmiqqqqLliwQB03bpyqqqr60EMPqTk5Oaqq\nquqZM2fU+++/XzUajV7biRoCnnkT+dHGjRtx2WWXQafTISIiAr1798batWsBAJs2bUKPHj3QrFkz\nCIKAV199FXfccYfX9nO5/PLLkZCQAADIyMjAl19+ibCwMGi1Wlx22WU4duwYAGD9+vXOs+xLL70U\nq1evhkajQUZGBpYtWwYAWLVqFQYPHgyNRuP2GF27dsW6deug0+kgiiJ69Ojh3C8A9OrVC61atYIg\nCOjcuTNOnjwJo9GIPXv2IDMzEwAwbNgwREREnFc/btq0CVdddRXatm0LALjpppuwdetWWCwWNGnS\nBCtWrEBubi7i4+Mxf/58aDQar+1EDQHDm8iPcnJysG7dOvTq1Qu9evXCDz/8gK+++goAUFRUhJiY\nGOe6Wq0Wsix7bT+X2NhY5/TZs2cxdepUZGRkYNiwYVi9ejVU+2UMiouLER0d7VxXp9MBAEaMGOEW\n3o6wdWUwGDBjxgznkP5nn33m3C8At/1KkgSr1Yri4mK3xxEEwe35+aJ6n0RHR0NVVRQVFeGxxx5D\nx44d8cgjjyAtLQ2ffvopAHhtJ2oIGN5EflJSUoJt27Zh69at2LFjB3bs2IHt27djz549OHv2LOLj\n41FUVORcv7y8HIWFhV7bHWHo4Hiv2ZPXX38dsizjm2++wffff4+0tDTnsri4OLf95+XlwWw248or\nr4TFYsHatWvx+++/4+qrr66x3w8//BBHjhxBTk4OVqxYgbFjx56zHxwHFeXl5QAARVFQUlJyzu1c\nNWnSxHkQANj6VhRFxMfHIyoqCpMnT8bKlSsxb948zJkzB3/++afXdqKGgOFN5CfLli1Dnz593IZq\nZVlGv3798O233yItLQ07d+5EXl4eVFXF9OnTsWTJEq/tiYmJKCgowJkzZ2C1WvHNN994fewzZ86g\nY8eO0Gg0OHDgAHbt2gW9Xg8AGDRokPPs/9ChQ7jxxhthtVohiiIyMzPx/PPPY9CgQQgLC/O43/bt\n2yMqKgrHjx/H+vXrnfv1Jjw8HCkpKVi5cqWzX4xG43n1ZWpqKnbs2OEcov/iiy+QmpoKWZZx3333\n4ffffwcAdOzYETqdDoIgeG0nagjOPRZHRBdk6dKlGDduXI329PR0zJ8/H7fffjuee+45jBs3DpIk\n4bLLLsOdd94JrVbrtX3UqFG4/vrr0bJlS1x33XXYv3+/x8ceP348pk6dipycHPTq1QtTp07Fk08+\niW7duuHxxx/H1KlTMWjQIERFReGVV15BeHg4ANvQ+cKFCz0OmQO276s//PDDyMjIQKdOnTBt2jQ8\n9NBDWLRoUa198cwzzyA7OxvvvPMO+vfvj+TkZK/rfvTRR/j666+d8wMGDMC0adMwY8YMPPDAAzCb\nzWjdujWef/55AMCtt96KRx991Plp+ptvvhnt2rXz2k7UEAiqyut5E5FNYWEhbrjhBqxbtw6SJAW7\nHCLygsPmROQ0Z84c/POf/2RwE9VzDG8iQmFhIQYPHozCwkKMHz8+2OUQ0Tlw2JyIiCjE8MybiIgo\nxDC8iYiIQkzIfFWsoKCsTvcXHx+JoqLav59KdYN9HRjs58BgPwcG+9kmMTHaY3ujPfOWZX6aNlDY\n14HBfg4M9nNgsJ9r12jDm4iIKFQxvImIiEIMw5uIiCjEMLyJiIhCDMObiIgoxDC8iYiIQgzDm4iI\nKMQwvImIiEKMX8P74MGDGDJkCD755JMay3766SeMHj0aY8eOxZtvvunPMoiIiBoUv4W3Xq/H888/\nj759+3pcPmPGDMydOxeff/45Nm3ahEOHDvmrFCIiogbFb79trtFo8N577+G9996rsezYsWOIjY1F\nixYtAABpaWnYvH
kzOnTo4K9yiIiCRlVVwH5TFcU5DVWBqlRfptinXdZRVUCpvr59PbdlVfu2tdvn\n65IgBGQ/JacjoS+qcNbv2oeAavtPVQAVVc9RVeyLHeuozuVqrW2w9yegVpt3bKM6toO9v+HYHs55\nOS4O0X2uhlBXfVQLv4W3LMuQZc+7LygoQEJCgnM+ISEBx44dq3V/8fGRdf5bt95+8J3qHvv6/Dhe\neBWLBarVWnWzWKFaLS7TVigWC6AoKC08Do3ieOF3eRFXXF/kq4JAdQsJxXnv1ua6ruJ4AasKkHM/\njuJ80XUur/44qFYD1JqPBfc6qrbx0qYotn6sUR/cnzfc+8LTc69e3xH1HP3h4TnXeYA2AnnBLuAC\ntUnri7DYGL8/TshcVayury6TmBhd51cqI88C2deqokC1WgGrxRZujvCzWGyhZ7EAriFYbTmsLttY\nrbZ5lxusVvfHsFqhWhWX9Sy2F+zq2zimler7U5xhDPt+HdtQHRIE500QBEC0v2MoiBBE+zIIgGhf\nLgiAIAICIAiibV4UIAgipDAJVhUQBMG+rVi1X/u+vbZ7mXZdTxBF57ZV9djXcey3ert9XdvzEd2f\nr1iH7476cBCi1tGBSmREGPQGs8u/h/1s1rVPIQCCS5vLvPP5A85/S8c2tu3hsg+XNvvfgWNfQrV5\nCKK9FJe67PNyXByKTSJQh6933k58ghLeSUlJKCwsdM6fPn0aSUlJwSiFLpCqqraQM5mgmExQTUYo\nRiMsRhOslSZYjEYoRjMsRiNKtDLKy/SAxR6oLgEGi8UWfoojSJWqELUHqS0wrVVhrCj2cLa4hZ9q\ntdjPlGpU6/V5eB3cuoAXoFoHyiQRgiRBECXnNCQJohwGaCJty+w3URJt64miWztE0baNZN+H6LpM\nQqROC0Olxf3FzRkGVfOi/cW/aj3XeTgDQZQcL1j2MHKEnFAVPII90Dw+lmOZPWhcH8vxQqsKAlR7\nzykQYDvZFez/agIUOEY5HcscI56Cc7RURdXosLNNVaGoQtW0otq2t58tK4rtbFqxn2mryrnXccxr\ntWGorDQ7h1cdI6m2YVTXP5+qecefoMdt7NOqy99d1X6q/g6r9mXfxjGqW22fzmHfats5R3IF+4GH\n849WcMkfwbmu4DJtu3eEWdU2znBzZqvgeX3X9ew7dVvP8TgudUVGaKBXTc5+cfxbOp6Tqqhuz9e1\nz53r2pdXjY471nXZj+pYrrqMwLts41gOl/Wc67r8e6kqInVFyLypNcLC/H9FtKCEd+vWrVFeXo68\nvDw0b94ca9euxSuvvBKMUgJOdf0jVAHF+f6U+7Iaf1T2eatVhaIoUKwqrFYFiqJCsbdZrSoUqwKr\n2QqryQSryQyL0QzFbIHVbLG1m82wWqxQzFZYLQqsVisUi2Lbzr5f2/7sN9vIp+0ethdWBYL9hVWE\nKohQBNu9Knj7gzXB9qf2N/7cRAAa+60xUAFY7beAU4LxoHSBqoegW0C6rOcMQfuMW/CQG+fBi/ME\n3XGA4umgo+rgRQ6TAtahglpXYxzV7N27Fy+99BKOHz8OWZbRrFkzDBo0CK1bt0Z6ejq2b9/uDOyh\nQ4firrvuqnV/dTnsWlZSia3r/0R5aaXzqMwZpo6jOZcjK1XxEKZAjXb3e5f9uB29hRhVhQgFgqpA\ntJ3/QBQcN1TdRMF5kyQBoiRClASIkgSNRobZqjjP0JzDlG7Dge5nh+7Dm87D/VpOoj0vuJA+r32b\nunscb89FrWWkwPs2gFYjw2iyeFhP9TDlPlNjt16eUG3P09tLiddtVNV2hi/a/iZE57Tt78kxLC06\n7u1tHtdxXVZ9HZd9VF/nQh6vSRMdzp6tcDmTFVzOLuF29mq7d5+3t7qOAjs3rn6W6r5dzeCoS+5h\n7uFs1b7Q2+iC22uc61lstbPcqser/THj46NQXKR3GdmuCkvXfnA9Y3dO1xg5cLS5
rlsVum5BXM94\nGzb3W3jXtboM7/yTpfj6819hNlmd/3CuR1qi/f2N6stsOSNUHdUKcL4gONeB+7quR2sCHC8g1f7o\nqj+OoxbFCkthAcynT0ExGGznu6oCUVVs5701AlOCKIuQJBGSLEGSRYiyBClMhhgmQ3bca2RIGg1E\njQxZq4GkCbPdazWQw7WQwrXOe0mr+dt/0Px8QWCwnwOD/RwY7GebevWed7AltYjBtFnDUVhYHuxS\nPKr86yiK165G2dYtUE0mCLIM3ZW9Edd/IDQtWkDQaCDIct1+EIWIiEJGowxvoP4NjyhmE8p37EDx\nujWoPGz7wRq5aVPEpQ1CTL9+kKP9/9UDIiIKDY02vOsLc2EBitevQ+mGH2EtLwMEAVGXdUPswEGI\n6tqNZ9dERFQDwzsIVEWBPncviteuRsWe3bYPhel0iB+Widi0AdAk8mtzRETkHcM7gKzl5SjZtAEl\n69bAXFAAAAhv3x5xAwZDd+WVEMMay/egiIjo72B4B4Dhjz9Qsm41yrZthWqxQNBoENOvP+IGDEJ4\nu3bBLo+IiEIMw9tPFKMRZdu3oXjdGhiP/AkACEtqhrgBgxCT2g9SVFSQKyQiolDF8K5jptOnULJu\nLUo2bYSir7B9AK17D8QNHIzIzpfyA2hERPS3MbzrgKooqNj9K4rXroY+dy8AQIqOQcKIaxHbfwDC\nmjQJcoVERNSQMLz/BktpKUo3/ojidWthOXsGABBxSUfEDhgEXc8rIIaFBblCIiJqiBje50lVVVQe\nOoTidatRtmM7YLVC0GoRmzYQcQMGQXvRRcEukYiIGjiGt4+UykqUbt2CknWrYTx2DACgadESsQMH\nIabP1ZAiI4NcIRERNRYM73MwnjiBknVrULp5ExSDARBF6K7ohbiBgxHRKaXe/cwqERE1fAxvD1SL\nBeW/7kLx2jUwHNgPAJDi4tAkPQOx/dMgx8UHuUIiImrMGN4uLMVFKPlxPYp/XAdrcTEAICKlM+IG\nDIKuew8IMruLiIiCr9GnkaqqMPx2AMXr1qB8107AaoUYEYG4QUMQO2AgtC1bBbtEIiIiN402vC16\nPYrWrELJ2jUwnTwBANC0ao24QYMRc1VfiOHhQa6QiIjIs0YZ3sa8Y9jx0ixYDQZAkhDduw/iBg5G\neIcO/AAaERHVe40yvIUwDXQdkiF36ITYfv0hx8YGuyQiIiKfNcrw1jRrhq4znkVBQVmwSyEiIjpv\nvEoGERFRiGF4ExERhRiGNxERUYhheBMREYUYhjcREVGIYXgTERGFGIY3ERFRiGF4ExERhRiGNxER\nUYhheBMREYUYhjcREVGIYXgTERGFGIY3ERFRiGF4ExERhRiGNxERUYhheBMREYUYhjcREVGIYXgT\nERGFGIY3ERFRiGF4ExERhRiGNxERUYhheBMREYUYhjcREVGIYXgTERGFGIY3ERFRiJH9ufNZs2bh\n119/hSAIyM7ORrdu3ZzLPv30U3z99dcQRRFdu3bFk08+6c9SiIiIGgy/nXlv27YNR48exeLFizFz\n5kzMnDnTuay8vBwLFizAp59+is8//xyHDx/GL7/84q9SiIiIGhS/hffmzZsxZMgQAEBycjJKSkpQ\nXl4OAAgLC0NYWBj0ej0sFgsMBgNiY2P9VQoREVGD4rfwLiwsRHx8vHM+ISEBBQUFAACtVouJEydi\nyJAhGDhwIC6//HJcfPHF/iqFiIioQfHre96uVFV1TpeXl+Odd97B999/D51Oh3HjxuHAgQNISUnx\nun18fCRkWarTmhITo+t0f+Qd+zow2M+BwX4ODPazd34L76SkJBQWFjrn8/PzkZiYCAA4fPgwLrro\nIiQkJAAAevXqhb1799Ya3kVF+jqtLzExGgUFZXW6T/KMfR0Y7OfAYD8HBvvZxtsBjN+GzVNTU7Fi\nxQoAQG5uLpKSkqDT6QAArVq1wuHDh1FZWQkA
2Lt3L9q1a+evUoiIiBoUv5159+zZE126dEFWVhYE\nQcD06dORk5OD6OhopKen46677sLtt98OSZLQo0cP9OrVy1+lEBERNSiC6vpmdD1W18MnHJIJHPZ1\nYLCfA4P9HBjsZ5uAD5sTERGRfzC8iYiIQgzDm4iIKMQwvImIiEIMw5uIiCjEMLyJiIhCDMObiIgo\nxDC8iYiIQgzDm4iIKMQwvImIiEIMw5uIiCjEMLyJiIhCDMObiIgoxDC8iYiIQgzDm4iIKMQwvImI\niEIMw5uIiCjEMLyJiIhCDMObiIgoxDC8iYiIQgzDm4iIKMQwvImIiEIMw5uIiCjEMLyJiIhCDMOb\niIgoxDC8iYiIQgzDm4iIKMQwvImIiEIMw5uIiCjEMLyJiIhCDMObiIgoxDC8iYiIQgzDm4iIKMQw\nvImIiEIMw5uIiCjEMLyJiIhCDMObiIgoxDC8iYiIQgzDm4iIKMQwvImIiEIMw5uIiCjEMLyJiIhC\njE/h/eOPP/q7DiIiIvKRT+H98ccfIz09HXPmzMHx48f9XRMRERHVQvZlpffeew8lJSVYuXIlnnnm\nGQDAjTfeiKFDh0KSJH/WR0RERNX4/J53bGwsRowYgZEjR6KsrAwffPABrrvuOvzyyy/+rI+IiIiq\n8enMe/v27cjJycHWrVuRnp6OmTNnIjk5GXl5eXjwwQexdOlSj9vNmjULv/76KwRBQHZ2Nrp16+Zc\ndvLkSUyePBlmsxmXXnopnnvuubp5RkRERA2cT2fer732Gvr06YPvv/8eTzzxBJKTkwEArVu3xvDh\nwz1us23bNhw9ehSLFy/GzJkzMXPmTLflL774IsaPH48lS5ZAkiScOHHibz4VIiKixsGn8J49ezZK\nSkqg0WgAAK+//jpOnz4NALj33ns9brN582YMGTIEAJCcnIySkhKUl5cDABRFwc8//4xBgwYBAKZP\nn46WLVv+vWdCRETUSPg0bJ6dnY1Ro0Y55zt16oTs7GwsWLDA6zaFhYXo0qWLcz4hIQEFBQXQ6XQ4\ne/YsoqKi8MILLyA3Nxe9evXCo48+WmsN8fGRkOW6/XBcYmJ0ne6PvGNfBwb7OTAaaz+vWLECGRkZ\nPq07c+ZM3H777bjooos8Lr///vvx1ltv1bqP2vr5qquuwtatW32qpSHyKbyNRiMyMzOd85mZmfji\niy/O64FUVXWbPn36NG6//Xa0atUKEyZMwLp16zBgwACv2xcV6c/r8c4lMTEaBQVldbpP8ox9HRjs\n58BorP188uQJ5OQsRc+eV/u0/oQJDwOA17567rmXa+3Hc/WzqqqN4t/B2wGMT+EtCAJ+/PFH9O7d\nG4qiYMOGDefcJikpCYWFhc75/Px8JCYmAgDi4+PRsmVLtGnTBgDQt29f/P7777WGNxERBc9rr72E\n/ftzsXDhe1AUBSdOHMfJkycwe/Z8vPDCcygoyIfBYMD48ROQmnoNHnxwAiZPnoK1a1ejoqIcf/11\nFMeP5+Hhhx9F376pGDFiMJYtW40HH5yAK6+8Cjt37kBxcTFeeul1NG3aFI888giOHj2Gyy7rhjVr\nVuGrr77zWNfhw4fw2msvQRAEREZG4amnnoEoSvj3v6fBZDLBbDZj8uSpaNWqdY22Tp1SAtyLdcen\n8J4xYwamT5+OSZMmQRAE9OzZEzNmzKh1m9TUVMydOxdZWVnIzc1FUlISdDqd7UFlGRdddBGOHDmC\ndu3aITc3FyNGjPj7z4aIqBH475pD2H4gv073eWVKEsYM6uB1+T//eRtycv6LO++8BwsWvAOLxYz5\n899HUdFZ9O7dB8OHj8Tx43l4+ulpSE29xm3b/PzTeOWVOdiy5Sf83/99ib59U92WR0VF4Y033sJb\nb83Fjz+uQcuWrWE0GvHuu4uwadMG/Pe/n3ut6403XsEDD0xCly5d8dlnH+N///sCHTpcgsTEJDzx\nxL9x/Hge
jh37C6dOnajRFsp8Cu+2bdti0aJFbm0rVqxwnjl70rNnT3Tp0gVZWVkQBAHTp09HTk4O\noqOjkZ6ejuzsbEybNg2qqqJjx47OD68REVH917mz7TNN0dEx2L8/F19/nQNBEFFaWlJj3W7dugOw\njcg6Prjs6vLLeziXl5SU4OjRP9GzZ08AQN++qbX+GNiRI3+iS5euAICePXth4cJ3cd11o/Dee2/h\nP/+ZhbS0QejT52oUFhbWaAtlPoX3iRMn8Mknn6CoqAgAYDKZsHXr1nN+cOGxxx5zm09JqRqiaNu2\nLT7/3PvRFBEReTZmUIdaz5IDISwsDACwcuX3KC0txZtvvo/S0lLcffdtNdZ1DV/Xzz95W66qKkTR\n9mUoQRAgCIJPNVksZoiiiKZNm2LRos+xc+cOfPXVEuTm7sGdd97jsS1U+fRVsSlTpiAuLg6//PIL\nunbtiqKiIrz88sv+ro2IiOoJURRhtVprtBcXF6NFi5YQRRHr16+B2Wz+24/VqlVr7N27FwCwbdsW\nj4/rcPHFydi7dzcAYNeunejUqTO2b9+K7du3onfvPvjXvx7HgQP7PLaFMp/OvCVJwoQJE7Bhwwbc\ncsstGD16NCZPnoyrrw7tYQciIvJN27YX47ffDmDOnFcRFaVztg8YMAjTpk3Gvn17MWLEP5CUlISF\nC9/7W4919dXXYOXK73D//XehR48rEBMT63XdRx55zPmBtejoaGRnT0dpaSmee+5pfPrphxBFEXfd\ndS+SkprVaAtlguppDKOarKwszJ49G1OmTMHMmTPRsmVLjBo1yuvPovpDXX8loLF+3SMY2NeBwX4O\nDPaz/5WWluDQoVz07Hk1CgryMWnS/fjssy+DXVZQ/K2vit19993YvHkz7rrrLlx33XWQJAkjR46s\n0wKJiIgAIDIyCsuXL8fbb78LVVXw0EOTg11SveNTeHfr1g1JSUkAbL9ZXlFRgdhY78MYREREF0qW\nZcyePZsjHLXw6QNrrp8al2WZwU1ERBREPp15t2vXDlOmTEGPHj2cXw8AgNGjR/utMCIiIvLMp/A2\nm82QJAm7d+92a2d4ExERBZ5P4f3CCy/4uw4iIiLykU/veaelpWHAgAE1bkRERNWNHn0t9Ho9Pv54\nkfMHVBz0ej1Gj7621u3XrVsNAPjuu2+wfv3aC65jwYJ38OWXiy94+/rMpzPvzz77zDltNpuxefNm\nVFZW+q0oIiIKfbfddsd5b3Py5AmsWrUCN910PTIzaw/5xsyn8G7VqpXbfLt27XDXXXfhzjvv9EtR\nRERUv4wffwtmzXoVzZs3x6lTJ5Gd/Tjmzn0bzz77FAwGAyorK/Gvfz2OSy/t6txm5sxnMGDAYHTv\n3gNPPjkFJpPJeZESAPjhh+VYsmQxJElEu3bJmDr1SeelR+fNm4fy8krExcVh1KixmD//DezZ8yss\nFitGjRqDYcNGeLycaPPmzT3W72n75cu/RU7OfyHLYejQoSMefXSqx7b6yKfw3rx5s9v8qVOn8Ndf\noX05NSKiUJVz6Fvsyt9Tp/vskXQZbuzg/ce3+vcfiE2bfsSoUWOwYcN6DBgwCGfOnMHIkdejf/8B\n+Pnn7fj00w8xc+Z/amy7YsVytG+fjIcffhSrV/+AVatWAAAMBgNefXUuoqOjMXHiPTh8+JDz0qMP\nPvggXnzxFQDAL7/sxB9/HMZbb30Ag8GAceOy0L//AAA1Lyc6ZszNNR7f2/ZffPEJXn55Npo1a45l\ny76G0VjpsU2rDa+DHq5bPoX3/PnzndOCIECn0+HZZ5/1W1FERFS/9O8/EPPmzcaoUWOwceN6PPro\nNCQkNMGHH76Pzz//GGazGeHhnkPuyJE/0L37FQCAHj2ucLbHxMTgiSceBQAcPfonSkqKPW5/4MA+\ndO9uu0RoREQE2rVrj2PHjgGoeTnR89l+yJAMZGc/joyM4RgyJANabbjHtv
rIp/D++OOPUVZWhuho\n22+sFhYWomnTpn4tjIiIPLuxw8haz5L9oX37ZJw5U4DTp0+hrKwMbdq0xQcfvIumTZPw9NPP48CB\nfZg3b7bHbVUVEEXbZT0VxXY5DbPZjNdeexmLFn2GJk2aYsqUR7w+tiAIcL0Kh+3Sn7b9netyo7Vt\nf9ttdyI9fTjWrVuFhx++H2+++a7HttjYOJ/6KJB8+rT5p59+iqlTq8b9J0+ejE8++cRvRRERUf3T\nt28/vPvufFxzTRoAoKSkGK1atQYArF+/FhaLxeN2bdq0xYED+wEAO3fuAADo9RWQJAlNmjTF6dOn\ncODAflgsFo+XHk1J6YJdu34XWr84AAAgAElEQVS2b6fH8eN5aN26jc91e9v+nXfeRNOmTZGVdSu6\ndr0Mp06d8thWH/kU3l9//TXmzJnjnP/ggw/w7bff+q0oIiKqf9LSBmLVqhUYMGAwAGDYsBFYvPhT\n/OtfE9GlS1ecOXMGy5Z9XWO7YcNGIDd3DyZNuh/Hjh2FIAiIjY3DlVdehbvvvh0LF76Hm2++DXPm\nvOa89OisWbOc219+eXd06pSCiRPvwb/+NRH33fcgIiIifK7b2/aRkVG49947MWnS/RAEAZdc0tFj\nW33k0yVBR48ejSVLljjnrVYrbr75ZixeHLjvz/GSoKGLfR0Y7OfAYD8HBvvZ5m9dEnTQoEHIysrC\nFVdcAUVRsGXLFgwdOrROCyQiIiLf+BTeDzzwAHr37o3du3dDEARMnz4d3bt3P/eGREREVOd8Cu/8\n/Hzs27cP48ePBwC8/vrraNGiBZo1a+bX4oiIiKgmnz6w9sQTT7h9NaxTp07Izs72W1FERETknU/h\nbTKZkJmZ6ZzPzMyE2Wz2W1FERETknU/hDQA//vgjKisrodfrsWLFCn/WRERERLXw6T3vGTNmYPr0\n6Zg0aRJEUUSPHj0wY8YMf9dGREREHvgU3m3btsWiRYvc2nbs2IE2bXz/hRsiIiKqGz6Ft8OpU6fw\n1Vdf4auvvkJUVBS++uorf9VFRET1SEVFeY3Lf1ZUlOOdd+ZDFEUMGTIUY8bcjO3bt9RoGz36Wnz0\n0WJERkZi3rzZaN8+GQCwZctPKCwswLPPzsIXX3yCfftyYTKZcP31ozB+/G04deokZsyYDkVR0Lx5\nC0ya9CjuvXc8Pv/8SwiCgB9+WI7fftuPhx6aHOTeCbxzhrfJZMLKlSvx5Zdf4pdffkFYWBjefvtt\n9OjRIxD1ERFRNQX/+wJlO7bX6T6je12JxJuyvC73dPnPw4cP4a23PnBeHey6627Eq6++VKPNm9On\nT+Httz+AyWRC8+Yt8dBDk2E0VmLMmOsxfvxtePfd+cjKugX9+qVh/vw3kJeXhw4dOmDv3t247LLL\nsWHDetxyy+112g+hotbwfuaZZ7B8+XIkJyfjhhtuwJw5c3DrrbcyuImIGpnql/+srDRAo9EgPj4e\nAPDyy7NRVHS2RlttOne+FIIgQKvVorS0BPfdNx6yLKO4uAgAcPDgAUyaZLtk6AMPTAJg+5301at/\nQErKpTh58gRSUi7111Ou12oN7+XLlyMpKQk33XQThg0bhoiICAiCEKjaiIjIg8Sbsmo9S/aH//73\nM7fLf86a9azz8p4OoijWaAPglhuuVx6T5TAAwK5dP2Pnzh2YN+9dyLKM9PRrvO6vT59UvPfe2/j5\n5+24+up+dfb8Qk2tXxXbsGED7r//fnzzzTfo378/nnzySej1+kDVRkRE9UT1y39GRkZBUawoKMiH\nqqqYMuURiKJUo62srAyRkVE4c6YQVqsVubl7PO47KakZZFnGxo3rYbUqMJlMSEm5FDt32t4eeP/9\nt7F9+1bIsozu3XtgwYK3MXTo8ID2QX0iPfPMM894XShJuOSSS3DdddchMzMTeXl5yM3NxZIlS6Cq\nKrp16xawQvV6U53uLypKW+f7JM/Y14
HBfg6MxtrPcXFxeOutOdi4cT3S0gZi7do1uO22cXj77Xn4\n9tv/Q2rqNejZsxfatWuP//znBbc2SZIwe/Z/8PPP29C8eQs0b94CAFBUVITevfugadMk/O9/n+GH\nH5YjPj4BWq0W27Ztwbhxd+P999/BsmX/B1VVMGbMzRBFEZGRUdi2bQtuvXVckHvF/6KitB7bfbok\nqCtVVbFx40Z8+eWXmD279vcz6hIvCRq62NeBwX4ODPZzYNTWzwsWvIPmzVtgxIh/BLiqwLugS4Ke\nOHHCY3tycjKmTJniXN6yZcu/WR4REdG5Pf74JGi1Wtxxx93BLiWoag3vqVOnQhAEeDo5d7QLgoCP\nPvrIbwUSERE5/Oc/bwS7hHqh1vD++OOPA1UHERER+cinC5Ps3bsXa9euBWC7lve4ceOwY8cOvxZG\nREREnvkU3jNmzMDFF1+MHTt2YM+ePXj66acxZ84cf9dGREREHvgU3lqtFu3atcPq1asxZswYdOjQ\nAaLo89VEiYiIqA75lMAGgwHLly/HqlWr0K9fPxQXF6O0tNTftRERUQgaPfpa6PV6fPzxIuzdu9tt\nmV6vx+jR19a6/bp1qwEA3333DdavX+u3OkOZT+E9efJkfPPNN/jXv/4FnU6Hjz/+GHfccYefSyMi\nolB22213oGvX8/sxr5MnT2DVqhUAgMzMa5GWNtAfpYU8ny4J2qdPH3Tt2hU6nQ6FhYXo27cvevbs\n6e/aiIionhg//hbMmvUqmjdvjlOnTiI7+3HMnft2jcuEXnppV+c2M2c+gwEDBqN79x548skpMJlM\n6Natu3P5Dz8sx5IliyFJItq1S8bUqU/itddewv79uZg3bx7KyysRFxeHUaPGYv78N7Bnz6+wWKwY\nNWoMhg0bgQcfnIArr7wKO3fuQHFxMV566XU0b97cuf/8/NN4/vl/A7D9pvpTTz2LVq1a4/vvl2HJ\nksUQBAFZWbdg8OChHttGjBiMZctsowBPPTUFN944Brt2/YwTJ47j5MkTmD17Pl544TkUFOTDYDBg\n/PgJSE29BgcPHsCrr74EURTQtevlGDHiH3j55ZmYP/99AMCHHy5AZGQUbvobv0/vU3g///zzSElJ\nQXp6OrKystC1a1d8/fXXeO655y74gYmI6ML8tOYw/jiQX6f7bJ+ShKsHJXtd3r//QGza9CNGjRqD\nDRvWY8CAQR4vEzpz5n9qbLtixXK0b5+Mhx9+FKtX/+A8szYYDHj11bmIjo7GxIn34PDhQ/jnP29D\nTs5/8eCDD+LFF18BAPzyy0788cdhvPXWBzAYDBg3Lgv9+w8AAERFReGNN97CW2/NxY8/rsGYMTc7\nH/fMmULceec96NmzF7799v+Qk/M/3HXXBCxa9D4+/PBzmExmzJw5HX37ptZoGzx4qNe+sFjMmD//\nfRQVnUXv3n0wfPhIHD+eh6efnobU1Gswe/YrePzxbHTocAmef/7fCA8Ph9lsQn7+aSQlNcNPP23E\nCy+8ciH/TE4+hfe+ffvw9NNP4/PPP8cNN9yAiRMnYty4hv+bskREZNO//0DMmzcbo0aNwcaN6/Ho\no9NqXCY0PDzc47ZHjvyB7t2vAAD06HGFs91xzW8AOHr0T5SUFHvc/sCBfeje3TbaGxERgXbt2uPY\nsWMAgMsvt12iOikpCSUlJW7bJSQ0wezZr2DBgndQVlaKTp0648iRP9GmTTtoteHQasPx4ouvYd++\nvTXaatO5cxcAQHR0DPbvz8XXX+dAEESUltoe/6+/jqJDh0sAAE8/bTvJHTo0E2vWrMSQIRmIitIh\nIaFJrY9xLj6Ft+MX1tatW4dHHnkEAGAyNb4f5iciqg+uHpRc61myP7Rvn4wzZwpw+vQplJWVoU2b\ntvjgg3fdLhM6b57n612oKiCKtsuCOi7xaTab8dprL2PRos/QpElTTJnyiNfHtv2iZ9W8xWJ27k+S\nJJ
fHcf810AUL3sFVV/XB9dePxtq1q/DTTxshihJUVXFbz1Nbda6XMg0Ls13KdOXK71FaWoo333wf\npaWluPvu2+z7q/lxsiFDMvDUU1MQHh6B9PSMWh/LFz59YO3iiy9GZmYmKioq0LlzZyxduhSxsbHn\n3G7WrFkYO3YssrKysHv3bo/rvPrqq7jtttvOr2oiIgq4vn374d135+Oaa9IA1LxMqGvAuWrTpi0O\nHNgPANi50/YDX3p9BSRJQpMmTXH69CkcOLAfFosFoijCarW6bZ+S0gW7dv1s306P48fz0Lp1m3PW\nW1xsq892Qa31MJvNaNu2Hf766yj0ej2MRiMeeeQBj22On/+urKxEZWUlDh78zeP+W7RoCVEUsX79\nGpjNZgBAu3YXIzd3LwDghReew5EjfyI+Ph4xMTFYseK7OvkQnk9n3jNmzMDBgweRnGw70uvQoQNe\nfvnlWrfZtm0bjh49isWLF+Pw4cPIzs7G4sWL3dY5dOgQtm/f7jyKISKi+istbSDuu288Fi36HAAw\nbNgIzJgxHWvXrsKoUWOwatUPWLbs6xrbDRs2AtnZj2HSpPvRrVt3CIKA2Ng4XHnlVbj77tvRocMl\nuPnm2zBnzmuYO/cd/PbbAcyaNQuiqAEAXH55d3TqlIKJE++BxWLBffc9iIiIiHPWe911N+L11/+D\n5s1bYvTosXj55ZnYs+dX3HXXfXjkkQcAAGPH3oyIiIgabYIg4PrrR2PChHFo1649OnXqXGP/AwYM\nwrRpk7Fv316MGPEPJCUlYeHC9zBp0mN45ZUXAABdulyGdu0utq8/GJs2bUBkZNQF9L47ny4JWlFR\ngUWLFmHPnj0QBAHdu3fHuHHjvL6/AQBvvPEGWrZsiZtuugkAMGzYMCxZsgQ6nc65zt1334177rkH\n8+bNO+fvqPOSoKGLfR0Y7OfAYD8HRkPs5xkzpiMz81r07NnL5228XRLUp2Hzp59+GuXl5cjKysKY\nMWNQWFiIp556qtZtCgsLER8f75xPSEhAQUGBcz4nJwe9e/dGq1atfCmBiIgoJBmNRkyYcAeioqLO\nK7hr49OweWFhIV57rerTdwMHDjzv96ldT/CLi4uRk5ODhQsX4vTp0z5tHx8fCVmWzr3iefB2REN1\nj30dGOznwGA/B0bD6edofPXVl3W6R5/C22AwwGAwON9jcLypX5ukpCQUFhY65/Pz85GYmAgA2LJl\nC86ePYtbbrkFJpMJf/31F2bNmoXs7Gyv+ysq0vtSqs8a4pBMfcW+Dgz2c2CwnwOD/Wzj7QDGp/Ae\nO3Yshg8fjq5dbb+ck5ubi0mTJtW6TWpqKubOnYusrCzk5uYiKSnJ+X73sGHDMGzYMABAXl4ennji\niVqDm4iIiKr4FN6jR49GamoqcnNzIQgCnn766XN+wKxnz57o0qULsrKyIAgCpk+fjpycHERHRyM9\nPb1OiiciImqMfApvAGjRogVatGjhnPf2vW1Xjz32mNt8SkpKjXVat259zgMBIiIiqnLBF+X24Rtm\nRERE5AcXHN6CINRlHUREROSjWofN09LSPIa0qqooKiryW1FERETkXa3h/dlnnwWqDiIiIvJRreHN\nXz8jIiKqfy74PW8iIiIKDoY3ERFRiGF4ExERhRiGNxERUYhheBMREYUYhjcREVGIYXgTERGFGIY3\nERFRiGF4ExERhRiGNxERUYhheBMREYWYRhneqqri9Fl9sMsgIiK6II0yvPcdLcLdM1di9c95wS6F\niIjovDXK8L4oSYfoSA3+t+4Q8osNwS6HiIjovDTK8I6J1GDCDZfBZFbw4fIDUFU12CURERH5rFGG\nNwCk9WiFy5ObYP/RIvz464lgl0NEROSzRhvegiDg9mEpiNBK+O/aQzhbWhnskoiIiHzSaMMbAOKj\ntRg76BIYjFZ8tOI3Dp8TEVFIaNThDQDXdGuBzm3jsfvwGWzZdzrY
5RAREZ1Tow9vQRBwx/AUaMJE\nfLbyIEoqTMEuiYiIqFaNPrwBIDEuAqPSklFRacGnKw8GuxwiIqJaMbztBl/RGh1axWLHgXz8/FtB\nsMshIiLyiuFtJwoC7sxMgSyJ+OSH31BuMAe7JCIiIo8Y3i5aNInCdf3aoaTChMWrfw92OURERB4x\nvKsZdlUbtG0WjU17T2HPH2eCXQ4REVENDO9qJFHEnZkpkEQBH35/AAajJdglERERuWF4e9CmWTQy\n+7TF2VIjlqw7HOxyiIiI3DC8vRh5dTu0bBqFtbuO47e/ioJdDhERkRPD24sw2TZ8LgjAwuUHYDRb\ng10SERERAIZ3rZJbxmLolRchv8iA/9vwZ7DLISIiAsDwPqfrr2mPpPgIrNj+F/44URrscoiIiBje\n56INk3Dn8BSoKrDwu/0wW5Rgl0RERI0cw9sHndrEY0CPVjheWIFlm48EuxwiImrkGN4+umlAMhJi\ntFi2+Sj+Ol0W7HKIiKgRY3j7KEIrY9ywFFgVFQuXH4BV4fA5EREFB8P7PFzWvgmu7tocR0+VYcW2\nY8Euh4iIGimG93nKGnwJYqI0WLrhT5w8UxHscoiIqBFieJ8nXUQYbhvaERargoXLD0BR1WCXRERE\njQzD+wJc0SkJvTol4lBeCdb8nBfscoiIqJFheF+gW4Z2QlS4jCXrD6Og2BDscoiIqBFheF+g2CgN\nbh7SESazgg+/PwCVw+dERBQgfg3vWbNmYezYscjKysLu3bvdlm3ZsgVjxoxBVlYWnnjiCSgh+NWr\nPl2aoVtyE+w7UoQNu08GuxwiImok/Bbe27Ztw9GjR7F48WLMnDkTM2fOdFv+73//G3PmzMEXX3yB\niooKbNiwwV+l+I0gCLg9oxMitBIWr/kdRWXGYJdERESNgN/Ce/PmzRgyZAgAIDk5GSUlJSgvL3cu\nz8nJQfPmzQEACQkJKCoKzWtmJ8SE46aBHWAwWvHxit84fE5ERH4n+2vHhYWF6NKli3M+ISEBBQUF\n0Ol0AOC8z8/Px6ZNmzBp0qRa9xcfHwlZluq0xsTE6DrZz+ghnfDLoTP45VAh9ueVIq1n6zrZb0NS\nV31NtWM/Bwb7OTDYz975Lbyr83RGeubMGdx3332YPn064uPja92+qEhfp/UkJkajoKDufqP85sEd\ncODIWbydsxutEyIQE6Wps32Hurrua/KM/RwY7OfAYD/beDuA8duweVJSEgoLC53z+fn5SExMdM6X\nl5fjnnvuwSOPPIJ+/fr5q4yASYqPxI1pySg3mPHZqoPBLoeIiBowv4V3amoqVqxYAQDIzc1FUlKS\nc6gcAF588UWMGzcO/fv391cJATfkitZIbhWDbfvzsetgQbDLISKiBspvw+Y9e/ZEly5dkJWVBUEQ\nMH36dOTk5CA6Ohr9+vXD0qVLcfToUSxZsgQAMHLkSIwdO9Zf5QSEKAq4c3hnPLNwGz764Td0bBOH\nqPCwYJdFREQNjKCGyMej6/q9D3++n/LtT0eQ8+Mf6HdZC4wf0dkvjxFK+N5VYLCfA4P9HBjsZ5uA\nv+fdmA27qg3aJOmwcc9J7P3zTLDLISKiBobh7QeyJOLOzM4QBQEfLv8NBqMl2CUREVEDwvD2k7bN\no5HZtw3OlFbiy/WHg10OERE1IAxvP7r26ovRokkk1uw8joPHioNdDhERNRAMbz8Kk23D5wKAhd/t\nh8lsDXZJRETUADC8/axDq1ikX3kRThcZsHTjn8Euh4iIGgCGdwDc0L89EuPCsWLbX/jzZGmwyyEi\nohDH8A4AbZiEO4Z3hqoCH3y3HxZr6F27nIiI6g+Gd4B0bhuPtO4tcbygAt/+dCTY5RARUQhjeAfQ\nTQM6ID5ai2WbjyIvv/zcGxAREXnA8A6gyHAZ44Z1glVR8cF3+2FVOHxORETnj+EdYN2Sm6Jvl2Y4\ncqoMP2w7FuxyiIgoBDG8g+Cf
QzoiJjIMSzf+iVNn9cEuh4iIQgzDOwh0EWG4dWgnmC0KFn63H0po\nXNiNiIjqCYZ3kPRKScIVHRPxe14J1u48HuxyiIgohDC8g+jWoR0RFS5jybrDKCw2BLscIiIKEQzv\nIIrVaZE1+BIYzVZ8+P0BqBw+JyIiHzC8g+zqrs3RtX0Cco8UYePuk8Euh4iIQgDDO8gEQcC4jBSE\nayR8seYQisqMwS6JiIjqOYZ3PdAkNhw3DewAg9GCT374jcPnRERUK4Z3PZHWvSU6XRSHXb8XYvuB\n/GCXQ0RE9RjDu54QBQF3ZKZAI4v45IeDKNWbgl0SERHVUwzveqRZfCRu6N8e5QYzPl/1e7DLISKi\neorhXc+k97oI7VvGYOu+09j1e0GwyyEionqI4V3PiKKAO4enQBIFfLTiN+grzcEuiYiI6hmGdz3U\nKlGHf6S2Q0m5CYvXHAp2OUREVM8wvOup4X3a4qIkHTbsPoncP88GuxwiIqpHGN71lCyJGJ/ZGaIg\nYNHyA6g0WYJdEhER1RNysAsIhnJzBb7/5QdYjCpitTGI1cTY7rUxiNFEQxTqxzFN2+bRGN6nDZZt\nPoov1/+BW9I7BrskIiKqBxpleJ8oP4lvflvlcZkAATGaaGeYx2pjEKeJrZq2h31UWCQEQfB7rf9I\nbYedBwuw5uc8XJmShI4Xxfn9MYmIqH4T1BD5Lc6CgrK63WGkCQePH0OxsQQlxlKUmEpt9/ZbsakU\nFsX7ULUsSFUBr3EEe2yNM/kIOfxvl3oorwQvfPIzkhIi8djY7oiP0UIMwIFDXUlMjK77fz+qgf0c\nGOznwGA/2yQmRntsb5Rn3gCQGNUEiNd4Xa6qKvQWg1uYlziC3jlfiiOlx6Coitf9aCVNjUCP08a6\nBH4MYjQx0EhhXvfRoXUsBvdqjVU78vD4Wz9BI4tIjI9AUlwEmsVHIik+As3iI5AUHxlywU5EROev\n0Yb3uQiCgKiwSESFRaKlrrnX9RRVQZmpAiWmEvcz92pn8/n6wlofL0qO9HAmXxX4Q/o0RUyUjLx8\nPU6fNeB0kR7HCypq7EeWRCTGhbuHekIkmsVFICEmHKLIYCciCnUM779JFETEaqMRq40GPI9uAAAs\nigVlpnLnMH2xh2H6ImMJTlSc8roPAQK0cVpENA1HSzkcGlEDUQmDYpFhMUkwGUUY9EBxOZBfJEIt\nkAFrGFSrDFhliGoYmkZHo3l8JJKqhXuTGC0ksX58UI+IiGrH8A4QWZQRHx6H+PDaP3BmsppQYixD\niam06v14l7N4vcWASkslSowlMFgqocLlIwsa+y3OdudJqQqUWGUcMMvASRlqni3gYZURLoUjShOB\n2PAoxEVGoYkuGkkx0UiKiYFOE4EIORwRcji0krbefCKfiKgxYnjXMxpJg8TIJkiMbHLOdVVVhdFq\nQqW1EgZL1a3SYoDeUolKS7V2qwEGSyUqTAZUmG0HASa1ArAfAJgBFNtvRysBVALwMtofJmgQIdvC\nPlK2BXu4HI4I+3SE5JgPR5whCqWlBgDw+Al9AYL93rXRQ5uH9WrbHzyuJ7is52F9+6QkSJBFGbIo\nQRZkyKKMMFG2t9lvghSQbxw0FIqqwKJYYVUtsChW5wdCZVGGJIgQBQmyKEFiv5KPVFWFChWqqkKx\n39vmlap2e5vtXnHbxtauuEy7L7Ptt9pyT9vb52M00WgT0zogz53hHcIEQUC4rEW4rEWcNvaC9lH9\nAKBIX45TxSUoKCtDYXk5ivXlKDXqoTcZYFJNECQzIFmgSBYYZTNKJIOtrZG+1srOkK8KdLd5l/B3\nztunwwS55rpu7S77qqUtzN5WaTFCb9bDolphVay2gLQHpSMwrW5ttgB1W65ancHqWF6zzT2Aq9pc\n9+e+D4tidR8lOgdRECEJkv0gSqoR7pLj3mOb6MNy2wGD48DBbbnLes7lLvszhOlQVK73Wvu5Dj
z8\ntrS2x1VVWFUFVlWBolrt9wqsitV2b2+zqrZ5T8sUl+VVy1zaHPtWPDyOyzqu+61tGQTAqig1ArT6\nfX3zYr9/I1qj8/vjMLwbueoHAC2imuHSRM/rGowW5BcZkF9sQH6R7YNz+Wf0OFWsR6neAEgWCLIZ\ngmSxTUsW6KKAqEgZsiRCGyZBGyZBo7FPa1zawiRow0SPH6hz/g/q8q1Gtfoy1/WrffvRbR21Zlv1\nKVVFVejYQ8qsmJ0hZLuvWuacVyywqFZUmo2wKBaY7eHV0ImCCFmQINkPXiT7wUS4pLVNC7Kzzbnc\nfg/A+aJttR902MKi6oDAttwCq6rAZDXDqla6La/t2x5UP0mC5HKAJtqmRVubRgyDKIRDI0tQFNvI\nmCAIECFCEATnvAABosu8KIhuy2zbuK5bc3u3dQTRbX33dcQa+7Pd29vtbXHaWOjCogLShwxv8lmE\nVkbb5tFo27zmJ/MqTfZgrx7uxQbknzDDYj3XC6wVgBXaMAm6CBm6CA10ETKiIsIQHaFBVIQMXUQY\ndJFhtnuXmzas/g6zOs5SXIPefI7wN1vNXpe5zpud+7XdtBoZikVwC0f3wKwaCZDEqhCtWsd9ueOM\ntfpy9zYp6J9/UFW1Rsi7hX+1AwLFy4GB7d+p2jaKAqtqW26xr6cNl1Bp8Hy1v3OdB9Z6pniOn9z4\nO0sBOEcOXIPSNUAdoxu25bZpR7tjO9E5ClG1XHRZXrVMdAto12WO4DsXfs+7dgxvqhPhGhltmkWj\nTbOawd60qQ55J4pRbjC73/S2+wqDBWUGEyoMZpQZzKgwmHHqrB5Gs29nrbIk1Ah0XUSYPfht99WD\nP0IrB+T78KIgQpREhMH79/jrSmN9sRMEwXZgAgCS999uqCuNtZ+pfmF4k98JgoBwjYxwjYymsRE+\nb2e2WFFusNiD3oTySpdpR7vBcQBgxplSI/I8fPfdE1EQqs7mq4W9LiIMEeEyIrUyIrQyIjQyIrSS\nbVorI1xTf8/0iahxYHhTvRUmS4iPlhAfrfV5G6ui2M/kbYHu7Wy/vNI2Xaa3neWfz48EC4JtpCHS\nJdDdbxIitbJ9naq26uvwe/VEdKEY3tSgSKKImCgNYqJ8Hz5VVBX6Sosz7MsMZhgqLTCYLDAYLdAb\nLTAYrTAYLTVuZ0uNMJgqziv8HbRhEsLtQe8MdU1VyDvaa6zjXC4hTJbO/4GJKOQxvKnRE4Wq98yb\nXcD2qqqi0uQS7i7TemfQewp/W1uZ3oz8IgOsyvkfAciS4Bzaj4wIgwAgTBZtN0mEbL93tskiZKlq\nuWu7c/3qy+zTrvuSRN8+dERE/sHwJvqbBEFwng1fKFVVYbEq0Bvdg7/yHGf+eqMVlSbbOhVFZhjN\nig+f7P/7BAEeQ91b2Hs7OJAl24GA270kQBJt97IoQKq2jiQJNbazrevYlgcW1PD5NbxnzZqFX3/9\nFYIgIDs7G926dXMu++mnn/Daa69BkiT0798fEydO9GcpRPWaIAgIkyXEyhJiz2PI35XjU9C2AwEV\nZosCs1WB2WK1TVsUewZTeQUAAAl8SURBVLvV3q64tCsu61dr87HdaDA7py9kFKEuOQJdEh0hb5uW\nJdeDAZc2l4MESapqcx4cOLcVEBMdAWOlydnm6cDD0S67Hoi4PK5t3WrzPOig8+C38N62bRuOHj2K\nxYsX4/Dhw8jOzsbixYudy2fMmIEFCxagWbNmuPXWW5GRkYEOHTr4qxyiRsN2ICAgTA7eB+IURXUG\nu6XagYLZqsBiUWCyKLBabUFvqXZvtaqwKIrt3tHmtty2zH1bFVZFcd7btq2atioKKs0KrJWWqvWs\ntp+8rC9cDzpcQ91xQCFXH5Gwt3lat/rBigAAAuw/LmIbPXFMQ3D8GArsywR7m30db+32bR1fu6y+\nz6r2qscEBIhCtcf00B5fVImyMoPtR1BE28GUaK9FEm1tor
1NFKrmbevZHrP6eg2J38J78+bNGDJk\nCAAgOTkZJSUlKC8vh06nw7FjxxAbG4sWLVoAANLS0rB582aGN1EDIYoCtKLt1/PqO0VVneFusR8Q\nWK0KLF4OEnS6cJwpqrDNezjIsB0UVG3vdlBRbd71YMTi8jhuByCKApPRUmOfdP6qQh4uBwMuIS/Y\nlrkeDDinhaoDA0/LRFFAQrQWWYMvCcill/0W3oWFhejSpYtzPiEhAQUFBdDpdCgoKEBCQoLbsmPH\njvmrFCIir0RBgCgLCINvIxW2tyfC/VxV7RwX3HA/UKg2guFyYGG1KlBh/x021XFBD9uPujmmPbXb\nm23tKuy/KW5f1z7t1u62rm1HyrnanfuxNSj2AiIitSgvr4RVsV8QRIFzWnG9t0/blgGqolat57qO\nfbljXVVRYVVd2hUFigJYrKptWq22b8X+a361vCUUrpHwj34XQxfh/x9lCtgH1qr/3vT5io+PhFzH\nX4tJTKzlAtxUp9jXgcF+Dgz2c+OmuB4gOKYVFVpN4Eab/BbeSUlJKCysup5kfn4+EhMTPS47ffo0\nkpKSat1fUZH3q/hcCP7EYeCwrwOD/RwY7OfACMV+NtZtTAHwfqDot0+0pKamYsWKFQCA3NxcJCUl\nQaezXSatdevWKC8vR15eHiwWC9auXYvU1FR/lUJERNSg+O3Mu2fPnujSpQuysrIgCAKmT5+OnJwc\nREdHIz09Hc888wweffRRAEBmZiYuvvhif5VCRETUoAjq330zOkDqevgkFIdkQhX7OjDYz4HBfg4M\n9rNNwIfNiYiIyD8Y3kRERCGG4U1ERBRiGN5EREQhhuFNREQUYhjeREREIYbhTUREFGIY3kRERCEm\nZH6khYiIiGx45k30/+3dS0hUfRzG8e84g0GjlZom0oWuRkGokFBJtbALBEFJzRRaRC1CCqLoNmRT\nBDbjIqQLJWSboWREuy26ETRgMGNQoGAXSuhiUmZOTmkucngXgry9b7yLN+x4zjyf3ZzF4TkgPPP/\n/8fzExExGZW3iIiIyai8RURETEblLSIiYjIqbxEREZNReYuIiJhMQpZ3ZWUlLpcLt9tNa2ur0XEs\nq6qqCpfLRUlJCffu3TM6jqUNDAxQXFzM1atXjY5iaTdv3mTt2rWsX7+eUChkdBxL6uvrY9euXZSV\nleF2u2lqajI60qjkMDrAn/bo0SPevHlDMBikvb0dj8dDMBg0OpblRCIRXr58STAYJBqNsm7dOlau\nXGl0LMs6f/4848ePNzqGpUWjUc6dO0djYyP9/f2cOXOG5cuXGx3Lcq5du8b06dPZt28fHz9+ZOvW\nrdy5c8foWKNOwpV3OBymuLgYgJkzZ9Lb28u3b99ISUkxOJm1LFy4kAULFgAwbtw4vn//zuDgIHa7\n3eBk1tPe3s6rV69UJCMsHA6zaNEiUlJSSElJ4cSJE0ZHsqS0tDRevHgBQCwWIy0tzeBEo1PCbZt3\nd3f/9MeQnp7Op0+fDExkTXa7nbFjxwLQ0NDA0qVLVdwjxO/3c+jQIaNjWF5HRwcDAwPs3LmTzZs3\nEw6HjY5kSWvWrKGzs5MVK1ZQWlrKwYMHjY40KiXcyvuf9HbYkXX//n0aGhq4dOmS0VEs6fr16+Tl\n5TFlyhSjoySEL1++cPbsWTo7O9myZQsPHjzAZrMZHctSbty4QU5ODrW1tTx//hyPx6PfcvxCwpV3\nVlYW3d3dw5+7urrIzMw0MJF1NTU1ceHCBS5evEhqaqrRcSwpFArx7t07QqEQHz58IDk5mezsbBYv\nXmx0NMvJyMggPz8fh8PB1KlTcTqd9PT0kJGRYXQ0S3ny5AlFRUUAzJ07l66uLh25/ULCbZsvWbKE\nu3fvAtDW1kZWVpbOu0fA169fqaqqoqamhgkTJhgdx7Kqq6tpbGykvr6eDRs2UF5eruIeIUVFRUQi\nEeLxONFolP7+fp3Hjo
Bp06bR0tICwPv373E6nSruX0i4lXdBQQHz58/H7XZjs9nwer1GR7KkW7du\nEY1G2bNnz/A1v99PTk6OgalE/r9JkyaxatUqNm7cCMCRI0dISkq49c+Ic7lceDweSktL+fHjB8eO\nHTM60qikkaAiIiImo6+NIiIiJqPyFhERMRmVt4iIiMmovEVERExG5S0iImIyCfevYiKJqqOjg9Wr\nV5Ofn//T9WXLlrFjx47fvn9zczPV1dXU1dX99r1E5L+pvEUSSHp6OoFAwOgYIvKbVN4iwrx58ygv\nL6e5uZm+vj58Ph9z5syhpaUFn8+Hw+HAZrNx9OhRZs2axevXr6moqCAejzNmzBhOnjwJQDwex+v1\n8uzZM5KTk6mpqcHpdBr8dCLWozNvEWFwcJDZs2cTCATYtGkTp0+fBuDAgQMcPnyYQCDAtm3bOH78\nOABer5ft27dz+fJlSkpKuH37NjA0nnT37t3U19fjcDh4+PChYc8kYmVaeYskkJ6eHsrKyn66tn//\nfoDhYRAFBQXU1tYSi8X4/Pnz8Fz2wsJC9u7dC0BrayuFhYXA0AhHGDrznjFjBhMnTgQgOzubWCw2\n8g8lkoBU3iIJ5L/OvP/+pmSbzfavUZf/fJNyPB7/1z00QELkz9C2uYgAEIlEAHj8+DG5ubmkpqaS\nmZk5POEpHA6Tl5cHDK3Om5qagKEhNKdOnTImtEiC0spbJIH8att88uTJADx9+pS6ujp6e3vx+/3A\n0CQ4n8+H3W4nKSlpeMJTRUUFFRUVXLlyBYfDQWVlJW/fvv2jzyKSyDRVTETIzc2lra0Nh0Pf50XM\nQNvmIiIiJqOVt4iIiMlo5S0iImIyKm8RERGTUXmLiIiYjMpbRETEZFTeIiIiJqPyFhERMZm/AMCG\ncIb7hcvXAAAAAElFTkSuQmCC\n", 328 | "text/plain": [ 329 | "
" 330 | ] 331 | }, 332 | "metadata": { 333 | "tags": [] 334 | } 335 | } 336 | ] 337 | }, 338 | { 339 | "metadata": { 340 | "id": "mvRM2M6NRy9Q", 341 | "colab_type": "code", 342 | "outputId": "a01fbf3b-976f-476b-feae-f7e67bdfd634", 343 | "colab": { 344 | "base_uri": "https://localhost:8080/", 345 | "height": 34 346 | } 347 | }, 348 | "cell_type": "code", 349 | "source": [ 350 | "# 9. Đánh giá model với dữ liệu test set\n", 351 | "score = model.evaluate(X_test, Y_test, verbose=0)\n", 352 | "print(score)" 353 | ], 354 | "execution_count": 9, 355 | "outputs": [ 356 | { 357 | "output_type": "stream", 358 | "text": [ 359 | "[0.03442202161136956, 0.9892]\n" 360 | ], 361 | "name": "stdout" 362 | } 363 | ] 364 | }, 365 | { 366 | "metadata": { 367 | "id": "lgAVysTefR5Q", 368 | "colab_type": "text" 369 | }, 370 | "cell_type": "markdown", 371 | "source": [ 372 | "Ta sẽ dùng kết quả đánh giá của mode với test set để làm kết quả cuối cùng của model. Tức model của chúng ta dữ đoán chữ số có độ chính xác 98.92% với MNIST dataset. Nghĩa là dự đoán khoảng 100 ảnh thì sai 1 ảnh." 373 | ] 374 | }, 375 | { 376 | "metadata": { 377 | "id": "y25mu5OTYG4d", 378 | "colab_type": "code", 379 | "outputId": "8214104c-a382-47a2-c911-5c496f7bc1b9", 380 | "colab": { 381 | "base_uri": "https://localhost:8080/", 382 | "height": 364 383 | } 384 | }, 385 | "cell_type": "code", 386 | "source": [ 387 | "# 10. 
Dự đoán ảnh\n", 388 | "plt.imshow(X_test[0].reshape(28,28), cmap='gray')\n", 389 | "\n", 390 | "y_predict = model.predict(X_test[0].reshape(1,28,28,1))\n", 391 | "print('Giá trị dự đoán: ', np.argmax(y_predict))" 392 | ], 393 | "execution_count": 10, 394 | "outputs": [ 395 | { 396 | "output_type": "stream", 397 | "text": [ 398 | "Giá trị dự đoán: 7\n" 399 | ], 400 | "name": "stdout" 401 | }, 402 | { 403 | "output_type": "display_data", 404 | "data": { 405 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUsAAAFKCAYAAACU6307AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAE6ZJREFUeJzt3X1olfX/x/HX+e009GAyXZtkdIcp\njbbRDUbTvFkOy8LUMsyhEkQZqWiiosObQPJmieGS8i79oxWcOkFYSBtLKrE5cX/YNqo5A1lWa+ZI\nl1s6Od8/fnwP6s48752dc65zXd/nAwT3uT7Xdb3fXOPFdZ1r17l84XA4LADATf2f0wUAgBsQlgBg\nQFgCgAFhCQAGhCUAGBCWAGARTgFJUf81NDT0ucyt/7zYk1f7oif3/EtVXzfjS8XfWfp8vqjj4XC4\nz2Vu5cWeJG/2RU/ukaq+bhaH/ng3umnTJp08eVI+n09lZWUqLCyMd1MAkPbiCsvjx4/rzJkzCgaD\nOn36tMrKyhQMBhNdGwCkjbhu8NTW1qqkpESSNGrUKP3999/q7OxMaGEAkE7iOrM8d+6cHnjggcjP\nw4cPV3t7u4YMGRJ1fkNDg/Lz86MuS8FHpinnxZ4kb/ZFT+7hdF9xf2Z5rVhNFBQU9Lme1z6M9mJP\nkjf7oif3SIcbPHFdhufm5urcuXORn//880/l5OTEsykAcIW4wnL8+PGqqqqSJDU1NSk3N7fPS3AA\n8IK4LsMffvhhPfDAA3rxxRfl8/m0YcOGRNcFAGmFP0pPMC/2JHmzL3pyD9d+ZgkA/2sISwAwICwB\nwICwBAADwhIADAhLADAgLAHAgLAEAAPCEgAMCEsAMCAsAcCAsAQAA8ISAAwISwAwICwBwICwBAAD\nwhIADAhLADAgLAHAgLAEAAPCEgAMCEsAMCAsAcCAsAQAA8ISAAwISwAwICwBwICwBAADwhIADAhL\nADAgLAHAgLAEAAPCEgAMCEsAMCAsAcCAsAQAA8ISAAwISwAwICwBwICwBAADwhIADAhLADAgLAHA\ngLAEAAN/PCvV1dVp6dKlGj16tCRpzJgxWrduXUILA4B0EldYStKjjz6qioqKRNYCAGmLy3AAMIg7\nLFtaWvTaa69p7ty5Onr0aCJrAoC04wuHw+H+rtTW1qb6+npNmzZNra2tWrBggaqrq5WZmRl1fmNj\no/Lz8wdcLAA4Ja6wvNHs2bP1zjvv6M4774y+E58v6ng4HO5zmVt5sSfJm33Rk3ukqq+bxWFcl+EH\nDx7UBx98IElqb2/XX3/9pREjRsRXHQC4QFxnlp2dnVqxYoUuXLigK1euaPHixZo0aVLfO+HM0vW8\n2Bc9uUc6nFkm5DI8FsLS/bzYFz25RzqEJX86BAAGhCUAGBCWAGBAWAKAAWEJAAaEJQAYEJYAYEBY\nAoABYQkABoQlABgQlgBgEPdrJWA3e/Zs89xXXnnFNO
+3334zb7O7u9s896OPPupz2eOPPx75/x9/\n/GHeZktLi3kukK44swQAA8ISAAwISwAwICwBwICwBAADwhIADAhLADAgLAHAgLAEAAPe7phg0Xr6\n5ZdfzOvfc889Ca4oMXw+33Vvvrt48aJ53aampmSUNGBFRUWqra11uoy4/frrr73GXnjhBX366afX\njZWXl5u3eeLEiQHXlQy83REAXIKwBAADwhIADAhLADAgLAHAgLAEAAPCEgAMCEsAMCAsAcCAsAQA\nAx53TLBoPU2ZMsW8fmFhoWnejz/+aN5mXl6eee7DDz8cdXzevHmqrKyM/Dx58mTzNu+44w7z3NbW\nVtO8O++807zNvtz4CGd/9PT0mOe2t7eb595+++3xlBMRraft27eb11+xYsWA9p8sPO4IAC5BWAKA\nAWEJAAaEJQAYEJYAYEBYAoABYQkABoQlABgQlgBgQFgCgAGPOyaYF3uSevc1bNgw87oPPvigeW59\nfb1p3tixY83b7EtNTY1KSkriWre7u9s8t7m52Ty3P4+xDh8+vNdYtMcdFy1aZN7m+++/b56bSq55\n3LG5uVklJSWRZ4N///13zZ8/X6WlpVq6dKkuX76cmEoBIE3FDMtLly5p48aNKioqioxVVFSotLRU\nH3/8se6++26FQqGkFgkATosZlpmZmdq7d69yc3MjY3V1dZFv0ikuLnb1i+oBwMIfc4LfL7//+mld\nXV3KzMyUJGVnZ/frK6gAwI1ihmUslvtDDQ0Nys/Pj3t9t/FiT5I3+6qpqXG6hIS78UbIe++9Z163\nP3NTzenfv7jCMhAIqLu7W4MGDVJbW9t1l+jRFBQURB334p1jL/YkcTf8RtwNTy3X3A2/0bhx41RV\nVSVJqq6u1oQJE+KrDABcIuaZZWNjo7Zu3aqzZ8/K7/erqqpK27Zt0+rVqxUMBjVy5EjNnDkzFbUC\ngGNihmV+fr4+/PDDXuMHDhxISkEAkI54gifBvNiT5M2+UtXT888/b577ySefmOc2Njb2GissLNQP\nP/xw3VhxcbF5m+fPnzfPTSXXfmYJAP9rCEsAMCAsAcCAsAQAA8ISAAwISwAwICwBwICwBAADwhIA\nDAhLADDgcccE82JPkjf7GkhPsb6W8FoNDQ1J2e7s2bN7jYVCoV7jn332mXmb6YrHHQHAJQhLADAg\nLAHAgLAEAAPCEgAMCEsAMCAsAcCAsAQAA8ISAAwISwAwiPkqXAC9LVq0yDw3JyfHPLejo8M89+ef\nf+7XOAaGM0sAMCAsAcCAsAQAA8ISAAwISwAwICwBwICwBAADwhIADAhLADDghWUJ5sWeJG/2Fa2n\n8ePHm9Y9fPiweT+33HKLee7kyZPNc7/77rteY148ThIvLAMA1yAsAcCAsAQAA8ISAAwISwAwICwB\nwICwBAADwhIADAhLADAgLAHAgBeWAdd4+umnTfP68wjj119/bZ5bW1trnovU4swSAAxMYdnc3KyS\nkhJVVlZKklavXq3p06dr/vz5mj9/vr755ptk1ggAjot5GX7p0iVt3LhRRUVF140vX75cxcXFSSsM\nANJJzDPLzMxM7d27V7m5uamoBwDSUswzS7/fL7+/97TKykodOHBA2dnZWrdunYYPH97nNhoaGpSf\nnx91WQq+TjPlvNiT5M2+UtFTSUmJee7ly5cHvD8vHifJ+b7iuhs+Y8YMZWVlKS8vT3v27NHOnTu1\nfv36PucXFBREHffiF5V6sSfJm31F6+mtt94yrbtmzRrzfvpzN9x6N16Srly50mvMi8dJcvGX/xYV\nFSkvL0+S9MQTT6i5uTm+ygDAJeIKyyVLlqi1tVWSVFdXp9GjRye0KABINzEvwxsbG7V161adPXtW\nfr9fVVVVmjdvnpYtW6bBgwcrEAho8+bNqagVABwTMyzz8/P14Ycf9hp/8sknk1IQAKQjHneE5w0e\nPNi87KmnnjJtsz
93rTds2GCeG+2mDdIDjzsCgAFhCQAGhCUAGBCWAGBAWAKAAWEJAAaEJQAYEJYA\nYEBYAoABYQkABjzuCM9buXKledlDDz1k2uZXX31l3v/3339vnov0xZklABgQlgBgQFgCgAFhCQAG\nhCUAGBCWAGBAWAKAAWEJAAaEJQAY+MLhcDjpO/H5oo6Hw+E+l7mVF3uS0q+vZ555xjz3888/jzru\n9/vV09Nz3dg///xj2qb1xWaSdOzYMfPcgUq345QoqerrZnHImSUAGBCWAGBAWAKAAWEJAAaEJQAY\nEJYAYEBYAoABYQkABoQlABgQlgBgwAvLkFays7NN8yoqKszbzMjIMC87dOiQaZupfIQR6YEzSwAw\nICwBwICwBAADwhIADAhLADAgLAHAgLAEAAPCEgAMCEsAMCAsAcCAtzsmmBd7kgbW180eN7yR9THC\nRx55xLzN06dPRx2/77771NLSct2Y9a2NfW3Tafz+DXw/fTE9G15eXq76+nr19PRo4cKFKigo0KpV\nq3T16lXl5OTo7bffVmZmZsIKBoB0EzMsjx07plOnTikYDKqjo0OzZs1SUVGRSktLNW3aNG3fvl2h\nUEilpaWpqBcAHBHzM8uxY8dqx44dkqShQ4eqq6tLdXV1mjJliiSpuLhYtbW1ya0SABwWMywzMjIU\nCAQkSaFQSBMnTlRXV1fksjs7O1vt7e3JrRIAHGb+PsuamhqFQiHt379fU6dOjYxb7g81NDQoPz8/\n6rIU3F9KOS/2JLm3r/vuu8+87MYbPm7k1uMUi9N9mcLyyJEj2rVrl/bt26dbb71VgUBA3d3dGjRo\nkNra2pSbm3vT9QsKCqKOe/HOnRd7krgbbt2m0/j9G/h++hLzMvzixYsqLy/X7t27lZWVJUkaN26c\nqqqqJEnV1dWaMGFCgkoFgPQU88zy0KFD6ujo0LJlyyJjW7Zs0dq1axUMBjVy5EjNnDkzqUUCgNNi\nhuWcOXM0Z86cXuMHDhxISkEAkI54gifBvNiTNLC+xowZY577008/xbWPm5kxY0bU8YMHD+rZZ5+9\nbuyLL75I+P5Tid+/ge+nLzwbDgAGhCUAGBCWAGBAWAKAAWEJAAaEJQAYEJYAYEBYAoABYQkABoQl\nABiYv88SuNbdd99tnltdXZ3w/a9cudI898svv4xrGXAtziwBwICwBAADwhIADAhLADAgLAHAgLAE\nAAPCEgAMCEsAMCAsAcCAsAQAAx53RFxeffVV89y77ror4fv/9ttvzXNv9sa+FLzcFB7BmSUAGBCW\nAGBAWAKAAWEJAAaEJQAYEJYAYEBYAoABYQkABoQlABjwBA+u8/jjj5uWLVmyJBXlAGmDM0sAMCAs\nAcCAsAQAA8ISAAwISwAwICwBwICwBAADwhIADAhLADAgLAHAgMcdcZ0JEyaYlg0ZMiQp+z99+rRp\nXmdnZ1L2D/TFFJbl5eWqr69XT0+PFi5cqMOHD6upqUlZWVmSpJdfflmTJ09OZp0A4KiYYXns2DGd\nOnVKwWBQHR0dmjVrlh577DEtX75cxcXFqagRABwXMyzHjh2rwsJCSdLQoUPV1dWlq1evJr0wAEgn\nMW/wZGRkKBAISJJCoZAmTpyojIwMVVZWasGCBXrjjTd0/vz5pBcKAE7yhcPhsGViTU2Ndu/erf37\n96uxsVFZWVnKy8vTnj179Mcff2j9+vV9rtvY2Kj8/PyEFQ0AqWYKyyNHjmjHjh3at29f5KbOf7W0\ntOjNN99UZWVl3zvx+aKOh8PhPpe5ldt7WrNmTdTxTZs2qaysLPLzW2+9lZT9W++GT58+3bzNn376\nKeq4249VNF7sSUpdXzeLw5iX4RcvXlR5ebl2794dCcolS5aotbVVklRXV6fRo0cnqFQASE8xb/Ac\nOnRIHR0dWrZsWWTsueee07JlyzR48GAFAgFt3rw5qUUCgNNihuWcOXM0Z86cXuOz
Zs1KSkEAkI54\n3BEADHjcEUl38uRJ89wpU6aY5vHnakg1ziwBwICwBAADwhIADAhLADAgLAHAgLAEAAPCEgAMCEsA\nMCAsAcDA/H2WA9oJX9Hmel7si57cwxVf0QYAICwBwISwBAADwhIADAhLADAgLAHAgLAEAAPCEgAM\nCEsAMCAsAcAgJY87AoDbcWYJAAaEJQAYEJYAYEBYAoABYQkABoQlABj4ndjppk2bdPLkSfl8PpWV\nlamwsNCJMhKqrq5OS5cu1ejRoyVJY8aM0bp16xyuKn7Nzc16/fXX9dJLL2nevHn6/ffftWrVKl29\nelU5OTl6++23lZmZ6XSZ/XJjT6tXr1ZTU5OysrIkSS+//LImT57sbJH9VF5ervr6evX09GjhwoUq\nKChw/XGSevd1+PBhx49VysPy+PHjOnPmjILBoE6fPq2ysjIFg8FUl5EUjz76qCoqKpwuY8AuXbqk\njRs3qqioKDJWUVGh0tJSTZs2Tdu3b1coFFJpaamDVfZPtJ4kafny5SouLnaoqoE5duyYTp06pWAw\nqI6ODs2aNUtFRUWuPk5S9L4ee+wxx49Vyi/Da2trVVJSIkkaNWqU/v77b3V2dqa6DNxEZmam9u7d\nq9zc3MhYXV2dpkyZIkkqLi5WbW2tU+XFJVpPbjd27Fjt2LFDkjR06FB1dXW5/jhJ0fu6evWqw1U5\nEJbnzp3TsGHDIj8PHz5c7e3tqS4jKVpaWvTaa69p7ty5Onr0qNPlxM3v92vQoEHXjXV1dUUu57Kz\ns113zKL1JEmVlZVasGCB3njjDZ0/f96ByuKXkZGhQCAgSQqFQpo4caLrj5MUva+MjAzHj5Ujn1le\nyytPW95zzz1avHixpk2bptbWVi1YsEDV1dWu/LwoFq8csxkzZigrK0t5eXnas2ePdu7cqfXr1ztd\nVr/V1NQoFApp//79mjp1amTc7cfp2r4aGxsdP1YpP7PMzc3VuXPnIj//+eefysnJSXUZCTdixAg9\n/fTT8vl8uuuuu3Tbbbepra3N6bISJhAIqLu7W5LU1tbmicvZoqIi5eXlSZKeeOIJNTc3O1xR/x05\nckS7du3S3r17deutt3rmON3YVzocq5SH5fjx41VVVSVJampqUm5uroYMGZLqMhLu4MGD+uCDDyRJ\n7e3t+uuvvzRixAiHq0qccePGRY5bdXW1JkyY4HBFA7dkyRK1trZK+v/PZP/7lwxucfHiRZWXl2v3\n7t2Ru8ReOE7R+kqHY+XItw5t27ZNJ06ckM/n04YNG3T//fenuoSE6+zs1IoVK3ThwgVduXJFixcv\n1qRJk5wuKy6NjY3aunWrzp49K7/frxEjRmjbtm1avXq1/v33X40cOVKbN2/WLbfc4nSpZtF6mjdv\nnvbs2aPBgwcrEAho8+bNys7OdrpUs2AwqHfffVf33ntvZGzLli1au3ata4+TFL2v5557TpWVlY4e\nK76iDQAMeIIHAAwISwAwICwBwICwBAADwhIADAhLADAgLAHAgLAEAIP/APzEXxVDzLNBAAAAAElF\nTkSuQmCC\n", 406 | "text/plain": [ 407 | "
" 408 | ] 409 | }, 410 | "metadata": { 411 | "tags": [] 412 | } 413 | } 414 | ] 415 | } 416 | ] 417 | } -------------------------------------------------------------------------------- /L8/drive.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import base64 3 | from datetime import datetime 4 | import os 5 | import shutil 6 | import numpy as np 7 | import socketio 8 | import eventlet 9 | import eventlet.wsgi 10 | from PIL import Image 11 | from flask import Flask 12 | from io import BytesIO 13 | 14 | from keras.models import load_model 15 | 16 | import utils 17 | 18 | #initialize our server 19 | sio = socketio.Server() 20 | #our flask (web) app 21 | app = Flask(__name__) 22 | #init our model and image array as empty 23 | model = None 24 | prev_image_array = None 25 | 26 | # Tốc độ tối thiểu và tối đa của xe 27 | MAX_SPEED = 25 28 | MIN_SPEED = 10 29 | 30 | # Tốc độ thời điểm ban đầu 31 | speed_limit = MAX_SPEED 32 | 33 | #registering event handler for the server 34 | @sio.on('telemetry') 35 | def telemetry(sid, data): 36 | if data: 37 | # Lấy giá trị throttle hiện tại 38 | throttle = float(data["throttle"]) 39 | # Góc lái hiện tại của ô tô 40 | steering_angle = float(data["steering_angle"]) 41 | # Tốc độ hiện tại của ô tô 42 | speed = float(data["speed"]) 43 | # Ảnh từ camera giữa 44 | image = Image.open(BytesIO(base64.b64decode(data["image"]))) 45 | try: 46 | # Tiền xử lý ảnh, cắt, reshape 47 | image = np.asarray(image) 48 | image = utils.preprocess(image) 49 | image = np.array([image]) 50 | print('*****************************************************') 51 | steering_angle = float(model.predict(image, batch_size=1)) 52 | 53 | # Tốc độ ta để trong khoảng từ 10 đến 25 54 | global speed_limit 55 | if speed > speed_limit: 56 | speed_limit = MIN_SPEED # giảm tốc độ 57 | else: 58 | speed_limit = MAX_SPEED 59 | throttle = 1.0 - steering_angle**2 - (speed/speed_limit)**2 60 | 61 | print('{} {} 
{}'.format(steering_angle, throttle, speed)) 62 | 63 | # Gửi lại dữ liệu về góc lái, tốc độ cho phần mềm để ô tô tự lái 64 | send_control(steering_angle, throttle) 65 | except Exception as e: 66 | print(e) 67 | 68 | # save frame 69 | if args.image_folder != '': 70 | timestamp = datetime.utcnow().strftime('%Y_%m_%d_%H_%M_%S_%f')[:-3] 71 | image_filename = os.path.join(args.image_folder, timestamp) 72 | image.save('{}.jpg'.format(image_filename)) 73 | else: 74 | 75 | sio.emit('manual', data={}, skip_sid=True) 76 | 77 | 78 | @sio.on('connect') 79 | def connect(sid, environ): 80 | print("connect ", sid) 81 | send_control(0, 0) 82 | 83 | 84 | def send_control(steering_angle, throttle): 85 | sio.emit( 86 | "steer", 87 | data={ 88 | 'steering_angle': steering_angle.__str__(), 89 | 'throttle': throttle.__str__() 90 | }, 91 | skip_sid=True) 92 | 93 | 94 | if __name__ == '__main__': 95 | parser = argparse.ArgumentParser(description='Remote Driving') 96 | parser.add_argument( 97 | 'model', 98 | type=str, 99 | help='Path to model h5 file. Model should be on the same path.' 100 | ) 101 | parser.add_argument( 102 | 'image_folder', 103 | type=str, 104 | nargs='?', 105 | default='', 106 | help='Path to image folder. This is where the images from the run will be saved.' 
107 | ) 108 | args = parser.parse_args() 109 | 110 | # Load model mà ta đã train được từ bước trước 111 | model = load_model(args.model) 112 | 113 | if args.image_folder != '': 114 | print("Creating image folder at {}".format(args.image_folder)) 115 | if not os.path.exists(args.image_folder): 116 | os.makedirs(args.image_folder) 117 | else: 118 | shutil.rmtree(args.image_folder) 119 | os.makedirs(args.image_folder) 120 | print("RECORDING THIS RUN ...") 121 | else: 122 | print("NOT RECORDING THIS RUN ...") 123 | 124 | # wrap Flask application with engineio's middleware 125 | app = socketio.Middleware(sio, app) 126 | 127 | # deploy as an eventlet WSGI server 128 | eventlet.wsgi.server(eventlet.listen(('', 4567)), app) 129 | -------------------------------------------------------------------------------- /L8/model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nttuan8/DL_Tutorial/b0a5dfb2898c17a7e8cb0b85035ef09d0b2eed4d/L8/model.h5 -------------------------------------------------------------------------------- /L8/self-driving-car.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. 
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 13 | " from ._conv import register_converters as _register_converters\n", 14 | "Using TensorFlow backend.\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "import pandas as pd\n", 20 | "import numpy as np\n", 21 | "import os\n", 22 | "import matplotlib.pyplot as plt\n", 23 | "\n", 24 | "from sklearn.model_selection import train_test_split\n", 25 | "\n", 26 | "from keras.models import Sequential \n", 27 | "from keras.optimizers import Adam\n", 28 | "from keras.callbacks import ModelCheckpoint\n", 29 | "from keras.layers import Lambda, Conv2D, Dropout, Dense, Flatten\n", 30 | "from keras.regularizers import l2\n", 31 | "\n", 32 | "from utils import INPUT_SHAPE, batch_generator" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 2, 38 | "metadata": {}, 39 | "outputs": [ 40 | { 41 | "data": { 42 | "text/plain": [ 43 | "(array([ 7., 53., 302., 610., 5553., 208., 179., 43., 18.,\n", 44 | " 11.]),\n", 45 | " array([-0.9197341 , -0.72776069, -0.53578728, -0.34381387, -0.15184046,\n", 46 | " 0.04013295, 0.23210636, 0.42407977, 0.61605318, 0.80802659,\n", 47 | " 1. 
]),\n", 48 | " )" 49 | ] 50 | }, 51 | "execution_count": 2, 52 | "metadata": {}, 53 | "output_type": "execute_result" 54 | }, 55 | { 56 | "data": { 57 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD8CAYAAAB+UHOxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAEtVJREFUeJzt3X+s3fV93/HnK7iQbd2CCYZRm9VE9bpQTSXIImiR1gYi8yNTTDXYHK2Lm3my0rGp0zatsExiS4oG+2NU0dZ0XvHiZGsIpYvwGjrm8kPVpEIwCyEBRnwhWfDsYacGuigKK+S9P87nZgf7XJ9zfM891/B5PqSr8/2+v5/v977P5x7f1z3f8z3HqSokSf1522o3IElaHQaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVNrVruBkzn33HNr48aNq92GJL2pPP7449+pqnXjxp3WAbBx40b279+/2m1I0ptKkv85yThPAUlSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqdO63cCS+NsvOlLq/a9v3XbB1fte0uz4DMASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnJgqAJN9K8rUkTyTZ32rnJNmX5EC7XdvqSfKpJAtJnkxy6dBxtrfxB5JsX5m7JEmaxDTPAN5fVZdU1ea2fhPwQFVtAh5o6wDXAJva107g0zAIDOAW4L3AZcAti6EhSZq/5ZwC2grsact7gOuG6p+tgUeAs5NcAFwF7KuqY1X1ErAPuHoZ31+StAyTBkAB/zXJ40l2ttr5VXUYoN2e1+rrgReG9j3YakvV3yDJziT7k+w/evTo5PdEkjSVSf9T+PdV1aEk5wH7kvyPk4zNiFqdpP7GQtUuYBfA5s2bT9guSZqNiZ4BVNWhdnsE+CKDc/gvtlM7tNsjbfhB4MKh3TcAh05SlyStgrEBkORPJfnTi8vAFuDrwF5g8Uqe7cC9bXkv8JF2NdDlwCvtFNH9wJYka9uLv1taTZK0CiY5BXQ+8MUki+N/s6r+S5LHgLuT7AC+DdzQxt8HXAssAN8DPgpQVceSfBJ4rI37RFUdm9k9kSRNZWwAVNXzwE+PqP8hcOWIegE3LnGs3cDu6duUJM2a7wSWpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpyYOgCRnJPlKkt9p6xcleTTJgSRfSHJmq5/V1hfa9o1Dx7i51Z9NctWs74wkaXLTPAP4JeCZofXbgTuqahPwErCj1XcAL1XVTwB3tHEkuRjYBvwUcDXwa0nOWF77kqRTNVEAJNkAfBD4jbYe4ArgnjZkD3BdW97a1mnbr2zjtwJ3VdWrVfVNYAG4bBZ3QpI0vUmfAfwq8I+BH7T1dwIvV9Vrbf0gsL4trwdeAGjbX2njf1gfsY8kac7GBkCSvwIcqarHh8sjhtaYbSfbZ/j77UyyP8n+o0ePjmtPknSKJnkG8D7gQ0m+BdzF4NTPrwJnJ1nTxmwADrXlg8CFAG37O4Bjw/UR+/xQVe2qqs1VtXndunVT3yFJ0mTGBkBV3VxVG6pqI4MXcR+sqr8BPARc34ZtB+5ty3vbOm37g1VVrb6tX
SV0EbAJ+PLM7okkaSprxg9Z0i8DdyX5FeArwJ2tfifwuSQLDP7y3wZQVU8luRt4GngNuLGqXl/G95ckLcNUAVBVDwMPt+XnGXEVT1V9H7hhif1vBW6dtklJ0uz5TmBJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1amwAJHl7ki8n+WqSp5L881a/KMmjSQ4k+UKSM1v9rLa+0LZvHDrWza3+bJKrVupOSZLGm+QZwKvAFVX108AlwNVJLgduB+6oqk3AS8CONn4H8FJV/QRwRxtHkouBbcBPAVcDv5bkjFneGUnS5MYGQA18t63+SPsq4ArgnlbfA1zXlre2ddr2K5Ok1e+qqler6pvAAnDZTO6FJGlqE70GkOSMJE8AR4B9wHPAy1X1WhtyEFjfltcDLwC07a8A7xyuj9hHkjRnEwVAVb1eVZcAGxj81f7uUcPabZbYtlT9DZLsTLI/yf6jR49O0p4k6RRMdRVQVb0MPAxcDpydZE3btAE41JYPAhcCtO3vAI4N10fsM/w9dlXV5qravG7dumnakyRNYZKrgNYlObst/wngA8AzwEPA9W3YduDetry3rdO2P1hV1erb2lVCFwGbgC/P6o5IkqazZvwQLgD2tCt23gbcXVW/k+Rp4K4kvwJ8Bbizjb8T+FySBQZ/+W8DqKqnktwNPA28BtxYVa/P9u5IkiY1NgCq6kngPSPqzzPiKp6q+j5wwxLHuhW4dfo2JUmz5juBJalTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6NTYAklyY5KEkzyR5Kskvtfo5SfYlOdBu17Z6knwqyUKSJ5NcOnSs7W38gSTbV+5uSZLGmeQZwGvAP6yqdwOXAzcmuRi4CXigqjYBD7R1gGuATe1rJ/BpGAQGcAvwXuAy4JbF0JAkzd/YAKiqw1X139vy/wGeAdYDW4E9bdge4Lq2vBX4bA08Apyd5ALgKmBfVR2rqpeAfcDVM703kqSJTfUaQJKNwHuAR4Hzq+owDEICOK8NWw+8MLTbwVZbqn7899iZZH+S/UePHp2mPUnSFCYOgCQ/Cvw28Per6o9ONnRErU5Sf2OhaldVba6qzevWrZu0PUnSlCYKgCQ/wuCX/3+sqv/Uyi+2Uzu02yOtfhC4cGj3DcChk9QlSatgkquAAtwJPFNV/2po015g8Uqe7cC9Q/WPtKuBLgdeaaeI7ge2JFnbXvzd0mqSpFWwZoIx7wP+JvC1JE+02j8BbgPuTrID+DZwQ9t2H3AtsAB8D/goQFUdS/JJ4LE27hNVdWwm90KSNLWxAVBV/43R5+8BrhwxvoAblzjWbmD3NA1KklaG7wSWpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSp8YGQJLdSY4k+fpQ7Zwk+5IcaLdrWz1JPpVkIcmTSS4d2md7G38gyfaVuTuSpElN8gzgM8DVx9VuAh6oqk3AA20d4BpgU/vaCXwaBoEB3AK8F7gMuGUxNCRJq2NsAFTV7wPHjitvBfa05T3AdUP1z9bAI8DZSS4ArgL2VdWxqnoJ2MeJoSJJmqNTfQ3g/Ko6DNBuz2v19cALQ+MOttpSdUnSKpn1i
8AZUauT1E88QLIzyf4k+48ePTrT5iRJ/9+pBsCL7dQO7fZIqx8ELhwatwE4dJL6CapqV1VtrqrN69atO8X2JEnjnGoA7AUWr+TZDtw7VP9IuxrocuCVdorofmBLkrXtxd8trSZJWiVrxg1I8nngZ4FzkxxkcDXPbcDdSXYA3wZuaMPvA64FFoDvAR8FqKpjST4JPNbGfaKqjn9hWZI0R2MDoKo+vMSmK0eMLeDGJY6zG9g9VXeSpBXjO4ElqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTY98JLE1i401fWu0WJE3JZwCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKn/CygtxA/j0fSNHwGIEmdMgAkqVOeApJO0WqdcvvWbR9cle+rtx4DQHqTWc3XegyftxZPAUlSpwwASerU3AMgydVJnk2ykOSmeX9/SdLAXAMgyRnAvwGuAS4GPpzk4nn2IEkamPeLwJcBC1X1PECSu4CtwNNz7mNF+YYsvVV55dNby7wDYD3wwtD6QeC9c+5B0ptMj39UzSP05h0AGVGrNwxIdgI72+p3kzzbls8FvrOCvS3H6dwb2N9ynM69gf0tx+ncG7l9Wf39+CSD5h0AB4ELh9Y3AIeGB1TVLmDX8Tsm2V9Vm1e2vVNzOvcG9rccp3NvYH/LcTr3BvPpb95XAT0GbEpyUZIzgW3A3jn3IElizs8Aquq1JH8XuB84A9hdVU/NswdJ0sDcPwqiqu4D7juFXU84LXQaOZ17A/tbjtO5N7C/5Tide4M59JeqGj9KkvSW40dBSFKnTqsASHJDkqeS/CDJkq9+L/VxEu3F5UeTHEjyhfZC86x6OyfJvnbsfUnWjhjz/iRPDH19P8l1bdtnknxzaNsls+pt0v7auNeHetg7VF+xuZu0vySXJPmD9hh4MslfH9o28/kb97EkSc5qc7HQ5mbj0LabW/3ZJFctt5dT6O0fJHm6zdMDSX58aNvIn/Gc+/uFJEeH+vjbQ9u2t8fBgSTbV6m/O4Z6+0aSl4e2rej8Jdmd5EiSry+xPUk+1Xp/MsmlQ9tmO3dVddp8Ae8GfhJ4GNi8xJgzgOeAdwFnAl8FLm7b7ga2teVfB35xhr39S+CmtnwTcPuY8ecAx4A/2dY/A1y/gnM3UX/Ad5eor9jcTdof8OeBTW35x4DDwNkrMX8nexwNjfk7wK+35W3AF9ryxW38WcBF7ThnzLm39w89tn5xsbeT/Yzn3N8vAP96xL7nAM+327Vtee28+ztu/N9jcEHKvObvLwOXAl9fYvu1wO8yeN/U5cCjKzV3p9UzgKp6pqqeHTPshx8nUVX/F7gL2JokwBXAPW3cHuC6Gba3tR1z0mNfD/xuVX1vhj2czLT9/dAc5g4m6K+qvlFVB9ryIeAIsG7GfSwa+Tg6Sc/3AFe2udoK3FVVr1bVN4GFdry59VZVDw09th5h8J6aeZlk7pZyFbCvqo5V1UvAPuDqVe7vw8DnZ9zDkqrq9xn8cbiUrcBna+AR4OwkF7ACc3daBcCERn2cxHrgncDLVfXacfVZOb+qDgO02/PGjN/GiQ+qW9tTujuSnDXD3qbp7+1J9id5ZPH0FCs/d9P0B0CSyxj89fbcUHmW87fU42jkmDY3rzCYq0n2Xenehu1g8BfjolE/41matL+/2n5e9yRZfAPoSs/dVN+jnTq7CHhwqLzS8zfOUv3PfO7mfhlokt8D/uyITR+vqnsnOcSIWp2kPpPepjzOBcBfZPB+h0U3A/+bwS+1XcAvA59Yhf7+XFUdSvIu4MEkXwP+aMS4qS8Pm/H8fQ7YXlU/aOVlz9/x32ZE7fj7vGKPtTEmPn6Snwc2Az8zVD7hZ1xVz43afwX7+8/A56vq1SQfY/BM6ooJ951Hf4u2AfdU1etDtZWev3Hm9rhbjfcBfGCZh1jq4yS+w+Cp0pr219oJHzOxnN6Sv
Jjkgqo63H5BHTnJof4a8MWq+uOhYx9ui68m+ffAP5qmt1n1106tUFXPJ3kYeA/w2yxz7mbVX5I/A3wJ+Kft6e/isZc9f8cZ+7EkQ2MOJlkDvIPBU/dJ9l3p3kjyAQbh+jNV9epifYmf8Sx/gU3ykS5/OLT674Dbh/b92eP2fXiGvU3U35BtwI3DhTnM3zhL9T/zuXszngIa+XESNXiV5CEG594BtgOTPKOY1N52zEmOfcI5xfZLb/F8+3XAyCsAVrK/JGsXT50kORd4H/D0HOZu0v7OBL7I4Pznbx23bdbzN8nHkgz3fD3wYJurvcC2DK4SugjYBHx5mf1M1VuS9wD/FvhQVR0Zqo/8Gc+wt0n7u2Bo9UPAM235fmBL63MtsIU3PlOeS3+tx59k8GLqHwzV5jF/4+wFPtKuBroceKX9ATT7uVvJV7un/QJ+jkHKvQq8CNzf6j8G3Dc07lrgGwxS+eND9Xcx+Ie4APwWcNYMe3sn8ABwoN2e0+qbgd8YGrcR+F/A247b/0Hgawx+cf0H4EdnPHdj+wP+Uuvhq+12xzzmbor+fh74Y+CJoa9LVmr+Rj2OGJxW+lBbfnubi4U2N+8a2vfjbb9ngWtW4N/CuN5+r/0bWZynveN+xnPu718AT7U+HgL+wtC+f6vN6QLw0dXor63/M+C24/Zb8flj8Mfh4fZYP8jgNZyPAR9r28PgP856rvWweWjfmc6d7wSWpE69GU8BSZJmwACQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlT/w9UGwU84yEPbQAAAABJRU5ErkJggg==\n", 58 | "text/plain": [ 59 | "
" 60 | ] 61 | }, 62 | "metadata": {}, 63 | "output_type": "display_data" 64 | } 65 | ], 66 | "source": [ 67 | "# Thư mục để dữ liệu\n", 68 | "data_dir = 'dataset'\n", 69 | "# Đọc file driving_log.csv với các cột tương ứng\n", 70 | "data_df = pd.read_csv(os.path.join(os.getcwd(), data_dir, 'driving_log.csv'), names=['center', 'left', 'right', 'steering', 'throttle', 'reverse', 'speed'])\n", 71 | "\n", 72 | "# Lấy đường dẫn đến ảnh ở camera giữa, trái, phải\n", 73 | "X = data_df[['center', 'left', 'right']].values\n", 74 | "# Lấy góc lái của ô tô\n", 75 | "y = data_df['steering'].values\n", 76 | "\n", 77 | "# Vẽ histogram dữ liệu\n", 78 | "plt.hist(y)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "markdown", 83 | "metadata": {}, 84 | "source": [ 85 | "Do quá nhiều dữ liệu với góc lái ở 0 tức là để ô tô đi thẳng, nên ta sẽ loại bỏ bớt đi để tránh việc model có xu hướng dự đoán góc lái thiên về 0." 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 3, 91 | "metadata": {}, 92 | "outputs": [ 93 | { 94 | "data": { 95 | "text/plain": [ 96 | "(array([ 7., 53., 302., 610., 1503., 208., 179., 43., 18.,\n", 97 | " 11.]),\n", 98 | " array([-0.9197341 , -0.72776069, -0.53578728, -0.34381387, -0.15184046,\n", 99 | " 0.04013295, 0.23210636, 0.42407977, 0.61605318, 0.80802659,\n", 100 | " 1. 
]),\n", 101 | " )" 102 | ] 103 | }, 104 | "execution_count": 3, 105 | "metadata": {}, 106 | "output_type": "execute_result" 107 | }, 108 | { 109 | "data": { 110 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD8CAYAAAB+UHOxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAFIhJREFUeJzt3X+MZWd93/H3J97alEaJ1/ZAzK7JmmZLQtMGrJFxg5QQTIwNkddVcbuoKRuy1YrEpLQ0Ckup5IoIFdqqTlBT0k3sYFpkQ5wgb4upu/EPoUrYYZ2C8Y+YHQy1By/s0DVOU4TB8O0f99lwM3tn5s7cOz/M835JV/ec5zznnO88c+d+5pxzf6SqkCT15/s2uwBJ0uYwACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmd2rbZBSznvPPOq127dm12GZL0rHLfffd9tapmVuq3pQNg165dHD16dLPLkKRnlST/e5x+ngKSpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKRObel3Aksr2XXwY5u27y++53Wbtm9pGjwCkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkTq0YAEluSHIiyQMjlv1qkkpyXptPkvclmUtyf5KLhvruS3Ks3fZN98eQJK3WOEcAHwAuX9yY5ALgZ4HHhpqvAHa32wHg/a3vOcC1wMuBi4Frk2yfpHBJ0mRWDICq+gRwcsSi64BfA2qobQ/wwRq4Bzg7yfnAa4AjVXWyqp4EjjAiVCRJG2dN1wCSXAl8qao+s2jRDuDxofn51rZUuyRpk6z600CTPBd4J3DZqMUj2mqZ9lHbP8Dg9BEvfOELV1ueJGlMazkC+OvAhcBnknwR2An8SZIfYvCf/QVDfXcCTyzTfpqqOlRVs1U1OzMzs4byJEnjWHUAVNVnq+p5VbWrqnYxeHK/qKq+DBwG3theDXQJ8FRVHQduBy5Lsr1d/L2stUmSNsk4LwO9Cfgk8OIk80n2L9P9NuBRYA74HeCXAarqJPDrwKfa7V2tTZK0SVa8BlBVb1hh+a6h6QKuWaLfDcANq6xPkrROfCewJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1KlxvhT+hiQnkjww1PZvk/xpkvuTfDTJ2UPL3pFkLskjSV4z1H55a5tLcnD6P4okaTXGOQL4AHD5orYjwI9X1d8GPge8AyDJS4C9wN9s6/zHJGckOQP4LeAK4CXAG1pfSdImWTEAquoTwMlFbf+jqp5ps/cAO9v0HuDmqnq6qr4AzAEXt9tcVT1aVd8Ebm59JUmbZBrXAH4R+Hib3gE8PrRsvrUt1X6aJAeSHE1ydGFhYQrlSZJGmSgAkrwTeAb40KmmEd1qmfbTG6sOVdVsVc3OzMxMUp4kaRnb1rpikn3AzwGXVtWpJ/N54IKhbjuBJ9r0Uu2SpE2wpiOAJJcDbweurKqvDy06DOxNclaSC4HdwB8DnwJ2J7kwyZkMLhQfnqx0SdIkVjwCSHIT8ErgvCTzwLUMXvVzFnAkCcA9VfXmqnowyUeAhxicGrqmqr7dtvMW4HbgDOCGqnpwHX4eSdKYVgyAqnrDiObrl+n/buDdI9pvA25bVXWSpHXjO4ElqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKR
OGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHVqxQBIckOSE0keGGo7J8mRJMfa/fbWniTvSzKX5P4kFw2ts6/1P5Zk3/r8OJKkcY1zBPAB4PJFbQeBO6pqN3BHmwe4AtjdbgeA98MgMBh8mfzLgYuBa0+FhiRpc6wYAFX1CeDkouY9wI1t+kbgqqH2D9bAPcDZSc4HXgMcqaqTVfUkcITTQ0WStIHWeg3g+VV1HKDdP6+17wAeH+o339qWapckbZJpXwTOiLZapv30DSQHkhxNcnRhYWGqxUmSvmutAfCVdmqHdn+itc8DFwz12wk8sUz7aarqUFXNVtXszMzMGsuTJK1krQFwGDj1Sp59wK1D7W9srwa6BHiqnSK6HbgsyfZ28fey1iZJ2iTbVuqQ5CbglcB5SeYZvJrnPcBHkuwHHgOubt1vA14LzAFfB94EUFUnk/w68KnW711VtfjCsiRpA60YAFX1hiUWXTqibwHXLLGdG4AbVlWdJGnd+E5gSeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMTBUCSf5bkwSQPJLkpyXOSXJjk3iTHknw4yZmt71ltfq4t3zWNH0CStDZrDoAkO4B/AsxW1Y8DZwB7gfcC11XVbuBJYH9bZT/wZFX9CHBd6ydJ2iSTngLaBvzVJNuA5wLHgVcBt7TlNwJXtek9bZ62/NIkmXD/kqQ1WnMAVNWXgH8HPMbgif8p4D7ga1X1TOs2D+xo0zuAx9u6z7T+5651/5KkyUxyCmg7g//qLwReAPw14IoRXevUKsssG97ugSRHkxxdWFhYa3mSpBVMcgro1cAXqmqhqr4F/CHwk8DZ7ZQQwE7giTY9D1wA0Jb/IHBy8Uar6lBVzVbV7MzMzATlSZKWM0kAPAZckuS57Vz+pcBDwF3A61uffcCtbfpwm6ctv7OqTjsCkCRtjEmuAdzL4GLunwCfbds6BLwdeFuSOQbn+K9vq1wPnNva3wYcnKBuSdKEtq3cZWlVdS1w7aLmR4GLR/T9BnD1JPuTJE2P7wSWpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktSpiQIgydlJbknyp0keTvJ3kpyT5EiSY+1+e+ubJO9LMpfk/iQXTedHkCStxaRHAL8J/Peq+lHgJ4CHGXzZ+x1VtRu4g+9++fsVwO52OwC8f8J9S5ImsOYASPIDwE8B1wNU1Ter6mvAHuDG1u1G4Ko2vQf4YA3cA5yd5Pw1Vy5Jmsi2CdZ9EbAA/F6SnwDuA94KPL+qjgNU1fEkz2v9dwCPD60/39qOT1CDtohdBz+22SVIWqVJTgFtAy4C3l9VLwP+H9893TNKRrTVaZ2SA0mOJjm6sLAwQXmSpOVMEgDzwHxV3dvmb2EQCF85dWqn3Z8Y6n/B0Po7gScWb7SqDlXVbFXNzszMTFCeJGk5aw6Aqvoy8HiSF7emS4GHgMPAvta2D7i1TR8G3theDXQJ8NSpU0WSpI03yTUAgF8BPpTkTOBR4E0MQuUjSfYDjwFXt763Aa8F5oCvt76SpE0yUQBU1aeB2RGLLh3Rt4BrJtmfJGl6fCewJHXKAJCkThkAktQpA0CSOmUASFKnDABJ6pQBIEmdMgAkqVMGgCR1ygCQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1KmJAyDJGUn+V5L/1uYvTHJvkmNJPty+MJ4kZ7X5ubZ816T7liSt3TSOAN4KPDw0/17guqraDTwJ7G/t+4Enq+pHgOtaP0nSJpkoAJLsBF4H/G6bD/Aq4JbW5Ubgqja9p83Tll/a+kuSNsGkRwC/Afwa8J02fy7
wtap6ps3PAzva9A7gcYC2/KnWX5K0CdYcAEl+DjhRVfcNN4/oWmMsG97ugSRHkxxdWFhYa3mSpBVMcgTwCuDKJF8EbmZw6uc3gLOTbGt9dgJPtOl54AKAtvwHgZOLN1pVh6pqtqpmZ2ZmJihPkrScNQdAVb2jqnZW1S5gL3BnVf1D4C7g9a3bPuDWNn24zdOW31lVpx0BSJI2xnq8D+DtwNuSzDE4x399a78eOLe1vw04uA77liSNadvKXVZWVXcDd7fpR4GLR/T5BnD1NPYnSZqc7wSWpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlTBoAkdWoqbwTT1rHr4Mc2uwRJzxIeAUhSpwwASeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnq1JoDIMkFSe5K8nCSB5O8tbWfk+RIkmPtfntrT5L3JZlLcn+Si6b1Q0iSVm+SI4BngH9eVT8GXAJck+QlwEHgjqraDdzR5gGuAHa32wHg/RPsW5I0oTV/GFxVHQeOt+n/m+RhYAewB3hl63YjcDfw9tb+waoq4J4kZyc5v21HetbZrA/e++J7Xrcp+9X3nqlcA0iyC3gZcC/w/FNP6u3+ea3bDuDxodXmW9vibR1IcjTJ0YWFhWmUJ0kaYeIASPL9wB8A/7Sq/my5riPa6rSGqkNVNVtVszMzM5OWJ0lawkQBkOSvMHjy/1BV/WFr/kqS89vy84ETrX0euGBo9Z3AE5PsX5K0dmu+BpAkwPXAw1X174cWHQb2Ae9p97cOtb8lyc3Ay4GnPP8vrd5mfumP1x++t0zyjWCvAP4R8Nkkn25t/4LBE/9HkuwHHgOubstuA14LzAFfB940wb4lSROa5FVA/5PR5/UBLh3Rv4Br1ro/SdJ0+U5gSeqUASBJnTIAJKlTBoAkdcoAkKROGQCS1CkDQJI6ZQBIUqcMAEnqlAEgSZ0yACSpUwaAJHXKAJCkThkAktSpSb4PQEvYzC/skKRxeQQgSZ3yCEDS2Dbr6NavolwfHgFIUqc2PACSXJ7kkSRzSQ5u9P4lSQMbegooyRnAbwE/C8wDn0pyuKoe2sg6JD27bOYLK76XTz9t9DWAi4G5qnoUIMnNwB7AAJC0JX0vX/fY6ADYATw+ND8PvHy9dubLMSVpaRsdABnRVn+pQ3IAONBm/zzJI236POCr61jbpLZyfVu5Ntja9W3l2sD6JrGVayPvnai+Hx6n00YHwDxwwdD8TuCJ4Q5VdQg4tHjFJEeranZ9y1u7rVzfVq4NtnZ9W7k2sL5JbOXaYGPq2+hXAX0K2J3kwiRnAnuBwxtcgySJDT4CqKpnkrwFuB04A7ihqh7cyBokSQMb/k7gqroNuG0Nq552WmiL2cr1beXaYGvXt5VrA+ubxFauDTagvlTVyr0kSd9z/CgISerUlgqAJFcneTDJd5IsefV7qY+TaBeX701yLMmH24XmadV2TpIjbdtHkmwf0ednknx66PaNJFe1ZR9I8oWhZS+dVm3j1tf6fXuohsND7es2duPWl+SlST7ZHgP3J/kHQ8umPn4rfSxJkrPaWMy1sdk1tOwdrf2RJK+ZtJY11ve2JA+1sbojyQ8PLRv5e97A2n4hycJQDf94aNm+9jg4lmTftGsbs77rhmr7XJKvDS1b77G7IcmJJA8ssTxJ3tdqvz/JRUPLpjt2VbVlbsCPAS8G7gZml+hzBvB54EXAmcBngJe0ZR8B9rbp3wZ+aYq1/RvgYJs+CLx3hf7nACeB57b5DwCvX8exG6s+4M+XaF+3sRu3PuBvALvb9AuA48DZ6zF+yz2Ohvr8MvDbbXov8OE2/ZLW/yzgwradM6Y8XuPU9zNDj69fOlXfcr/nDaztF4D/MGLdc4BH2/32Nr19o+tb1P9XGLwgZd3Hrm3/p4CLgAeWWP5a4OMM3jd1CXDveo3dljoCqKqHq+qRFbr9xcdJVNU3gZuBPUkCvAq4pfW7EbhqiuXtads
cd9uvBz5eVV+fYg3LWW19f2EDxg7GqK+qPldVx9r0E8AJYGbKdZwy8nG0TM23AJe2sdoD3FxVT1fVF4C5tr0Nra+q7hp6fN3D4H01G2GcsVvKa4AjVXWyqp4EjgCXb3J9bwBumnINS6qqTzD453Ape4AP1sA9wNlJzmcdxm5LBcCYRn2cxA7gXOBrVfXMovZpeX5VHQdo989bof9eTn9Qvbsd0l2X5Kwp1raa+p6T5GiSe06dnmL9x2419QGQ5GIG/719fqh5muO31ONoZJ82Nk8xGKtx1p3Uavexn8F/jaeM+j1vdG1/r/2+bkly6g2gW2rs2mmzC4E7h5rXc+zGsVT9Ux+7DX8ZaJI/An5oxKJ3VtWt42xiRFst0z6V2la5nfOBv8Xg/Q6nvAP4MoMntUPA24F3bUJ9L6yqJ5K8CLgzyWeBPxvRb9UvD5vy+P1nYF9Vfac1Tzx+i3czom3xz7xuj7UxjL2PJD8PzAI/PdR82u+5qj4/av11qu2/AjdV1dNJ3szgSOpVY667EfWdshe4paq+PdS2nmM3jg173G3G+wBePeEmlvo4ia8yOFTa1v5bO+1jJiapLclXkpxfVcfbE9SJZTb194GPVtW3hrZ9vE0+neT3gF9dTW3Tqq+dWqGqHk1yN/Ay4A+YcOymVV+SHwA+BvzLdvh7atsTj98iK34syVCf+STbgB9kcOg+zrqTGmsfSV7NIGB/uqqePtW+xO95Wk9i43yky/8Zmv0d4L1D675y0bp3T6musesbshe4ZrhhncduHEvVP/WxezaeAhr5cRI1uEpyF4Nz7wD7gHGOKMZ1uG1znG2fdk6xPemdOt9+FTDyFQDrWV+S7adOnSQ5D3gF8NAGjN249Z0JfJTB+c/fX7Rs2uM3zseSDNf8euDONlaHgb0ZvEroQmA38McT1rPq+pK8DPhPwJVVdWKofeTveYNrO39o9krg4TZ9O3BZq3E7cBl/+Uh5Q+prNb6YwcXUTw61rffYjeMw8Mb2aqBLgKfaP0DTH7v1vNq92hvwdxmk3NPAV4DbW/sLgNuG+r0W+ByDVH7nUPuLGPwhzgG/D5w1xdrOBe4AjrX7c1r7LPC7Q/12AV8Cvm/R+ncCn2XwxPVfgO+f8titWB/wk62Gz7T7/Rsxdquo7+eBbwGfHrq9dL3Gb9TjiMFppSvb9HPaWMy1sXnR0LrvbOs9AlyxTn8PK9X3R+3v5NRYHV7p97yBtf1r4MFWw13Ajw6t+4ttTOeAN23G2LX5fwW8Z9F6GzF2NzF4hdu3GDzf7QfeDLy5LQ+DL876fKthdmjdqY6d7wSWpE49G08BSZKmwACQpE4ZAJLUKQNAkjplAEhSpwwASeqUASBJnTIAJKlT/x8mB4FuXq+DEQAAAABJRU5ErkJggg==\n", 111 | "text/plain": [ 112 | "
" 113 | ] 114 | }, 115 | "metadata": {}, 116 | "output_type": "display_data" 117 | } 118 | ], 119 | "source": [ 120 | "# Loại bỏ và chỉ lấy 1000 dữ liệu có góc lái ở 0\n", 121 | "pos_zero = np.array(np.where(y==0)).reshape(-1, 1)\n", 122 | "pos_none_zero = np.array(np.where(y!=0)).reshape(-1, 1)\n", 123 | "np.random.shuffle(pos_zero)\n", 124 | "pos_zero = pos_zero[:1000]\n", 125 | "\n", 126 | "pos_combined = np.vstack((pos_zero, pos_none_zero))\n", 127 | "pos_combined = list(pos_combined)\n", 128 | "\n", 129 | "y = y[pos_combined].reshape(len(pos_combined))\n", 130 | "X = X[pos_combined, :].reshape((len(pos_combined), 3))\n", 131 | "\n", 132 | "# After process\n", 133 | "plt.hist(y)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "Ta vẫn để giá trị góc lái ở 0 là nhiều nhất vì có thể phần nhiều thời gian của ô tô là đi thẳng thật. Việc histogram lệch bên trái không quá quan trọng vì ta dùng flip ảnh." 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 4, 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "# Chia ra traing set và validation set\n", 150 | "X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.2, random_state=0)" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": 6, 156 | "metadata": {}, 157 | "outputs": [ 158 | { 159 | "name": "stdout", 160 | "output_type": "stream", 161 | "text": [ 162 | "_________________________________________________________________\n", 163 | "Layer (type) Output Shape Param # \n", 164 | "=================================================================\n", 165 | "lambda_1 (Lambda) (None, 66, 200, 3) 0 \n", 166 | "_________________________________________________________________\n", 167 | "conv2d_1 (Conv2D) (None, 31, 98, 24) 1824 \n", 168 | "_________________________________________________________________\n", 169 | "conv2d_2 (Conv2D) (None, 14, 47, 36) 21636 \n", 170 | 
"_________________________________________________________________\n", 171 | "conv2d_3 (Conv2D) (None, 5, 22, 48) 43248 \n", 172 | "_________________________________________________________________\n", 173 | "conv2d_4 (Conv2D) (None, 3, 20, 64) 27712 \n", 174 | "_________________________________________________________________\n", 175 | "conv2d_5 (Conv2D) (None, 1, 18, 64) 36928 \n", 176 | "_________________________________________________________________\n", 177 | "dropout_1 (Dropout) (None, 1, 18, 64) 0 \n", 178 | "_________________________________________________________________\n", 179 | "flatten_1 (Flatten) (None, 1152) 0 \n", 180 | "_________________________________________________________________\n", 181 | "dense_1 (Dense) (None, 100) 115300 \n", 182 | "_________________________________________________________________\n", 183 | "dropout_2 (Dropout) (None, 100) 0 \n", 184 | "_________________________________________________________________\n", 185 | "dense_2 (Dense) (None, 50) 5050 \n", 186 | "_________________________________________________________________\n", 187 | "dense_3 (Dense) (None, 10) 510 \n", 188 | "_________________________________________________________________\n", 189 | "dense_4 (Dense) (None, 1) 11 \n", 190 | "=================================================================\n", 191 | "Total params: 252,219\n", 192 | "Trainable params: 252,219\n", 193 | "Non-trainable params: 0\n", 194 | "_________________________________________________________________\n" 195 | ] 196 | }, 197 | { 198 | "name": "stderr", 199 | "output_type": "stream", 200 | "text": [ 201 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:3: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(24, (5, 5), activation=\"elu\", strides=(2, 2))`\n", 202 | " This is separate from the ipykernel package so we can avoid doing imports until\n", 203 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:4: UserWarning: Update your 
`Conv2D` call to the Keras 2 API: `Conv2D(36, (5, 5), activation=\"elu\", strides=(2, 2))`\n", 204 | " after removing the cwd from sys.path.\n", 205 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:5: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(48, (5, 5), activation=\"elu\", strides=(2, 2))`\n", 206 | " \"\"\"\n", 207 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:6: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(64, (3, 3), activation=\"elu\")`\n", 208 | " \n", 209 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:7: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(64, (3, 3), activation=\"elu\")`\n", 210 | " import sys\n" 211 | ] 212 | } 213 | ], 214 | "source": [ 215 | "# Xây dựng model\n", 216 | "model = Sequential()\n", 217 | "model.add(Lambda(lambda x: x/127.5-1.0, input_shape=INPUT_SHAPE))\n", 218 | "model.add(Conv2D(24, 5, 5, activation='elu', subsample=(2, 2)))\n", 219 | "model.add(Conv2D(36, 5, 5, activation='elu', subsample=(2, 2)))\n", 220 | "model.add(Conv2D(48, 5, 5, activation='elu', subsample=(2, 2)))\n", 221 | "model.add(Conv2D(64, 3, 3, activation='elu'))\n", 222 | "model.add(Conv2D(64, 3, 3, activation='elu'))\n", 223 | "model.add(Dropout(0.5))\n", 224 | "model.add(Flatten())\n", 225 | "model.add(Dense(100, activation='elu'))\n", 226 | "model.add(Dropout(0.5))\n", 227 | "model.add(Dense(50, activation='elu'))\n", 228 | "model.add(Dense(10, activation='elu'))\n", 229 | "model.add(Dense(1))\n", 230 | "model.summary()" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": 7, 236 | "metadata": {}, 237 | "outputs": [ 238 | { 239 | "name": "stderr", 240 | "output_type": "stream", 241 | "text": [ 242 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:28: UserWarning: The semantics of the Keras 2 argument `steps_per_epoch` is not the same as the Keras 1 argument 
`samples_per_epoch`. `steps_per_epoch` is the number of batches to draw from the generator at each epoch. Basically steps_per_epoch = samples_per_epoch/batch_size. Similarly `nb_val_samples`->`validation_steps` and `val_samples`->`steps` arguments have changed. Update your method calls accordingly.\n", 243 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:28: UserWarning: Update your `fit_generator` call to the Keras 2 API: `fit_generator( (y2-y1)/(x2-x1) 97 | # as x2 == x1 causes zero-division problem, we'll write it in the below form: 98 | # (ym-y1)*(x2-x1) - (y2-y1)*(xm-x1) > 0 99 | mask = np.zeros_like(image[:, :, 1]) 100 | mask[(ym - y1) * (x2 - x1) - (y2 - y1) * (xm - x1) > 0] = 1 101 | 102 | # choose which side should have shadow and adjust saturation 103 | cond = mask == np.random.randint(2) 104 | s_ratio = np.random.uniform(low=0.2, high=0.5) 105 | 106 | # adjust Saturation in HLS(Hue, Light, Saturation) 107 | hls = cv2.cvtColor(image, cv2.COLOR_RGB2HLS) 108 | hls[:, :, 1][cond] = hls[:, :, 1][cond] * s_ratio 109 | return cv2.cvtColor(hls, cv2.COLOR_HLS2RGB) 110 | 111 | 112 | def random_brightness(image): 113 | """ 114 | Thêm giá trị sáng cho ảnh. 115 | """ 116 | # HSV (Hue, Saturation, Value) is also called HSB ('B' for Brightness). 
117 | hsv = cv2.cvtColor(image, cv2.COLOR_RGB2HSV) 118 | ratio = 1.0 + 0.4 * (np.random.rand() - 0.5) 119 | hsv[:,:,2] = hsv[:,:,2] * ratio 120 | return cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB) 121 | 122 | 123 | def augument(data_dir, center, left, right, steering_angle, range_x=100, range_y=10): 124 | """ 125 | Kết hợp các phương pháp augmentation ảnh 126 | """ 127 | image, steering_angle = choose_image(data_dir, center, left, right, steering_angle) 128 | image, steering_angle = random_flip(image, steering_angle) 129 | image, steering_angle = random_translate(image, steering_angle, range_x, range_y) 130 | image = random_shadow(image) 131 | image = random_brightness(image) 132 | return image, steering_angle 133 | 134 | 135 | def batch_generator(data_dir, image_paths, steering_angles, batch_size, is_training): 136 | """ 137 | Trả về ảnh và góc lái tương ứng cho việc training 138 | """ 139 | images = np.empty([batch_size, IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_CHANNELS]) 140 | steers = np.empty(batch_size) 141 | while True: 142 | i = 0 143 | for index in np.random.permutation(image_paths.shape[0]): 144 | center, left, right = image_paths[index] 145 | steering_angle = steering_angles[index] 146 | # argumentation 147 | if is_training and np.random.rand() < 0.6: 148 | image, steering_angle = augument(data_dir, center, left, right, steering_angle) 149 | else: 150 | image = load_image(data_dir, center) 151 | # add the image and steering angle to the batch 152 | images[i] = preprocess(image) 153 | steers[i] = steering_angle 154 | i += 1 155 | if i == batch_size: 156 | break 157 | yield images, steers -------------------------------------------------------------------------------- /L9/dataset.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nttuan8/DL_Tutorial/b0a5dfb2898c17a7e8cb0b85035ef09d0b2eed4d/L9/dataset.zip -------------------------------------------------------------------------------- 
/L9/feature-extractor.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 13 | " from ._conv import register_converters as _register_converters\n", 14 | "Using TensorFlow backend.\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "# Thêm thư viện\n", 20 | "from sklearn.linear_model import LogisticRegression\n", 21 | "from sklearn.model_selection import GridSearchCV\n", 22 | "from sklearn.metrics import classification_report\n", 23 | "from imutils import paths\n", 24 | "from keras.applications import VGG16\n", 25 | "from keras.applications import imagenet_utils\n", 26 | "from keras.preprocessing.image import img_to_array\n", 27 | "from keras.preprocessing.image import load_img\n", 28 | "from sklearn.preprocessing import LabelEncoder\n", 29 | "from sklearn.model_selection import train_test_split\n", 30 | "import numpy as np\n", 31 | "import random\n", 32 | "import os" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 2, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "# Lấy các đường dẫn đến ảnh.\n", 42 | "image_path = list(paths.list_images('dataset/'))\n", 43 | "\n", 44 | "# Đổi vị trí ngẫu nhiên các đường dẫn ảnh\n", 45 | "random.shuffle(image_path)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 3, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "# Đường dẫn ảnh sẽ là dataset/tên_loài_hoa/tên_ảnh ví dụ dataset/Bluebell/image_0241.jpg nên p.split(os.path.sep)[-2] sẽ lấy ra được tên loài hoa\n", 55 | "labels = 
[p.split(os.path.sep)[-2] for p in image_path]\n", 56 | "\n", 57 | "# Chuyển tên các loài hoa thành số\n", 58 | "le = LabelEncoder()\n", 59 | "labels = le.fit_transform(labels)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 4, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "# Load model VGG 16 của ImageNet dataset, include_top=False để bỏ phần Fully connected layer ở cuối.\n", 69 | "model = VGG16(weights='imagenet', include_top=False)" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 5, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "# Load ảnh và resize về đúng kích thước mà VGG 16 cần là (224,224)\n", 79 | "list_image = []\n", 80 | "for (j, imagePath) in enumerate(image_path):\n", 81 | " image = load_img(imagePath, target_size=(224, 224))\n", 82 | " image = img_to_array(image)\n", 83 | " \n", 84 | " image = np.expand_dims(image, 0)\n", 85 | " image = imagenet_utils.preprocess_input(image)\n", 86 | " \n", 87 | " list_image.append(image)\n", 88 | " \n", 89 | "list_image = np.vstack(list_image)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 6, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "# Dùng pre-trained model để lấy ra các feature của ảnh\n", 99 | "features = model.predict(list_image)\n", 100 | "\n", 101 | "# Giống bước flatten trong CNN, chuyển từ tensor 3 chiều sau ConvNet sang vector 1 chiều\n", 102 | "features = features.reshape((features.shape[0], 512*7*7))" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 7, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "# Chia traing set, test set tỉ lệ 80-20\n", 112 | "X_train, X_test, y_train, y_test = train_test_split(features, labels, test_size=0.2, random_state=42)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 11, 118 | "metadata": {}, 119 | "outputs": [ 120 | { 121 | "name": "stdout", 122 | "output_type": 
"stream", 123 | "text": [ 124 | "Best parameter for the model {'C': 0.1}\n" 125 | ] 126 | } 127 | ], 128 | "source": [ 129 | "# Grid search để tìm các parameter tốt nhất cho model. C = 1/lamda, hệ số trong regularisation. Solver là kiểu optimize\n", 130 | "# https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html\n", 131 | "params = {'C' : [0.1, 1.0, 10.0, 100.0]}\n", 132 | "#model = GridSearchCV(LogisticRegression(solver='lbfgs', multi_class='multinomial'), params)\n", 133 | "model = GridSearchCV(LogisticRegression(), params)\n", 134 | "model.fit(X_train, y_train)\n", 135 | "print('Best parameter for the model {}'.format(model.best_params_))" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 12, 141 | "metadata": {}, 142 | "outputs": [ 143 | { 144 | "name": "stdout", 145 | "output_type": "stream", 146 | "text": [ 147 | " precision recall f1-score support\n", 148 | "\n", 149 | " 0 0.89 1.00 0.94 17\n", 150 | " 1 0.91 0.77 0.83 13\n", 151 | " 2 0.95 0.95 0.95 19\n", 152 | " 3 0.62 0.91 0.74 11\n", 153 | " 4 0.87 0.93 0.90 14\n", 154 | " 5 0.86 0.86 0.86 14\n", 155 | " 6 1.00 1.00 1.00 11\n", 156 | " 7 0.92 0.85 0.88 13\n", 157 | " 8 1.00 0.95 0.97 20\n", 158 | " 9 1.00 0.94 0.97 18\n", 159 | " 10 0.91 1.00 0.95 10\n", 160 | " 11 1.00 0.94 0.97 16\n", 161 | " 12 0.84 0.94 0.89 17\n", 162 | " 13 1.00 1.00 1.00 19\n", 163 | " 14 1.00 0.96 0.98 27\n", 164 | " 15 0.75 0.69 0.72 13\n", 165 | " 16 1.00 0.85 0.92 20\n", 166 | "\n", 167 | "avg / total 0.93 0.92 0.92 272\n", 168 | "\n" 169 | ] 170 | } 171 | ], 172 | "source": [ 173 | "# Đánh giá model\n", 174 | "preds = model.predict(X_test)\n", 175 | "print(classification_report(y_test, preds))" 176 | ] 177 | } 178 | ], 179 | "metadata": { 180 | "kernelspec": { 181 | "display_name": "Python 3", 182 | "language": "python", 183 | "name": "python3" 184 | }, 185 | "language_info": { 186 | "codemirror_mode": { 187 | "name": "ipython", 188 | "version": 3 189 | }, 
190 | "file_extension": ".py", 191 | "mimetype": "text/x-python", 192 | "name": "python", 193 | "nbconvert_exporter": "python", 194 | "pygments_lexer": "ipython3", 195 | "version": "3.6.5" 196 | } 197 | }, 198 | "nbformat": 4, 199 | "nbformat_minor": 2 200 | } 201 | -------------------------------------------------------------------------------- /L9/fine-tune.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "C:\\Users\\DELL\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 13 | " from ._conv import register_converters as _register_converters\n", 14 | "Using TensorFlow backend.\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "# Thêm thư viện\n", 20 | "from sklearn.linear_model import LogisticRegression\n", 21 | "from sklearn.model_selection import GridSearchCV\n", 22 | "from sklearn.metrics import classification_report\n", 23 | "from imutils import paths\n", 24 | "from keras.applications import VGG16\n", 25 | "from keras.applications import imagenet_utils\n", 26 | "from keras.preprocessing.image import img_to_array\n", 27 | "from keras.preprocessing.image import load_img\n", 28 | "from sklearn.preprocessing import LabelEncoder\n", 29 | "from sklearn.preprocessing import LabelBinarizer\n", 30 | "from sklearn.model_selection import train_test_split\n", 31 | "from keras.preprocessing.image import ImageDataGenerator\n", 32 | "from keras.optimizers import SGD\n", 33 | "from keras.optimizers import RMSprop\n", 34 | "from keras.applications import VGG16\n", 35 | "from keras.layers import Input\n", 36 | "from keras.models import Model\n", 37 | "from 
keras.layers.core import Dense\n", 38 | "from keras.layers.core import Dropout\n", 39 | "from keras.layers.core import Flatten\n", 40 | "import numpy as np\n", 41 | "import random\n", 42 | "import os" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 2, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "# Lấy các đường dẫn đến ảnh.\n", 52 | "image_path = list(paths.list_images('dataset/'))\n", 53 | "\n", 54 | "# Đổi vị trí ngẫu nhiên các đường dẫn ảnh\n", 55 | "random.shuffle(image_path)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 4, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "# Đường dẫn ảnh sẽ là dataset/tên_loài_hoa/tên_ảnh ví dụ dataset/Bluebell/image_0241.jpg nên p.split(os.path.sep)[-2] sẽ lấy ra được tên loài hoa\n", 65 | "labels = [p.split(os.path.sep)[-2] for p in image_path]\n", 66 | "\n", 67 | "# Chuyển tên các loài hoa thành số\n", 68 | "le = LabelEncoder()\n", 69 | "labels = le.fit_transform(labels)\n", 70 | "\n", 71 | "# One-hot encoding\n", 72 | "lb = LabelBinarizer()\n", 73 | "labels = lb.fit_transform(labels)" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 5, 79 | "metadata": {}, 80 | "outputs": [], 81 | "source": [ 82 | "# Load ảnh và resize về đúng kích thước mà VGG 16 cần là (224,224)\n", 83 | "list_image = []\n", 84 | "for (j, imagePath) in enumerate(image_path):\n", 85 | " image = load_img(imagePath, target_size=(224, 224))\n", 86 | " image = img_to_array(image)\n", 87 | " \n", 88 | " image = np.expand_dims(image, 0)\n", 89 | " image = imagenet_utils.preprocess_input(image)\n", 90 | " \n", 91 | " list_image.append(image)\n", 92 | " \n", 93 | "list_image = np.vstack(list_image)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 6, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "# Load model VGG 16 của ImageNet dataset, include_top=False để bỏ phần Fully connected layer ở cuối.\n", 103 | "baseModel 
= VGG16(weights='imagenet', include_top=False, input_tensor=Input(shape=(224, 224, 3)))\n", 104 | "\n", 105 | "# Xây thêm các layer\n", 106 | "# Lấy output của ConvNet trong VGG16\n", 107 | "fcHead = baseModel.output\n", 108 | "\n", 109 | "# Flatten trước khi dùng FCs\n", 110 | "fcHead = Flatten(name='flatten')(fcHead)\n", 111 | "\n", 112 | "# Thêm FC\n", 113 | "fcHead = Dense(256, activation='relu')(fcHead)\n", 114 | "fcHead = Dropout(0.5)(fcHead)\n", 115 | "\n", 116 | "# Output layer với softmax activation\n", 117 | "fcHead = Dense(17, activation='softmax')(fcHead)\n", 118 | "\n", 119 | "# Xây dựng model bằng việc nối ConvNet của VGG16 và fcHead\n", 120 | "model = Model(inputs=baseModel.input, outputs=fcHead)" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 7, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "# Chia training set, test set tỉ lệ 80-20\n", 130 | "X_train, X_test, y_train, y_test = train_test_split(list_image, labels, test_size=0.2, random_state=42)" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": 8, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "# augmentation cho training data\n", 140 | "aug_train = ImageDataGenerator(rescale=1./255, rotation_range=30, width_shift_range=0.1, height_shift_range=0.1, shear_range=0.2, \n", 141 | " zoom_range=0.2, horizontal_flip=True, fill_mode='nearest')\n", 142 | "# augmentation cho test\n", 143 | "aug_test = ImageDataGenerator(rescale=1./255)" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 11, 149 | "metadata": {}, 150 | "outputs": [ 151 | { 152 | "name": "stdout", 153 | "output_type": "stream", 154 | "text": [ 155 | "Epoch 1/25\n", 156 | "34/34 [==============================] - 36s 1s/step - loss: 5.8708 - acc: 0.1443 - val_loss: 1.8312 - val_acc: 0.4805\n", 157 | "Epoch 2/25\n", 158 | "34/34 [==============================] - 25s 727ms/step - loss: 2.2167 - acc: 0.3097 - val_loss:
1.4920 - val_acc: 0.4833\n", 159 | "Epoch 3/25\n", 160 | "34/34 [==============================] - 21s 631ms/step - loss: 1.8023 - acc: 0.4292 - val_loss: 1.2073 - val_acc: 0.6583\n", 161 | "Epoch 4/25\n", 162 | "34/34 [==============================] - 21s 632ms/step - loss: 1.5419 - acc: 0.5064 - val_loss: 0.9879 - val_acc: 0.7208\n", 163 | "Epoch 5/25\n", 164 | "34/34 [==============================] - 22s 634ms/step - loss: 1.4261 - acc: 0.5506 - val_loss: 0.6725 - val_acc: 0.8083\n", 165 | "Epoch 6/25\n", 166 | "34/34 [==============================] - 22s 633ms/step - loss: 1.3022 - acc: 0.5873 - val_loss: 0.7243 - val_acc: 0.7667\n", 167 | "Epoch 7/25\n", 168 | "34/34 [==============================] - 22s 636ms/step - loss: 1.1401 - acc: 0.6241 - val_loss: 0.6351 - val_acc: 0.8708\n", 169 | "Epoch 8/25\n", 170 | "34/34 [==============================] - 22s 636ms/step - loss: 1.1156 - acc: 0.6480 - val_loss: 0.5055 - val_acc: 0.8500\n", 171 | "Epoch 9/25\n", 172 | "34/34 [==============================] - 22s 636ms/step - loss: 1.0335 - acc: 0.6498 - val_loss: 0.4785 - val_acc: 0.8417\n", 173 | "Epoch 10/25\n", 174 | "34/34 [==============================] - 22s 638ms/step - loss: 0.9727 - acc: 0.6949 - val_loss: 0.4631 - val_acc: 0.8711\n", 175 | "Epoch 11/25\n", 176 | "34/34 [==============================] - 22s 638ms/step - loss: 0.9333 - acc: 0.7050 - val_loss: 0.5691 - val_acc: 0.8417\n", 177 | "Epoch 12/25\n", 178 | "34/34 [==============================] - 22s 638ms/step - loss: 0.8679 - acc: 0.7243 - val_loss: 0.5470 - val_acc: 0.8125\n", 179 | "Epoch 13/25\n", 180 | "34/34 [==============================] - 22s 639ms/step - loss: 0.8397 - acc: 0.7335 - val_loss: 0.5751 - val_acc: 0.8000\n", 181 | "Epoch 14/25\n", 182 | "34/34 [==============================] - 22s 639ms/step - loss: 0.8081 - acc: 0.7491 - val_loss: 0.5523 - val_acc: 0.8333\n", 183 | "Epoch 15/25\n", 184 | "34/34 [==============================] - 22s 643ms/step - loss: 0.7724 - 
acc: 0.7629 - val_loss: 0.4636 - val_acc: 0.8750\n", 185 | "Epoch 16/25\n", 186 | "34/34 [==============================] - 22s 640ms/step - loss: 0.7494 - acc: 0.7509 - val_loss: 0.3266 - val_acc: 0.9083\n", 187 | "Epoch 17/25\n", 188 | "34/34 [==============================] - 22s 641ms/step - loss: 0.7434 - acc: 0.7445 - val_loss: 0.4980 - val_acc: 0.8625\n", 189 | "Epoch 18/25\n", 190 | "34/34 [==============================] - 22s 638ms/step - loss: 0.6952 - acc: 0.7693 - val_loss: 0.3791 - val_acc: 0.8917\n", 191 | "Epoch 19/25\n", 192 | "34/34 [==============================] - 22s 637ms/step - loss: 0.6805 - acc: 0.7739 - val_loss: 0.4193 - val_acc: 0.8633\n", 193 | "Epoch 20/25\n", 194 | "34/34 [==============================] - 22s 640ms/step - loss: 0.6195 - acc: 0.7858 - val_loss: 0.4143 - val_acc: 0.8750\n", 195 | "Epoch 21/25\n", 196 | "34/34 [==============================] - 22s 639ms/step - loss: 0.6625 - acc: 0.7849 - val_loss: 0.3923 - val_acc: 0.8958\n", 197 | "Epoch 22/25\n", 198 | "34/34 [==============================] - 22s 636ms/step - loss: 0.5939 - acc: 0.8079 - val_loss: 0.4184 - val_acc: 0.8833\n", 199 | "Epoch 23/25\n", 200 | "34/34 [==============================] - 22s 643ms/step - loss: 0.5655 - acc: 0.8171 - val_loss: 0.3251 - val_acc: 0.9042\n", 201 | "Epoch 24/25\n", 202 | "34/34 [==============================] - 22s 637ms/step - loss: 0.6053 - acc: 0.8079 - val_loss: 0.5748 - val_acc: 0.8542\n", 203 | "Epoch 25/25\n", 204 | "34/34 [==============================] - 22s 636ms/step - loss: 0.5360 - acc: 0.8208 - val_loss: 0.3918 - val_acc: 0.9000\n" 205 | ] 206 | } 207 | ], 208 | "source": [ 209 | "# freeze VGG model\n", 210 | "for layer in baseModel.layers:\n", 211 | " layer.trainable = False\n", 212 | " \n", 213 | "opt = RMSprop(0.001)\n", 214 | "model.compile(opt, 'categorical_crossentropy', ['accuracy'])\n", 215 | "numOfEpoch = 25\n", 216 | "H = model.fit_generator(aug_train.flow(X_train, y_train, batch_size=32), \n", 217 | " 
steps_per_epoch=len(X_train)//32,\n", 218 | " validation_data=(aug_test.flow(X_test, y_test, batch_size=32)),\n", 219 | " validation_steps=len(X_test)//32,\n", 220 | " epochs=numOfEpoch)" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 13, 226 | "metadata": {}, 227 | "outputs": [ 228 | { 229 | "name": "stdout", 230 | "output_type": "stream", 231 | "text": [ 232 | "Epoch 1/35\n", 233 | "34/34 [==============================] - 26s 771ms/step - loss: 0.4242 - acc: 0.8603 - val_loss: 0.3123 - val_acc: 0.9258\n", 234 | "Epoch 2/35\n", 235 | "34/34 [==============================] - 24s 712ms/step - loss: 0.3769 - acc: 0.8814 - val_loss: 0.2623 - val_acc: 0.9250\n", 236 | "Epoch 3/35\n", 237 | "34/34 [==============================] - 24s 709ms/step - loss: 0.3101 - acc: 0.9062 - val_loss: 0.2925 - val_acc: 0.9375\n", 238 | "Epoch 4/35\n", 239 | "34/34 [==============================] - 24s 704ms/step - loss: 0.2902 - acc: 0.9035 - val_loss: 0.4253 - val_acc: 0.9292\n", 240 | "Epoch 5/35\n", 241 | "34/34 [==============================] - 24s 705ms/step - loss: 0.2829 - acc: 0.8961 - val_loss: 0.3271 - val_acc: 0.9417\n", 242 | "Epoch 6/35\n", 243 | "34/34 [==============================] - 24s 707ms/step - loss: 0.2556 - acc: 0.9191 - val_loss: 0.1768 - val_acc: 0.9250\n", 244 | "Epoch 7/35\n", 245 | "34/34 [==============================] - 24s 706ms/step - loss: 0.2351 - acc: 0.9228 - val_loss: 0.3223 - val_acc: 0.9375\n", 246 | "Epoch 8/35\n", 247 | "34/34 [==============================] - 24s 707ms/step - loss: 0.2641 - acc: 0.9118 - val_loss: 0.3800 - val_acc: 0.9292\n", 248 | "Epoch 9/35\n", 249 | "34/34 [==============================] - 24s 708ms/step - loss: 0.2760 - acc: 0.9173 - val_loss: 0.2979 - val_acc: 0.9458\n", 250 | "Epoch 10/35\n", 251 | "34/34 [==============================] - 24s 710ms/step - loss: 0.2466 - acc: 0.9090 - val_loss: 0.3352 - val_acc: 0.9453\n", 252 | "Epoch 11/35\n", 253 | "34/34 
[==============================] - 24s 709ms/step - loss: 0.2247 - acc: 0.9210 - val_loss: 0.2563 - val_acc: 0.9500\n", 254 | "Epoch 12/35\n", 255 | "34/34 [==============================] - 24s 709ms/step - loss: 0.2423 - acc: 0.9274 - val_loss: 0.3930 - val_acc: 0.9417\n", 256 | "Epoch 13/35\n", 257 | "34/34 [==============================] - 24s 709ms/step - loss: 0.2042 - acc: 0.9311 - val_loss: 0.2740 - val_acc: 0.9542\n", 258 | "Epoch 14/35\n", 259 | "34/34 [==============================] - 24s 709ms/step - loss: 0.2141 - acc: 0.9256 - val_loss: 0.3703 - val_acc: 0.9375\n", 260 | "Epoch 15/35\n", 261 | "34/34 [==============================] - 24s 705ms/step - loss: 0.2355 - acc: 0.9191 - val_loss: 0.2008 - val_acc: 0.9625\n", 262 | "Epoch 16/35\n", 263 | "34/34 [==============================] - 24s 707ms/step - loss: 0.2310 - acc: 0.9182 - val_loss: 0.2346 - val_acc: 0.9500\n", 264 | "Epoch 17/35\n", 265 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1893 - acc: 0.9329 - val_loss: 0.3341 - val_acc: 0.9542\n", 266 | "Epoch 18/35\n", 267 | "34/34 [==============================] - 24s 709ms/step - loss: 0.2123 - acc: 0.9256 - val_loss: 0.3322 - val_acc: 0.9458\n", 268 | "Epoch 19/35\n", 269 | "34/34 [==============================] - 24s 710ms/step - loss: 0.1802 - acc: 0.9375 - val_loss: 0.2476 - val_acc: 0.9570\n", 270 | "Epoch 20/35\n", 271 | "34/34 [==============================] - 24s 705ms/step - loss: 0.1902 - acc: 0.9366 - val_loss: 0.3922 - val_acc: 0.9375\n", 272 | "Epoch 21/35\n", 273 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1852 - acc: 0.9384 - val_loss: 0.2375 - val_acc: 0.9583\n", 274 | "Epoch 22/35\n", 275 | "34/34 [==============================] - 24s 709ms/step - loss: 0.1782 - acc: 0.9458 - val_loss: 0.2803 - val_acc: 0.9500\n", 276 | "Epoch 23/35\n", 277 | "34/34 [==============================] - 24s 709ms/step - loss: 0.2021 - acc: 0.9347 - val_loss: 0.3591 - val_acc: 0.9208\n", 278 | 
"Epoch 24/35\n", 279 | "34/34 [==============================] - 24s 705ms/step - loss: 0.2031 - acc: 0.9265 - val_loss: 0.4288 - val_acc: 0.9292\n", 280 | "Epoch 25/35\n", 281 | "34/34 [==============================] - 24s 707ms/step - loss: 0.1587 - acc: 0.9449 - val_loss: 0.2866 - val_acc: 0.9333\n", 282 | "Epoch 26/35\n", 283 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1837 - acc: 0.9375 - val_loss: 0.3268 - val_acc: 0.9417\n", 284 | "Epoch 27/35\n", 285 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1921 - acc: 0.9384 - val_loss: 0.2889 - val_acc: 0.9458\n", 286 | "Epoch 28/35\n", 287 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1747 - acc: 0.9375 - val_loss: 0.3303 - val_acc: 0.9492\n", 288 | "Epoch 29/35\n", 289 | "34/34 [==============================] - 24s 705ms/step - loss: 0.1627 - acc: 0.9485 - val_loss: 0.3628 - val_acc: 0.9500\n", 290 | "Epoch 30/35\n", 291 | "34/34 [==============================] - 24s 706ms/step - loss: 0.1721 - acc: 0.9393 - val_loss: 0.2802 - val_acc: 0.9333\n", 292 | "Epoch 31/35\n", 293 | "34/34 [==============================] - 24s 707ms/step - loss: 0.1788 - acc: 0.9421 - val_loss: 0.2832 - val_acc: 0.9375\n", 294 | "Epoch 32/35\n", 295 | "34/34 [==============================] - 24s 707ms/step - loss: 0.1880 - acc: 0.9375 - val_loss: 0.3612 - val_acc: 0.9417\n", 296 | "Epoch 33/35\n", 297 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1932 - acc: 0.9320 - val_loss: 0.4107 - val_acc: 0.9417\n", 298 | "Epoch 34/35\n", 299 | "34/34 [==============================] - 24s 708ms/step - loss: 0.1603 - acc: 0.9485 - val_loss: 0.2739 - val_acc: 0.9500\n", 300 | "Epoch 35/35\n", 301 | "34/34 [==============================] - 24s 707ms/step - loss: 0.1168 - acc: 0.9632 - val_loss: 0.1849 - val_acc: 0.9667\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "# unfreeze some last CNN layer:\n", 307 | "for layer in baseModel.layers[15:]:\n", 
308 | " layer.trainable = True\n", 309 | "\n", 310 | "numOfEpoch = 35\n", 311 | "opt = SGD(0.001)\n", 312 | "model.compile(opt, 'categorical_crossentropy', ['accuracy'])\n", 313 | "H = model.fit_generator(aug_train.flow(X_train, y_train, batch_size=32), \n", 314 | " steps_per_epoch=len(X_train)//32,\n", 315 | " validation_data=(aug_test.flow(X_test, y_test, batch_size=32)),\n", 316 | " validation_steps=len(X_test)//32,\n", 317 | " epochs=numOfEpoch)" 318 | ] 319 | } 320 | ], 321 | "metadata": { 322 | "kernelspec": { 323 | "display_name": "Python 3", 324 | "language": "python", 325 | "name": "python3" 326 | }, 327 | "language_info": { 328 | "codemirror_mode": { 329 | "name": "ipython", 330 | "version": 3 331 | }, 332 | "file_extension": ".py", 333 | "mimetype": "text/x-python", 334 | "name": "python", 335 | "nbconvert_exporter": "python", 336 | "pygments_lexer": "ipython3", 337 | "version": "3.6.5" 338 | } 339 | }, 340 | "nbformat": 4, 341 | "nbformat_minor": 2 342 | } 343 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DL_Tutorial 2 | It contains the dataset and code for deep learning tutorial in my website: https://nttuan8.com/gioi-thieu-ve-deep-learning/ 3 | --------------------------------------------------------------------------------