├── Kaggle code with dataset
├── Plant Disease Detection Using Convolutional Neural Network.docx
├── PlantDiseaseDetection.py
└── README.md

/Kaggle code with dataset:
--------------------------------------------------------------------------------
https://www.kaggle.com/sumanismcse/plant-disease-detection-using-keras

--------------------------------------------------------------------------------
/Plant Disease Detection Using Convolutional Neural Network.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sumanismcse/Plant-Disease-Identification-using-CNN/40e61d9b81c4933607dbc0ac29d2663c15c7a88f/Plant Disease Detection Using Convolutional Neural Network.docx

--------------------------------------------------------------------------------
/PlantDiseaseDetection.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""Untitled3.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1_7AzFr6ROKutEDjaMCJBAWnAveDGpTxv
"""

# NOTE: written against the standalone Keras 2.x API (keras, not tf.keras)
import pickle
from os import listdir

import cv2
import matplotlib.pyplot as plt
import numpy as np
from keras import backend as K
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.layers.core import Activation, Dense, Dropout, Flatten
from keras.layers.normalization import BatchNormalization
from keras.models import Sequential
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator, img_to_array
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer

# hyperparameters
EPOCHS = 25
INIT_LR = 1e-3
BS = 32
default_image_size = (256, 256)
directory_root = '../input/plantvillage/'
width = 256
height = 256
depth = 3


def convert_image_to_array(image_dir):
    """Read an image from disk and resize it to the network's input size."""
    try:
        image = cv2.imread(image_dir)
        if image is not None:
            image = cv2.resize(image, default_image_size)
            return img_to_array(image)
        else:
            return np.array([])
    except Exception as e:
        print(f"Error : {e}")
        return None


image_list, label_list = [], []
try:
    print("[INFO] Loading images ...")
    # filter out macOS .DS_Store entries up front instead of removing them
    # from each list while iterating over it
    root_dir = [d for d in listdir(directory_root) if d != ".DS_Store"]

    for plant_folder in root_dir:
        plant_disease_folder_list = [d for d in listdir(f"{directory_root}/{plant_folder}")
                                     if d != ".DS_Store"]

        for plant_disease_folder in plant_disease_folder_list:
            print(f"[INFO] Processing {plant_disease_folder} ...")
            plant_disease_image_list = [f for f in listdir(f"{directory_root}/{plant_folder}/{plant_disease_folder}/")
                                        if f != ".DS_Store"]
            # cap each class at 200 images to keep memory use manageable
            for image in plant_disease_image_list[:200]:
                image_directory = f"{directory_root}/{plant_folder}/{plant_disease_folder}/{image}"
                if image_directory.endswith(".jpg") or image_directory.endswith(".JPG"):
                    image_list.append(convert_image_to_array(image_directory))
                    label_list.append(plant_disease_folder)
    print("[INFO] Image loading completed")
except Exception as e:
    print(f"Error : {e}")

image_size = len(image_list)

# one-hot encode the string labels and save the encoder for later inference
label_binarizer = LabelBinarizer()
image_labels = label_binarizer.fit_transform(label_list)
pickle.dump(label_binarizer, open('label_transform.pkl', 'wb'))
n_classes = len(label_binarizer.classes_)

print(label_binarizer.classes_)

# scale pixel values to [0, 1]
np_image_list = np.array(image_list, dtype=np.float16) / 255.0

print("[INFO] Splitting data into train and test sets")
x_train, x_test, y_train, y_test = train_test_split(np_image_list, image_labels,
                                                    test_size=0.2, random_state=42)

# on-the-fly data augmentation
aug = ImageDataGenerator(
    rotation_range=25, width_shift_range=0.1,
    height_shift_range=0.1, shear_range=0.2,
    zoom_range=0.2, horizontal_flip=True,
    fill_mode="nearest")

# build the CNN
model = Sequential()
inputShape = (height, width, depth)
chanDim = -1
if K.image_data_format() == "channels_first":
    inputShape = (depth, height, width)
    chanDim = 1

# CONV => RELU => BN => POOL
model.add(Conv2D(32, (3, 3), padding="same", input_shape=inputShape))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(3, 3)))
model.add(Dropout(0.25))

# (CONV => RELU => BN) * 2 => POOL
model.add(Conv2D(64, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(Conv2D(64, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

# (CONV => RELU => BN) * 2 => POOL
model.add(Conv2D(128, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(Conv2D(128, (3, 3), padding="same"))
model.add(Activation("relu"))
model.add(BatchNormalization(axis=chanDim))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

# FC => RELU => BN => softmax classifier
model.add(Flatten())
model.add(Dense(1024))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(n_classes))
model.add(Activation("softmax"))

opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
# categorical cross-entropy for single-label, multi-class classification
model.compile(loss="categorical_crossentropy", optimizer=opt, metrics=["accuracy"])

# train the network
print("[INFO] training network...")
history = model.fit_generator(
    aug.flow(x_train, y_train, batch_size=BS),
    validation_data=(x_test, y_test),
    steps_per_epoch=len(x_train) // BS,
    epochs=EPOCHS, verbose=1
)

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)

# training and validation accuracy
plt.plot(epochs, acc, 'b', label='Training accuracy')
plt.plot(epochs, val_acc, 'r', label='Validation accuracy')
plt.title('Training and Validation accuracy')
plt.legend()

plt.figure()
# training and validation loss
plt.plot(epochs, loss, 'b', label='Training loss')
plt.plot(epochs, val_loss, 'r', label='Validation loss')
plt.title('Training and Validation loss')
plt.legend()
plt.show()

print("[INFO] Calculating model accuracy")
scores = model.evaluate(x_test, y_test)
print(f"Test Accuracy: {scores[1]*100}")

# save the trained network to disk in Keras' HDF5 format
print("[INFO] Saving model...")
model.save('cnn_model.h5')

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Plant-Disease-Identification-using-CNN
# Plant Disease Identification Using Convolutional Neural Network

Here is how I built a plant disease detection model using a convolutional neural network.

# For those having issues

To find the dataset, go to Kaggle and download the PlantVillage dataset.

I have included a running version of my code at the Kaggle link in this repository. If you have any problems, please refer to that.
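# Trying the trained model

The training script writes two artifacts: the trained network (`cnn_model.h5`) and the fitted label encoder (`label_transform.pkl`). Below is a minimal inference sketch, assuming those two files exist and using `leaf.jpg` as a placeholder path for your own test image:

```python
import pickle

import cv2
import numpy as np
from keras.models import load_model
from keras.preprocessing.image import img_to_array

# load the trained network and the fitted label binarizer
model = load_model('cnn_model.h5')
label_binarizer = pickle.load(open('label_transform.pkl', 'rb'))

# preprocess exactly as during training: resize to 256x256, scale to [0, 1]
image = cv2.imread('leaf.jpg')  # placeholder path; substitute your own image
image = cv2.resize(image, (256, 256))
image = img_to_array(image) / 255.0
image = np.expand_dims(image, axis=0)  # the model expects a batch dimension

# predict class probabilities and map the argmax back to a class name
probabilities = model.predict(image)[0]
print(label_binarizer.classes_[np.argmax(probabilities)])
```
--------------------------------------------------------------------------------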