├── documentation
│   ├── project_proposal.pdf
│   ├── project_report.pdf
│   └── assets
│       └── images
│           ├── home_page.png
│           ├── compare_page.png
│           ├── predict_page.png
│           └── results_page.png
├── web_application
│   ├── static
│   │   ├── bg.jpg
│   │   ├── comparison_graph.png
│   │   ├── temp
│   │   │   └── UNADJUSTEDNONRAW_thumb_1d.jpg
│   │   └── style.css
│   ├── templates
│   │   ├── bg.jpg
│   │   ├── prediction.html
│   │   ├── comapre.html
│   │   ├── result.html
│   │   └── index.html
│   └── main.py
├── data
│   └── README.md
├── LICENSE
├── requirements.txt
└── README.md

/documentation/project_proposal.pdf:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/web_application/static/bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/web_application/static/bg.jpg
--------------------------------------------------------------------------------
/documentation/project_report.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/documentation/project_report.pdf
--------------------------------------------------------------------------------
/web_application/templates/bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/web_application/templates/bg.jpg
--------------------------------------------------------------------------------
/documentation/assets/images/home_page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/documentation/assets/images/home_page.png
--------------------------------------------------------------------------------
/documentation/assets/images/compare_page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/documentation/assets/images/compare_page.png
--------------------------------------------------------------------------------
/documentation/assets/images/predict_page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/documentation/assets/images/predict_page.png
--------------------------------------------------------------------------------
/documentation/assets/images/results_page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/documentation/assets/images/results_page.png
--------------------------------------------------------------------------------
/web_application/static/comparison_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/web_application/static/comparison_graph.png
--------------------------------------------------------------------------------
/web_application/static/temp/UNADJUSTEDNONRAW_thumb_1d.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenishborah/Tea-Leaf-Disease-Detection/HEAD/web_application/static/temp/UNADJUSTEDNONRAW_thumb_1d.jpg
-------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | # Dataset 2 | 3 | The dataset "Identifying Disease in Tea leaves" used for the tea leaf disease detection project can be downloaded from the link below: 4 | 5 | [Download Dataset (tea_leaf_dataset)](https://www.kaggle.com/datasets/shashwatwork/identifying-disease-in-tea-leafs) 6 | 7 | Please make sure to cite the source of the dataset if you use it for any research or projects. 8 | 9 | For more details about the project, refer to the main README.md. 10 | -------------------------------------------------------------------------------- /web_application/templates/prediction.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Prediction 5 | 6 | 7 | 8 |

Prediction

9 | {% if accuracy and loss %} 10 |

Accuracy: {{ accuracy }}

11 |

Loss: {{ loss }}

12 | {% endif %} 13 |
14 | 15 | 16 |

17 | 18 |
19 | 20 | 21 | -------------------------------------------------------------------------------- /web_application/templates/comapre.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Model Comparison Results 5 | 29 | 30 | 31 |
32 |

Model Comparison Results

33 |

Comparison Graph:

34 | Comparison Graph 35 |
36 | 37 | 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Jenish 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /web_application/static/style.css: -------------------------------------------------------------------------------- 1 | /* styles for index.html */ 2 | body { 3 | font-family: Arial, sans-serif; 4 | } 5 | 6 | .container { 7 | max-width: 600px; 8 | margin: 0 auto; 9 | padding: 20px; 10 | } 11 | 12 | h1 { 13 | text-align: center; 14 | margin-bottom: 20px; 15 | } 16 | 17 | form { 18 | display: flex; 19 | flex-direction: column; 20 | align-items: center; 21 | } 22 | 23 | label { 24 | margin-bottom: 10px; 25 | } 26 | 27 | input[type="text"], 28 | input[type="number"] { 29 | padding: 5px; 30 | margin-bottom: 10px; 31 | width: 100%; 32 | } 33 | 34 | select { 35 | padding: 5px; 36 | margin-bottom: 10px; 37 | width: 100%; 38 | } 39 | 40 | button { 41 | padding: 10px 20px; 42 | } 43 | 44 | /* styles for result.html */ 45 | .result-container { 46 | max-width: 800px; 47 | margin: 0 auto; 48 | padding: 20px; 49 | } 50 | 51 | h2 { 52 | text-align: center; 53 | margin-bottom: 20px; 54 | } 55 | 56 | img.plot { 57 | display: block; 58 | margin: 0 auto; 59 | width: 100%; 60 | max-width: 600px; 61 | border: 1px solid #ddd; 62 | padding: 5px; 63 | box-sizing: border-box; 64 | } 65 | -------------------------------------------------------------------------------- /web_application/templates/result.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Model Comparison Result 5 | 39 | 40 | 41 |
42 |

Model Comparison Result

43 |
44 |

Accuracy Comparison

45 |

Model 1 Accuracy: {{ accuracy_model1 }}

46 |

Model 2 Accuracy: {{ accuracy_model2 }}

47 |
48 |
49 |

Loss Comparison

50 |

Model 1 Loss: {{ loss_model1 }}

51 |

Model 2 Loss: {{ loss_model2 }}

52 |
53 |
54 |

Prediction

55 |

Prediction: {{ prediction }}

56 |
57 |
58 | 59 | 60 | -------------------------------------------------------------------------------- /web_application/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Model Comparison 5 | 53 | 54 | Model Selection 55 | 56 | 57 | 58 |

Model Selection

59 |
60 | 61 | 67 | 68 |

69 | 70 | 71 | 76 | 77 |

78 | 79 | 80 | 81 | 82 |

83 | 84 | 85 |
86 | 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | absl-py==1.4.0 2 | aiohttp==3.8.4 3 | aiohttp-retry==2.8.3 4 | aiosignal==1.3.1 5 | appdirs==1.4.4 6 | astunparse==1.6.3 7 | async-timeout==4.0.2 8 | attrs==23.1.0 9 | beautifulsoup4==4.12.2 10 | cachetools==5.3.0 11 | certifi==2022.12.7 12 | cffi==1.15.1 13 | charset-normalizer==3.1.0 14 | click==8.1.3 15 | colorama==0.4.6 16 | comtypes==1.1.14 17 | contourpy==1.0.7 18 | cycler==0.11.0 19 | fastjsonschema==2.17.1 20 | filelock==3.12.0 21 | Flask==2.2.3 22 | flatbuffers==23.3.3 23 | fonttools==4.39.3 24 | frozenlist==1.3.3 25 | gast==0.4.0 26 | gevent==22.10.2 27 | google-auth==2.17.1 28 | google-auth-oauthlib==1.0.0 29 | google-pasta==0.2.0 30 | greenlet==2.0.2 31 | grpcio==1.53.0 32 | h5py==3.8.0 33 | hmmlearn==0.3.0 34 | idna==3.4 35 | itsdangerous==2.1.2 36 | jax==0.4.8 37 | Jinja2==3.1.2 38 | joblib==1.2.0 39 | keras==2.12.0 40 | kiwisolver==1.4.4 41 | libclang==16.0.0 42 | Markdown==3.4.3 43 | MarkupSafe==2.1.2 44 | matplotlib==3.7.1 45 | ml-dtypes==0.0.4 46 | MouseInfo==0.1.3 47 | mpmath==1.3.0 48 | multidict==6.0.4 49 | networkx==3.1 50 | numpy==1.23.5 51 | oauthlib==3.2.2 52 | opencv-python==4.7.0.72 53 | opt-einsum==3.3.0 54 | packaging==21.3 55 | pandas==2.0.1 56 | Pillow==9.5.0 57 | protobuf==4.22.1 58 | psutil==5.9.5 59 | pyasn1==0.4.8 60 | pyasn1-modules==0.2.8 61 | PyAudio==0.2.13 62 | PyAutoGUI==0.9.53 63 | pycparser==2.21 64 | pydot==1.4.2 65 | pyee==8.2.2 66 | PyGetWindow==0.0.9 67 | Pygments==2.15.1 68 | pyjokes==0.6.0 69 | PyJWT==2.7.0 70 | PyMsgBox==1.0.9 71 | pyparsing==3.0.9 72 | pyperclip==1.8.2 73 | pypiwin32==223 74 | PyRect==0.2.0 75 | pyrsistent==0.19.3 76 | PyScreeze==0.1.28 77 | python-dateutil==2.8.2 78 | python-http-client==3.3.7 79 | pyttsx3==2.90 80 | pytweening==1.0.4 81 | pytz==2023.3 82 | pywhatkit==5.4 83 | pywin32==306 84 | PyYAML==6.0 85 | pyzmq==25.1.0 86 | requests==2.28.2 87 | requests-oauthlib==1.3.1 88 | rsa==4.9 89 | scikeras==0.4.1 90 | scikit-learn==1.2.2 91 | scipy==1.10.1 92 | seaborn==0.12.2 93 | sendgrid==6.10.0 94 | sentry-sdk==1.22.2 95 | six==1.16.0 96 | sklearn==0.0.post1 97 | soundfile==0.12.1 98 | soupsieve==2.4 99 | SpeechRecognition==3.10.0 100 | starkbank-ecdsa==2.2.0 101 | sympy==1.12 102 | tensorboard==2.12.1 103 | tensorboard-data-server==0.7.0 104 | tensorboard-plugin-wit==1.8.1 105 | tensorflow==2.12.0 106 | tensorflow-estimator==2.12.0 107 | tensorflow-intel==2.12.0 108 | tensorflow-io-gcs-filesystem==0.31.0 109 | termcolor==2.2.0 110 | thop==0.1.1.post2209072238 111 | threadpoolctl==3.1.0 112 | tinycss2==1.2.1 113 | torch==2.0.1 114 | torchvision==0.15.2 115 | tornado==6.3.2 116 | tqdm==4.65.0 117 | traitlets==5.9.0 118 | twilio==8.4.0 119 | typing_extensions==4.5.0 120 | tzdata==2023.3 121 | ultralytics==8.0.99 122 | urllib3==1.26.15 123 | webencodings==0.5.1 124 | websockets==10.4 125 | Werkzeug==2.2.3 126 | wikipedia==1.4.0 127 | wrapt==1.14.1 128 | yarl==1.9.2 129 | zipp==3.15.0 130 | zope.event==4.6 131 | zope.interface==6.0 132 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tea-Leaf-Disease-Detection 2 | This repo contains the code and documentation for my final year project on tea leaf diseases using deep learning as a partial fulfillment of my degree of master in science in 
Information Technology from the Department of Computer Science, Gauhati University, Ghy-14.

# !Important!
The complete set of folders, including `saved_models`, is on the `master` branch rather than the `main` branch.


## Dataset
I used a publicly available dataset for tea leaf disease detection. A download link is provided in the `data` directory.

## Notebooks

We trained four different machine learning models to classify tea leaf diseases. The trained models are saved in the `saved_models` directory. For more details about the models and their performance, refer to the project proposal and project report in the `documentation` directory.

- `VGG16withCustomLayers.ipynb`: Jupyter Notebook for tea leaf disease detection using transfer learning, with VGG16 as the base model.
- `NasNetWithCustomLayers.ipynb`: Jupyter Notebook for tea leaf disease detection using transfer learning, with NASNetMobile as the base model.
- `InceptionV3.ipynb`: Jupyter Notebook for tea leaf disease detection using transfer learning, with InceptionV3 as the base model.
- `Mysequential.ipynb`: Jupyter Notebook for tea leaf disease detection using a custom sequential CNN model.


## Saved Models
The trained models are saved in the `saved_models` directory.

## Web Application
We have developed a web application that lets users interact with the trained models and get predictions for tea leaf diseases.


### Web Application Features

- Home Page: The home page lets the user provide a dataset path or link and choose two of the four trained machine learning models.

- Compare Page: The compare page shows the comparison graph for the two models selected on the home page, along with their hyperparameters.

- Predict Page: Users can select any of the four trained/saved models and supply input images of tea leaves to detect tea leaf disease.

- Result Page: Users get predictions for the type of disease affecting the tea leaves.

### Screenshots

![Home Page](documentation/assets/images/home_page.png)

![Compare Page](documentation/assets/images/compare_page.png)

![Predict Page](documentation/assets/images/predict_page.png)

![Result Page](documentation/assets/images/results_page.png)


## Documentation
The `documentation` directory contains my project proposal, project report, and assets. For more detailed information about the project, you can refer to the following documents:

- [Project Proposal](documentation/project_proposal.pdf)
- [Project Report](documentation/project_report.pdf)
- [Assets](documentation/assets)

## How to Use


### Installation

Make sure you have the required version of Python (e.g., Python 3.7 or later) installed before proceeding.

1. To use this project, you need to have Python installed on your system.

2. Open your terminal, navigate to where you want to save the project, and clone this repository using `git clone https://github.com/jenishborah/Tea-Leaf-Disease-Detection`

3. Install the required dependencies using the following command: `pip install -r requirements.txt`

4. Open and run the Jupyter Notebooks in the `notebooks` directory in the specified order.
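
For convenience, the installation steps above and the run steps described below can be combined into the following sketch (assuming `git`, Python, and `pip` are available on your system; the address is the default Flask development server started by `main.py`):

```bash
# Clone the repository and move into it
git clone https://github.com/jenishborah/Tea-Leaf-Disease-Detection
cd Tea-Leaf-Disease-Detection

# Install the required dependencies
pip install -r requirements.txt

# Start the Flask web application
cd web_application
python main.py
# Then open http://localhost:5000 in your browser
```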

#### Running the Web Application

1. Navigate to the `web_application` directory.

2. Run the Flask application: `python main.py`

3. Open your web browser and go to `http://localhost:5000` to access the web application.



## License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.

Copyright (c) 2023 Jenish A. Borah
--------------------------------------------------------------------------------
/web_application/main.py:
--------------------------------------------------------------------------------
from flask import Flask, render_template, request
import os
import matplotlib.pyplot as plt
import numpy as np
import cv2
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# NASNetMobile is required by build_model below (ResNet50 and MobileNetV2 are currently unused)
from tensorflow.keras.applications import InceptionV3, VGG16, ResNet50, MobileNetV2, NASNetMobile
from tensorflow.keras.layers import GlobalAveragePooling2D, Dense
from tensorflow.keras.models import Model

app = Flask(__name__)

# Define the load_dataset function before the compare function
def load_dataset(dataset_path):
    if not os.path.exists(dataset_path):
        raise FileNotFoundError("Dataset directory does not exist: " + dataset_path)

    # Load the dataset from the directory
    return dataset_path


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/compare', methods=['POST'])
def compare():
    # Get the selected model names
    model1 = request.form.get('model1')
    model2 = request.form.get('model2')

    # Get the selected parameters
    epochs = int(request.form.get('epochs'))
    batch_size = int(request.form.get('batch_size', 16))

    # Get the form data
    dataset_link = request.form.get('dataset_link')
    dataset_path = request.form.get('dataset_path')

    if not dataset_path:
        return "Please provide a valid dataset path."

    # Load the dataset
    dataset = load_dataset(dataset_path)

    # Preprocess the dataset based on the selected models
    preprocessed_data1, preprocessed_data2 = preprocess_data(dataset, model1, model2)

    # Train and compare the selected models
    if model1 != model2:
        history_model1, history_model2 = train_and_compare_models(model1, model2, epochs, batch_size, preprocessed_data1, preprocessed_data2)
        generate_comparison_graph(history_model1, history_model2)
    else:
        return "Please select different models for comparison."

    # Return the path to the generated comparison graph
    comparison_graph_path = os.path.join('static', 'comparison_graph.png')
    return render_template('result.html', comparison_graph_path=comparison_graph_path)
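
# Example request to the /compare endpoint (a reference sketch only, not used by
# the app itself): the form field names mirror the request.form.get() calls in
# compare() above, the model identifiers match the branches handled in
# preprocess_data()/build_model() below, and the URL assumes the default Flask
# development server started by `python main.py`.
#
#   import requests
#   response = requests.post(
#       "http://localhost:5000/compare",
#       data={
#           "model1": "custom-vgg16",
#           "model2": "custom-inceptionv3",
#           "epochs": "5",
#           "batch_size": "16",
#           "dataset_path": "/path/to/tea_leaf_dataset",  # hypothetical local path
#       },
#   )
#   print(response.status_code)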

def preprocess_data(dataset, model1, model2):
    preprocessed_data1 = []
    preprocessed_data2 = []

    if model1 == 'custom-nasnetmobile':
        # Implement the preprocessing steps for Custom NASNetMobile
        preprocessed_data1 = dataset
    elif model1 == 'custom-vgg16':
        # Implement the preprocessing steps for Custom VGG16
        preprocessed_data1 = dataset
    elif model1 == 'custom-inceptionv3':
        # Implement the preprocessing steps for Custom InceptionV3
        preprocessed_data1 = dataset

    if model2 == 'custom-nasnetmobile':
        # Implement the preprocessing steps for Custom NASNetMobile
        preprocessed_data2 = dataset
    elif model2 == 'custom-vgg16':
        # Implement the preprocessing steps for Custom VGG16
        preprocessed_data2 = dataset
    elif model2 == 'custom-inceptionv3':
        # Implement the preprocessing steps for Custom InceptionV3
        preprocessed_data2 = dataset

    return preprocessed_data1, preprocessed_data2


def load_and_preprocess_dataset(dataset_path, batch_size, validation_split):
    # Create an ImageDataGenerator for data preprocessing
    datagen = ImageDataGenerator(
        rescale=1./255,
        validation_split=validation_split
    )

    # Load and preprocess the training dataset
    train_data = datagen.flow_from_directory(
        dataset_path,
        target_size=(224, 224),
        batch_size=batch_size,
        class_mode='categorical',
        subset='training'
    )

    # Load and preprocess the validation dataset
    val_data = datagen.flow_from_directory(
        dataset_path,
        target_size=(224, 224),
        batch_size=batch_size,
        class_mode='categorical',
        subset='validation'
    )

    return train_data, val_data


def build_model(model_name):
    base_model = None

    # The "custom" models are the standard ImageNet base models below with the
    # custom classification layers added further down in this function.
    if model_name == 'custom-nasnetmobile':
        base_model = NASNetMobile(include_top=False, weights='imagenet', input_shape=(224, 224, 3))
    elif model_name == 'custom-vgg16':
        base_model = VGG16(include_top=False, weights='imagenet', input_shape=(224, 224, 3))
    elif model_name == 'custom-inceptionv3':
        base_model = InceptionV3(include_top=False, weights='imagenet', input_shape=(224, 224, 3))

    # Add custom classification layers on top of the base model
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(1024, activation='relu')(x)
    predictions = Dense(8, activation='softmax')(x)

    # Create the final model
    model = Model(inputs=base_model.input, outputs=predictions)

    # Compile the model
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

    return model


def train_and_compare_models(model1, model2, epochs, batch_size, data1, data2):
    # Load and preprocess the datasets
    train_data1, val_data1 = load_and_preprocess_dataset(data1, batch_size, 0.2)
    train_data2, val_data2 = load_and_preprocess_dataset(data2, batch_size, 0.2)

    # Build the models
    model1 = build_model(model1)
    model2 = build_model(model2)

    # Train the models
    history_model1 = model1.fit(train_data1, epochs=epochs, validation_data=val_data1)
    history_model2 = model2.fit(train_data2, epochs=epochs, validation_data=val_data2)

    return history_model1, history_model2


def generate_comparison_graph(history_model1, history_model2):
    # Get the training and validation accuracy
    train_acc_model1 = history_model1.history['accuracy']
    val_acc_model1 = history_model1.history['val_accuracy']
    train_acc_model2 = history_model2.history['accuracy']
    val_acc_model2 = history_model2.history['val_accuracy']

    # Get the training and validation loss
    train_loss_model1 = history_model1.history['loss']
    val_loss_model1 = history_model1.history['val_loss']
    train_loss_model2 = history_model2.history['loss']
    val_loss_model2 = history_model2.history['val_loss']

    # Plot the training and validation accuracy
    plt.figure(figsize=(12, 6))
    plt.subplot(1, 2, 1)
    plt.plot(train_acc_model1, label='Model 1 Training Accuracy')
    plt.plot(val_acc_model1, label='Model 1 Validation Accuracy')
    plt.plot(train_acc_model2, label='Model 2 Training Accuracy')
    plt.plot(val_acc_model2, label='Model 2 Validation Accuracy')
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.title('Training and Validation Accuracy')
    plt.legend()

    # Plot the training and validation loss
    plt.subplot(1, 2, 2)
    plt.plot(train_loss_model1, label='Model 1 Training Loss')
    plt.plot(val_loss_model1, label='Model 1 Validation Loss')
    plt.plot(train_loss_model2, label='Model 2 Training Loss')
    plt.plot(val_loss_model2, label='Model 2 Validation Loss')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.title('Training and Validation Loss')
    plt.legend()

    # Save the comparison graph
    comparison_graph_path = os.path.join('static', 'comparison_graph.png')
    plt.savefig(comparison_graph_path)
    plt.close()

    return comparison_graph_path


if __name__ == '__main__':
    app.run(debug=True)
--------------------------------------------------------------------------------