├── tokenizer.pickle ├── label_encoder.pickle ├── chat_model ├── saved_model.pb ├── keras_metadata.pb └── variables │ ├── variables.index │ └── variables.data-00000-of-00001 ├── requirements.txt ├── ChatBotApp.py ├── train.py ├── ReadMe.md ├── Model_Training.ipynb ├── Model_Training_2.ipynb ├── intents.json └── data-visualization-bokeh-for-learning-purpose.ipynb /tokenizer.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/tokenizer.pickle -------------------------------------------------------------------------------- /label_encoder.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/label_encoder.pickle -------------------------------------------------------------------------------- /chat_model/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/chat_model/saved_model.pb -------------------------------------------------------------------------------- /chat_model/keras_metadata.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/chat_model/keras_metadata.pb -------------------------------------------------------------------------------- /chat_model/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/chat_model/variables/variables.index -------------------------------------------------------------------------------- /chat_model/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dr-mushtaq/Extraneous-comment-management-ECM-in-e-learning/main/chat_model/variables/variables.data-00000-of-00001 -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | streamlit 2 | streamlit-pandas-profiling 3 | pandas 4 | pandas-profiling 5 | numpy 6 | seaborn 7 | matplotlib 8 | matplotlib-inline 9 | scikit-learn 10 | plotly 11 | rasa 12 | -------------------------------------------------------------------------------- /ChatBotApp.py: -------------------------------------------------------------------------------- 1 | import json 2 | import numpy as np 3 | from tensorflow import keras 4 | from sklearn.preprocessing import LabelEncoder 5 | import kivy as kv 6 | import PyQt5 as pq 7 | 8 | 9 | import colorama 10 | colorama.init() 11 | from colorama import Fore, Style, Back 12 | 13 | import random 14 | import pickle 15 | 16 | with open("intents.json") as file: 17 | data = json.load(file) 18 | 19 | 20 | def chat(): 21 | # load trained model 22 | model = keras.models.load_model('chat_model') 23 | 24 | # load tokenizer object 25 | with open('tokenizer.pickle', 'rb') as handle: 26 | tokenizer = pickle.load(handle) 27 | 28 | # load label encoder object 29 | with open('label_encoder.pickle', 'rb') as enc: 30 | lbl_encoder = pickle.load(enc) 31 | 32 | # parameters 33 | max_len = 20 34 | 35 | while True: 36 
| print(Fore.LIGHTBLUE_EX + "User: " + Style.RESET_ALL, end="") 37 | inp = input() 38 | if inp.lower() == "quit": 39 | break 40 | 41 | result = model.predict(keras.preprocessing.sequence.pad_sequences(tokenizer.texts_to_sequences([inp]), 42 | truncating='post', maxlen=max_len)) 43 | tag = lbl_encoder.inverse_transform([np.argmax(result)]) 44 | 45 | for i in data['intents']: 46 | if i['tag'] == tag: 47 | print(Fore.GREEN + "ChatBot:" + Style.RESET_ALL , np.random.choice(i['responses'])) 48 | 49 | # print(Fore.GREEN + "ChatBot:" + Style.RESET_ALL,random.choice(responses)) 50 | 51 | print(Fore.YELLOW + "Start messaging with the bot (type quit to stop)!" + Style.RESET_ALL) 52 | chat() -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import json 2 | import numpy as np 3 | import tensorflow as tf 4 | from tensorflow import keras 5 | from tensorflow.keras.models import Sequential 6 | from tensorflow.keras.layers import Dense, Embedding, GlobalAveragePooling1D 7 | from tensorflow.keras.preprocessing.text import Tokenizer 8 | from tensorflow.keras.preprocessing.sequence import pad_sequences 9 | from sklearn.preprocessing import LabelEncoder 10 | 11 | 12 | with open('intents.json') as file: 13 | data = json.load(file) 14 | 15 | training_sentences = [] 16 | training_labels = [] 17 | labels = [] 18 | responses = [] 19 | 20 | 21 | for intent in data['intents']: 22 | for pattern in intent['patterns']: 23 | training_sentences.append(pattern) 24 | training_labels.append(intent['tag']) 25 | responses.append(intent['responses']) 26 | 27 | if intent['tag'] not in labels: 28 | labels.append(intent['tag']) 29 | 30 | num_classes = len(labels) 31 | 32 | 33 | lbl_encoder = LabelEncoder() 34 | lbl_encoder.fit(training_labels) 35 | training_labels = lbl_encoder.transform(training_labels) 36 | 37 | 38 | vocab_size = 1000 39 | embedding_dim = 16 40 | max_len = 20 41 | oov_token = "" 42 | 43 | tokenizer = Tokenizer(num_words=vocab_size, oov_token=oov_token) 44 | tokenizer.fit_on_texts(training_sentences) 45 | word_index = tokenizer.word_index 46 | sequences = tokenizer.texts_to_sequences(training_sentences) 47 | padded_sequences = pad_sequences(sequences, truncating='post', maxlen=max_len) 48 | 49 | 50 | model = Sequential() 51 | model.add(Embedding(vocab_size, embedding_dim, input_length=max_len)) 52 | model.add(GlobalAveragePooling1D()) 53 | model.add(Dense(16, activation='relu')) 54 | model.add(Dense(16, activation='relu')) 55 | model.add(Dense(num_classes, activation='softmax')) 56 | 57 | model.compile(loss='sparse_categorical_crossentropy', 58 | optimizer='adam', metrics=['accuracy']) 59 | 60 | model.summary() 61 | 62 | 63 | epochs = 500 64 | history = model.fit(padded_sequences, np.array(training_labels), epochs=epochs) 65 | 66 | 67 | # to save the trained model 68 | model.save("chat_model") 69 | 70 | import pickle 71 | 72 | # to save the fitted tokenizer 73 | with open('tokenizer.pickle', 'wb') as handle: 74 | pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL) 75 | 76 | # to save the fitted label encoder 77 | with open('label_encoder.pickle', 'wb') as ecn_file: 78 | pickle.dump(lbl_encoder, ecn_file, protocol=pickle.HIGHEST_PROTOCOL) -------------------------------------------------------------------------------- /ReadMe.md: -------------------------------------------------------------------------------- 1 | **About** 2 | 3 | The coronavirus pandemic has brought about a 
huge increase in the popularity of E-learning this year, with virtually all educational organisations across the world implementing virtual education systems. Chatbots employed in learning systems are beneficial for a variety of objectives: they can respond to pertinent or inconsequential student inquiries, direct a student's inquiry to the applicable department, and swiftly provide applicable information, decreasing the instructor's workload by replying to irrelevant student queries. E-learning, as a modern form of technology, still has several issues that need to be solved. This repository is all about a chatbot system for the Virtual University of Pakistan, which helps with: 4 | 5 | Getting quick answers in emergencies 6 | 7 | Decreasing instructor workload 8 | 9 | Resolving student complaints and problems 10 | 11 | This project will address these challenges by conducting research on: 12 | 13 | The development of ML models that reply to students' frequently asked questions in an E-learning system 14 | 15 | The collection and preparation of student frequently-asked-question data for the ML models. 16 | 17 | The development and deployment of the chatbot system 18 | 19 | Follow Coursesteach for more content 😊 20 | 21 | https://coursesteach.com/ 22 | 23 | 24 |
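**Quick usage sketch (illustrative).** The repository already contains the full pipeline: `train.py` fits a small intent classifier on `intents.json` and saves `chat_model`, `tokenizer.pickle`, and `label_encoder.pickle`, while `ChatBotApp.py` runs a console chat loop on top of them. The snippet below is only a condensed sketch of that inference path, reusing the same artifact names; it is not a replacement for `ChatBotApp.py`.

```python
# Condensed inference sketch based on ChatBotApp.py: load the saved artifacts,
# map a user question to an intent tag, then pick one of that intent's responses.
import json
import pickle

import numpy as np
from tensorflow import keras

model = keras.models.load_model("chat_model")      # saved by train.py
with open("tokenizer.pickle", "rb") as handle:
    tokenizer = pickle.load(handle)                 # fitted Keras Tokenizer
with open("label_encoder.pickle", "rb") as enc:
    lbl_encoder = pickle.load(enc)                  # sklearn LabelEncoder over intent tags
with open("intents.json") as file:
    data = json.load(file)

MAX_LEN = 20  # must match the padding length used in train.py


def answer(user_text: str) -> str:
    """Return a canned response for the predicted intent of `user_text`."""
    seq = tokenizer.texts_to_sequences([user_text])
    padded = keras.preprocessing.sequence.pad_sequences(seq, truncating="post", maxlen=MAX_LEN)
    probs = model.predict(padded, verbose=0)
    tag = lbl_encoder.inverse_transform([np.argmax(probs)])[0]
    for intent in data["intents"]:
        if intent["tag"] == tag:
            return np.random.choice(intent["responses"])
    return "I'm sorry, I didn't understand what you were asking."


print(answer("How can I apply for admission at VU?"))
```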
25 |

## **📚Datasets**

26 | 27 | | Dataset 1 | Dataset 2 | Dataset 3 | 28 | |---|---|---| 29 | |[**Student Frequently Asked Questions**](https://github.com/hussain0048/Machine-Learning/tree/master/Sklearn/Unsupervised%20Learning)|[**1**](https://drive.google.com/file/d/1c6plHK4Yqg_ch8QiNTtfGuSZSK0mt3lf/view)|[![Colab icon](https://img.shields.io/badge/Colab-Open-blue.svg?logo=colab&logoColor=white)](https://github.com/hussain0048/Computer-Vision-/blob/main/Introduction_to_Computer_Vision.ipynb)| 30 |
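The FAQ dataset is shipped in this repository as `intents.json`: a single `intents` list whose entries each carry a `tag`, example `patterns` (student questions), and canned `responses`. As a rough sketch of how `train.py` consumes it (the printed counts depend on the exact intents file used):

```python
import json

# train.py flattens every pattern into one training sentence labelled with its intent tag.
with open("intents.json") as file:
    data = json.load(file)

training_sentences, training_labels = [], []
for intent in data["intents"]:
    for pattern in intent["patterns"]:
        training_sentences.append(pattern)      # e.g. "How can I apply for admission at VU?"
        training_labels.append(intent["tag"])   # e.g. "admission"

print(len(training_sentences), "patterns across", len(set(training_labels)), "intent tags")
```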
31 | 32 |
33 |

## **📚Model training**

34 | 35 | | Code Name | Code | Code | 36 | |---|---|---| 37 | | **1- Saad?**|[![Colab icon](https://img.shields.io/badge/Colab-Open-blue.svg?logo=colab&logoColor=white)](https://github.com/hussain0048/Extraneous-comment-management-ECM-in-e-learning/blob/main/Model_Training_Saad.ipynb)| [1](https://drive.google.com/file/d/1Cb-Cz0dRwNZzAp5f2K5cVNNwBRo3hki4/view) |[![Colab icon](https://img.shields.io/badge/Colab-Open-blue.svg?logo=colab&logoColor=white)](https://github.com/hussain0048/Computer-Vision-/blob/main/Introduction_to_Computer_Vision.ipynb)| 38 |
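The training notebooks linked above follow the same recipe as `train.py`: tokenize the patterns, pad them to a fixed length, and fit a small embedding-based classifier. The sketch below summarises that architecture with the hyperparameters from `train.py`; `num_classes` is really `len(labels)` and depends on the intents file (47 in the notebook's label encoding shown later), so the value here is only illustrative.

```python
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Embedding, GlobalAveragePooling1D

vocab_size = 1000      # tokenizer vocabulary cap (from train.py)
embedding_dim = 16     # size of each learned word vector
max_len = 20           # padded sequence length
num_classes = 47       # len(labels) in train.py; depends on intents.json

model = Sequential([
    Embedding(vocab_size, embedding_dim, input_length=max_len),  # word id -> 16-d vector
    GlobalAveragePooling1D(),          # average the word vectors into one sentence vector
    Dense(16, activation="relu"),
    Dense(16, activation="relu"),
    Dense(num_classes, activation="softmax"),  # one probability per intent tag
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
model.summary()
```

Averaging the embeddings keeps the model tiny and fast to train, which is why running 500 epochs on a small FAQ dataset is feasible; the second notebook (`Model_Training_2.ipynb`) instead uses an NLTK bag-of-words representation fed into a dense network.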
39 | 40 | 41 | 42 | 43 | ## **✨Top Contributors** 44 | We would love your help in making this repository even better! if you have any suggestions for improvement in any Projects content, feel free to open an issue or submit a course contribution request. 45 | 46 | Together, let's make this the best AI learning hub website! 🚀 47 | 48 | Thanks goes to these Wonderful People. Contributions of any kind are welcome!🚀 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /Model_Training.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [], 7 | "toc_visible": true, 8 | "include_colab_link": true 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | }, 14 | "language_info": { 15 | "name": "python" 16 | } 17 | }, 18 | "cells": [ 19 | { 20 | "cell_type": "markdown", 21 | "metadata": { 22 | "id": "view-in-github", 23 | "colab_type": "text" 24 | }, 25 | "source": [ 26 | "\"Open" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "source": [ 32 | "# **Import library**" 33 | ], 34 | "metadata": { 35 | "id": "3HLNoZfkBUGc" 36 | } 37 | }, 38 | { 39 | "cell_type": "code", 40 | "source": [ 41 | "import json \n", 42 | "import numpy as np \n", 43 | "import tensorflow as tf\n", 44 | "from tensorflow import keras\n", 45 | "from tensorflow.keras.models import Sequential\n", 46 | "from tensorflow.keras.layers import Dense, Embedding, GlobalAveragePooling1D\n", 47 | "from tensorflow.keras.preprocessing.text import Tokenizer\n", 48 | "from tensorflow.keras.preprocessing.sequence import pad_sequences\n", 49 | "from sklearn.preprocessing import LabelEncoder" 50 | ], 51 | "metadata": { 52 | "id": "eFtJb0NKBZvV" 53 | }, 54 | "execution_count": 1, 55 | "outputs": [] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "source": [ 60 | "#**Data Preprocessing**" 61 | ], 62 | "metadata": { 63 | "id": "CpC21eawvwSY" 64 | } 65 | }, 66 | { 67 | "cell_type": "code", 68 | "source": [ 69 | "from google.colab import drive\n", 70 | "drive.mount('/content/drive')" 71 | ], 72 | "metadata": { 73 | "colab": { 74 | "base_uri": "https://localhost:8080/" 75 | }, 76 | "id": "R60NmwbGDNp2", 77 | "outputId": "a4a8a5a8-7ed7-4a6b-847d-52f45aa1f11d" 78 | }, 79 | "execution_count": 2, 80 | "outputs": [ 81 | { 82 | "output_type": "stream", 83 | "name": "stdout", 84 | "text": [ 85 | "Mounted at /content/drive\n" 86 | ] 87 | } 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "source": [ 93 | "with open('/content/drive/MyDrive/Research /Research Proposal /Dataset/intents.json') as file:\n", 94 | " data = json.load(file)\n", 95 | " \n", 96 | "training_sentences = []\n", 97 | "training_labels = []\n", 98 | "labels = []\n", 99 | "responses = []" 100 | ], 101 | "metadata": { 102 | "id": "O_cURskBDacM" 103 | }, 104 | "execution_count": 4, 105 | "outputs": [] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "source": [ 110 | "for intent in data['intents']:\n", 111 | " for pattern in intent['patterns']:\n", 112 | " training_sentences.append(pattern)\n", 113 | " training_labels.append(intent['tag'])\n", 114 | " responses.append(intent['responses'])\n", 115 | " \n", 116 | " if intent['tag'] not in labels:\n", 117 | " labels.append(intent['tag'])\n", 118 | " \n", 119 | "num_classes = len(labels)" 120 | ], 121 | "metadata": { 122 | "id": "ICA7KJTRD768" 123 | }, 124 | "execution_count": 5, 125 | 
"outputs": [] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "source": [ 130 | "num_classes" 131 | ], 132 | "metadata": { 133 | "id": "qLdOGR8iFn5h" 134 | }, 135 | "execution_count": null, 136 | "outputs": [] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "source": [ 141 | "lbl_encoder = LabelEncoder()\n", 142 | "lbl_encoder.fit(training_labels)\n", 143 | "training_labels = lbl_encoder.transform(training_labels)" 144 | ], 145 | "metadata": { 146 | "id": "k7gDa-Wtv7by" 147 | }, 148 | "execution_count": 6, 149 | "outputs": [] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "source": [ 154 | "training_labels" 155 | ], 156 | "metadata": { 157 | "id": "cgKfzYgoFjCs", 158 | "outputId": "78c5d97c-6271-4b37-c27f-742facd9c4f4", 159 | "colab": { 160 | "base_uri": "https://localhost:8080/" 161 | } 162 | }, 163 | "execution_count": 11, 164 | "outputs": [ 165 | { 166 | "output_type": "execute_result", 167 | "data": { 168 | "text/plain": [ 169 | "array([23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 1, 1, 1, 2, 2,\n", 170 | " 4, 4, 7, 7, 3, 3, 18, 18, 27, 27, 16, 16, 45, 45, 29, 29, 15,\n", 171 | " 15, 14, 14, 12, 12, 11, 11, 26, 26, 17, 17, 40, 40, 31, 31, 6, 6,\n", 172 | " 32, 32, 22, 22, 21, 21, 9, 9, 28, 28, 34, 34, 5, 5, 19, 19, 46,\n", 173 | " 46, 10, 10, 33, 33, 25, 25, 36, 36, 42, 42, 37, 37, 0, 38, 38, 13,\n", 174 | " 13, 35, 35, 39, 39, 8, 8, 44, 44, 43, 43, 30, 30, 41, 41, 41, 20,\n", 175 | " 20, 20])" 176 | ] 177 | }, 178 | "metadata": {}, 179 | "execution_count": 11 180 | } 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "source": [ 186 | "vocab_size = 1000\n", 187 | "embedding_dim = 16\n", 188 | "max_len = 20\n", 189 | "oov_token = \"\"" 190 | ], 191 | "metadata": { 192 | "id": "tVVgsXrGEKvy" 193 | }, 194 | "execution_count": 7, 195 | "outputs": [] 196 | }, 197 | { 198 | "cell_type": "code", 199 | "source": [ 200 | "tokenizer = Tokenizer(num_words=vocab_size, oov_token=oov_token)\n", 201 | "tokenizer.fit_on_texts(training_sentences)\n", 202 | "word_index = tokenizer.word_index\n", 203 | "sequences = tokenizer.texts_to_sequences(training_sentences)\n", 204 | "padded_sequences = pad_sequences(sequences, truncating='post', maxlen=max_len)" 205 | ], 206 | "metadata": { 207 | "id": "SvfCy-GaEQ05" 208 | }, 209 | "execution_count": 8, 210 | "outputs": [] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "source": [ 215 | "#**Model Training**\n", 216 | "\n" 217 | ], 218 | "metadata": { 219 | "id": "8sPgEeuk3Btl" 220 | } 221 | }, 222 | { 223 | "cell_type": "code", 224 | "source": [ 225 | "model = Sequential()\n", 226 | "model.add(Embedding(vocab_size, embedding_dim, input_length=max_len))\n", 227 | "model.add(GlobalAveragePooling1D())\n", 228 | "model.add(Dense(16, activation='relu'))\n", 229 | "model.add(Dense(16, activation='relu'))\n", 230 | "model.add(Dense(num_classes, activation='softmax'))\n", 231 | "\n", 232 | "model.compile(loss='sparse_categorical_crossentropy', \n", 233 | " optimizer='adam', metrics=['accuracy'])\n", 234 | "\n", 235 | "model.summary()\n", 236 | "\n", 237 | "\n", 238 | "epochs = 500\n", 239 | "history = model.fit(padded_sequences, np.array(training_labels), epochs=epochs)" 240 | ], 241 | "metadata": { 242 | "id": "uw839UJREZTN" 243 | }, 244 | "execution_count": null, 245 | "outputs": [] 246 | }, 247 | { 248 | "cell_type": "markdown", 249 | "source": [ 250 | "# **Model Saving**" 251 | ], 252 | "metadata": { 253 | "id": "Ryid343CTpV-" 254 | } 255 | }, 256 | { 257 | "cell_type": "code", 258 | "source": [ 259 | "# to save the trained model\n", 260 | 
"model.save(\"chat_model\")" 261 | ], 262 | "metadata": { 263 | "id": "vVx-6bzEz5Nc", 264 | "colab": { 265 | "base_uri": "https://localhost:8080/" 266 | }, 267 | "outputId": "f6fdeffd-a1dd-42cf-cf3c-10b64da9e792" 268 | }, 269 | "execution_count": 10, 270 | "outputs": [ 271 | { 272 | "output_type": "stream", 273 | "name": "stderr", 274 | "text": [ 275 | "WARNING:absl:Found untraced functions such as _update_step_xla while saving (showing 1 of 1). These functions will not be directly callable after loading.\n" 276 | ] 277 | } 278 | ] 279 | } 280 | ] 281 | } -------------------------------------------------------------------------------- /Model_Training_2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [], 7 | "toc_visible": true, 8 | "include_colab_link": true 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | }, 14 | "language_info": { 15 | "name": "python" 16 | } 17 | }, 18 | "cells": [ 19 | { 20 | "cell_type": "markdown", 21 | "metadata": { 22 | "id": "view-in-github", 23 | "colab_type": "text" 24 | }, 25 | "source": [ 26 | "\"Open" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "source": [ 32 | "# **1-Import library**" 33 | ], 34 | "metadata": { 35 | "id": "3HLNoZfkBUGc" 36 | } 37 | }, 38 | { 39 | "cell_type": "code", 40 | "source": [ 41 | "import nltk\n", 42 | "from nltk.stem import WordNetLemmatizer\n", 43 | "lemmatizer = WordNetLemmatizer()\n", 44 | "import json\n", 45 | "import pickle\n", 46 | "\n", 47 | "import numpy as np\n", 48 | "from keras.models import Sequential\n", 49 | "from keras.layers import Dense, Activation, Dropout\n", 50 | "from keras.optimizers import SGD\n", 51 | "import random" 52 | ], 53 | "metadata": { 54 | "id": "UkuJaCW9bT6u" 55 | }, 56 | "execution_count": 4, 57 | "outputs": [] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "source": [ 62 | "import nltk\n", 63 | "nltk.download('punkt')" 64 | ], 65 | "metadata": { 66 | "id": "TbN0QbsScujH" 67 | }, 68 | "execution_count": null, 69 | "outputs": [] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "source": [ 74 | "#**2-Dataset Loading**" 75 | ], 76 | "metadata": { 77 | "id": "CpC21eawvwSY" 78 | } 79 | }, 80 | { 81 | "cell_type": "code", 82 | "source": [ 83 | "from google.colab import drive\n", 84 | "drive.mount('/content/drive')" 85 | ], 86 | "metadata": { 87 | "colab": { 88 | "base_uri": "https://localhost:8080/" 89 | }, 90 | "id": "R60NmwbGDNp2", 91 | "outputId": "ce2de1b9-b58b-4b34-eba2-0bcffae5ed86" 92 | }, 93 | "execution_count": 1, 94 | "outputs": [ 95 | { 96 | "output_type": "stream", 97 | "name": "stdout", 98 | "text": [ 99 | "Mounted at /content/drive\n" 100 | ] 101 | } 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "source": [ 107 | "words=[]\n", 108 | "classes = []\n", 109 | "documents = []\n", 110 | "ignore_words = ['?', '!']\n", 111 | "data_file = open('/content/drive/MyDrive/Research /Research Proposal /Dataset/intentsnew (6).json').read()\n", 112 | "intents = json.loads(data_file)" 113 | ], 114 | "metadata": { 115 | "id": "CfFYn2f_b5u9" 116 | }, 117 | "execution_count": 6, 118 | "outputs": [] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "source": [ 123 | "# **3-Data Preprocessing**" 124 | ], 125 | "metadata": { 126 | "id": "5Yl5GZnxZnI3" 127 | } 128 | }, 129 | { 130 | "cell_type": "code", 131 | "source": [ 132 | "for intent in intents['intents']:\n", 133 | " for pattern in 
intent['patterns']:\n", 134 | "\n", 135 | " #tokenize each word\n", 136 | " w = nltk.word_tokenize(pattern)\n", 137 | " words.extend(w)\n", 138 | " # a dd documents in the corpus\n", 139 | " documents.append((w, intent['tag']))\n", 140 | "\n", 141 | " # add to our classes list\n", 142 | " if intent['tag'] not in classes:\n", 143 | " classes.append(intent['tag'])" 144 | ], 145 | "metadata": { 146 | "id": "ICA7KJTRD768" 147 | }, 148 | "execution_count": 7, 149 | "outputs": [] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "source": [ 154 | "# lemmaztize and lower each word and remove duplicates\n", 155 | "nltk.download('wordnet')\n", 156 | "words = [lemmatizer.lemmatize(w.lower()) for w in words if w not in ignore_words]\n", 157 | "words = sorted(list(set(words)))\n", 158 | "# sort classes\n", 159 | "classes = sorted(list(set(classes)))\n", 160 | "# documents = combination between patterns and intents\n", 161 | "print (len(documents), \"documents\")\n", 162 | "# classes = intents\n", 163 | "print (len(classes), \"classes\", classes)\n", 164 | "# words = all words, vocabulary\n", 165 | "print (len(words), \"unique lemmatized words\", words)\n", 166 | "pickle.dump(words,open('texts.pkl','wb'))\n", 167 | "pickle.dump(classes,open('labels.pkl','wb'))\n" 168 | ], 169 | "metadata": { 170 | "id": "-azsX0L8c7hX" 171 | }, 172 | "execution_count": null, 173 | "outputs": [] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "source": [ 178 | "# create our training data\n", 179 | "training = []\n", 180 | "# create an empty array for our output\n", 181 | "output_empty = [0] * len(classes)\n", 182 | "# training set, bag of words for each sentence\n", 183 | "for doc in documents:\n", 184 | " # initialize our bag of words\n", 185 | " bag = []\n", 186 | " # list of tokenized words for the pattern\n", 187 | " pattern_words = doc[0]\n", 188 | " # lemmatize each word - create base word, in attempt to represent related words\n", 189 | " pattern_words = [lemmatizer.lemmatize(word.lower()) for word in pattern_words]\n", 190 | " # create our bag of words array with 1, if word match found in current pattern\n", 191 | " for w in words:\n", 192 | " bag.append(1) if w in pattern_words else bag.append(0)\n", 193 | "\n", 194 | " # output is a '0' for each tag and '1' for current tag (for each pattern)\n", 195 | " output_row = list(output_empty)\n", 196 | " output_row[classes.index(doc[1])] = 1\n", 197 | "\n", 198 | " training.append([bag, output_row])\n", 199 | "# shuffle our features and turn into np.array\n", 200 | "random.shuffle(training)\n", 201 | "training = np.array(training)\n", 202 | "# create train and test lists. X - patterns, Y - intents\n", 203 | "train_x = list(training[:,0])\n", 204 | "train_y = list(training[:,1])\n", 205 | "print(\"Training data created\")\n" 206 | ], 207 | "metadata": { 208 | "id": "ZpeeTqAQdIUO", 209 | "outputId": "5c811704-a703-4873-ec88-81a6fbb1cac0", 210 | "colab": { 211 | "base_uri": "https://localhost:8080/" 212 | } 213 | }, 214 | "execution_count": 9, 215 | "outputs": [ 216 | { 217 | "output_type": "stream", 218 | "name": "stdout", 219 | "text": [ 220 | "Training data created\n" 221 | ] 222 | }, 223 | { 224 | "output_type": "stream", 225 | "name": "stderr", 226 | "text": [ 227 | ":24: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. 
If you meant to do this, you must specify 'dtype=object' when creating the ndarray.\n", 228 | " training = np.array(training)\n" 229 | ] 230 | } 231 | ] 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "source": [ 236 | "# **4-Model Training**" 237 | ], 238 | "metadata": { 239 | "id": "iiA5mfO_bf5G" 240 | } 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "source": [ 245 | "## **4.1-Neural Netowrk**" 246 | ], 247 | "metadata": { 248 | "id": "P-gDV4j8l0L0" 249 | } 250 | }, 251 | { 252 | "cell_type": "code", 253 | "source": [ 254 | "# Create model - 3 layers. First layer 128 neurons, second layer 64 neurons and 3rd output layer contains number of neurons\n", 255 | "# equal to number of intents to predict output intent with softmax\n", 256 | "model = Sequential()\n", 257 | "model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))\n", 258 | "model.add(Dropout(0.5))\n", 259 | "model.add(Dense(64, activation='relu'))\n", 260 | "model.add(Dropout(0.5))\n", 261 | "model.add(Dense(len(train_y[0]), activation='softmax'))\n", 262 | "\n", 263 | "# Compile model. Stochastic gradient descent with Nesterov accelerated gradient gives good results for this model\n", 264 | "sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)\n", 265 | "model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])\n", 266 | "\n", 267 | "#fitting and saving the model\n", 268 | "hist = model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)\n" 269 | ], 270 | "metadata": { 271 | "id": "pyIo9YkVdORl" 272 | }, 273 | "execution_count": null, 274 | "outputs": [] 275 | }, 276 | { 277 | "cell_type": "markdown", 278 | "source": [ 279 | "### **4.1.1 Model Saving**" 280 | ], 281 | "metadata": { 282 | "id": "Em4aCw6ImU3R" 283 | } 284 | }, 285 | { 286 | "cell_type": "code", 287 | "source": [ 288 | "model.save('chat_model', hist)\n", 289 | "print(\"model created\")" 290 | ], 291 | "metadata": { 292 | "id": "GwFda9VsmgQC", 293 | "outputId": "ccf8d6e4-6bef-4d81-92fd-245e2fc38a2f", 294 | "colab": { 295 | "base_uri": "https://localhost:8080/" 296 | } 297 | }, 298 | "execution_count": 12, 299 | "outputs": [ 300 | { 301 | "output_type": "stream", 302 | "name": "stdout", 303 | "text": [ 304 | "model created\n" 305 | ] 306 | } 307 | ] 308 | }, 309 | { 310 | "cell_type": "markdown", 311 | "source": [ 312 | "## **4.2- LSTM**" 313 | ], 314 | "metadata": { 315 | "id": "5HvFngP6nzWA" 316 | } 317 | }, 318 | { 319 | "cell_type": "code", 320 | "source": [ 321 | "import numpy as np\n", 322 | "from keras.models import Sequential\n", 323 | "from keras.layers import LSTM, Dense" 324 | ], 325 | "metadata": { 326 | "id": "1nVA9smHqt4v" 327 | }, 328 | "execution_count": 22, 329 | "outputs": [] 330 | }, 331 | { 332 | "cell_type": "code", 333 | "source": [ 334 | "# Create the LSTM network\n", 335 | "model = Sequential()\n", 336 | "model.add(LSTM(8, input_shape=(5, 1), return_sequences=False))\n", 337 | "model.add(Dense(1))" 338 | ], 339 | "metadata": { 340 | "id": "kcFSuMKmq2C_" 341 | }, 342 | "execution_count": 23, 343 | "outputs": [] 344 | }, 345 | { 346 | "cell_type": "code", 347 | "source": [ 348 | "# Compile the model\n", 349 | "model.compile(loss='mse', optimizer='adam')" 350 | ], 351 | "metadata": { 352 | "id": "qO4Pu8Eaq68Y" 353 | }, 354 | "execution_count": 24, 355 | "outputs": [] 356 | }, 357 | { 358 | "cell_type": "code", 359 | "source": [ 360 | "# Train the model\n", 361 | "model.fit(np.array(train_x), np.array(train_y), epochs=100)" 362 | ], 363 | "metadata": { 364 | 
"id": "pISQug2TrAX3", 365 | "outputId": "c76ac1a4-5c42-4dea-a94b-dc4cdf4d20ae", 366 | "colab": { 367 | "base_uri": "https://localhost:8080/", 368 | "height": 606 369 | } 370 | }, 371 | "execution_count": 30, 372 | "outputs": [ 373 | { 374 | "output_type": "stream", 375 | "name": "stdout", 376 | "text": [ 377 | "Epoch 1/100\n" 378 | ] 379 | }, 380 | { 381 | "output_type": "error", 382 | "ename": "ValueError", 383 | "evalue": "ignored", 384 | "traceback": [ 385 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 386 | "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", 387 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Train the model\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_y\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", 388 | "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/utils/traceback_utils.py\u001b[0m in \u001b[0;36merror_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 68\u001b[0m \u001b[0;31m# To get the full stack trace, call:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;31m# `tf.debugging.disable_traceback_filtering()`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 70\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwith_traceback\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiltered_tb\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 71\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 72\u001b[0m \u001b[0;32mdel\u001b[0m \u001b[0mfiltered_tb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 389 | "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mtf__train_function\u001b[0;34m(iterator)\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0mdo_return\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 15\u001b[0;31m \u001b[0mretval_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mag__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconverted_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mag__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mld\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstep_function\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mag__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mld\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mag__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mld\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfscope\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 16\u001b[0m \u001b[0;32mexcept\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0mdo_return\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 390 | "\u001b[0;31mValueError\u001b[0m: in user code:\n\n File \"/usr/local/lib/python3.10/dist-packages/keras/engine/training.py\", line 1284, in train_function *\n return step_function(self, iterator)\n File \"/usr/local/lib/python3.10/dist-packages/keras/engine/training.py\", line 1268, in step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n File \"/usr/local/lib/python3.10/dist-packages/keras/engine/training.py\", line 1249, in run_step **\n outputs = model.train_step(data)\n File \"/usr/local/lib/python3.10/dist-packages/keras/engine/training.py\", line 1050, in train_step\n y_pred = self(x, training=True)\n File \"/usr/local/lib/python3.10/dist-packages/keras/utils/traceback_utils.py\", line 70, in error_handler\n raise e.with_traceback(filtered_tb) from None\n File \"/usr/local/lib/python3.10/dist-packages/keras/engine/input_spec.py\", line 298, in assert_input_compatibility\n raise ValueError(\n\n ValueError: Input 0 of layer \"sequential_6\" is incompatible with the layer: expected shape=(None, 100), found shape=(None, 391)\n" 391 | ] 392 | } 393 | ] 394 | } 395 | ] 396 | } -------------------------------------------------------------------------------- /intents.json: -------------------------------------------------------------------------------- 1 | { 2 | "intents": [ 3 | { 4 | "tag": "greetings", 5 | "patterns": ["hi", "hello", "hey", "good morning", "good evening"], 6 | "responses": ["Hello! How can I help you?", "Hi there! How can I assist you today?"] 7 | }, 8 | { 9 | "tag": "help", 10 | "patterns": ["Could you help me?", "give me a hand please", "Can you help?", "What can you do for me?", "I need a support", "I need a help", "support me please"], 11 | "responses": ["Tell me how can assist you", "Tell me your problem to assist you", "Yes Sure, How can I support you?"] 12 | }, 13 | { 14 | "tag": "admission", 15 | "patterns": ["What is the admission system at VU?", "How can I apply for admission at VU?", "How to get admission in vu?"], 16 | "responses": ["The admission system at VU is conducted online. You can apply for admission by visiting the VU website and following the instructions for online admission. The website provides all the necessary information regarding admission requirements, dates, and procedures."] 17 | }, 18 | { 19 | "tag": "admission_criteria", 20 | "patterns": ["What are the admission criteria at VU?", "What are the requirements for admission at VU?"], 21 | "responses": ["The admission criteria at VU vary depending on the degree program. Generally, applicants must have a minimum of 45% marks in their intermediate or equivalent degree. There are also different requirements for different degree programs. 
You can find detailed information on the VU website regarding the admission criteria for each program."] 22 | 23 | }, 24 | { 25 | "tag": "admission_types", 26 | "patterns": ["What are the types of admission at VU?", "How many types of admission does VU offer?"], 27 | "responses": ["VU offers three types of admission: Regular, Late and Deficient. Regular admission is offered at the start of the semester. Late admission is offered to students who missed the regular admission deadline. Deficient admission is offered to students who have failed to clear the required course(s) in a previous semester."] 28 | 29 | }, 30 | { 31 | "tag": "benefits", 32 | "patterns": ["What are the benefits of taking admission in VU?", "Why should I choose VU for my education?"], 33 | "responses": ["There are several benefits to taking admission at Virtual University of Pakistan, including: \n1) Flexible learning: you can study from anywhere at any time. \n2) Affordable education: VU offers quality education at an affordable cost. \n3) Comprehensive curriculum: VU offers a wide range of programs and courses in various fields. \n4) Experienced faculty: VU has a team of experienced and qualified professors who are experts in their fields. \n5) Advanced technology: VU uses the latest technology to deliver its courses and provide a seamless learning experience. \n6) Professional development: VU offers various opportunities for professional development, including internships and career counseling, to help students prepare for their future careers."] 34 | }, 35 | { 36 | "tag": "admission_process", 37 | "patterns": ["How can I apply for admission at VU?", "What is the admission process at VU?"], 38 | "responses": ["To apply for admission at Virtual University of Pakistan, you need to visit the VU website and create an account. Once you have created an account, you can fill out the online admission form and submit it along with the required documents. The admission process usually consists of an entrance test and an interview. Once you have been accepted, you will receive an admission letter and can proceed to register for your courses."] 39 | }, 40 | { 41 | "tag": "financial_aid", 42 | "patterns": ["Does VU offer financial aid?", "How can I get financial assistance to study at VU?"], 43 | "responses": ["Yes, VU offers financial aid to eligible students in the form of scholarships, loans, and grants. You can apply for financial aid through the VU Financial Assistance Office and must meet the eligibility requirements to receive assistance."] 44 | }, 45 | { 46 | "tag": "online_learning", 47 | "patterns": ["Is online learning at VU as effective as traditional learning?", "Can I get a quality education through online learning at VU?"], 48 | "responses": ["Yes, online learning at Virtual University of Pakistan is just as effective as traditional learning, if not more so. VU uses a comprehensive online learning platform and a team of experienced professors to deliver quality education to its students. Online learning allows for greater flexibility and convenience and can be especially helpful for students who have other commitments or cannot attend traditional classes."] 49 | }, 50 | { 51 | "tag": "fee_structure", 52 | "patterns": ["What is the fee structure at VU?", "How much do I have to pay to study at VU?"], 53 | "responses": ["The fee structure at VU varies depending on the degree program and the number of credit hours per course. 
The fee for each course is listed on the VU website, along with the total fee for the degree program. The fee can be paid online or at designated banks. Students can also pay the fee in installments."] 54 | 55 | }, 56 | { 57 | "tag": "vu_cost", 58 | "patterns": ["Is Virtual University of Pakistan expensive?", "What is the cost of studying at VU?"], 59 | "responses": ["The cost of studying at Virtual University of Pakistan varies depending on the program and the number of courses you take. However, VU is generally considered to be an affordable option compared to traditional universities. You can find detailed information about the tuition fees on the VU website, or by contacting the VU student support team."] 60 | }, 61 | { 62 | "tag": "payment_options", 63 | "patterns": ["What payment options are available for VU tuition?", "How can I pay for my VU tuition?"], 64 | "responses": ["Virtual University of Pakistan offers several payment options for tuition, including online banking, credit card payments, and bank drafts. You can find more information about the payment options on the VU website, or by contacting the VU student support team."] 65 | }, 66 | { 67 | "tag": "fee_installments", 68 | "patterns": ["Can I pay my fees in installments?", "What are the options for paying the fee at VU?"], 69 | "responses": ["Yes, VU offers the option of paying the fee in installments. The fee can be paid in two or three installments, depending on the program. Students can find the exact details on the VU website."] 70 | 71 | }, 72 | { 73 | "tag": "fee_deadline", 74 | "patterns": ["What is the deadline for paying the fee at VU?", "When do I need to pay my fees by?"], 75 | "responses": ["The deadline for paying the fee at VU is typically mentioned on the VU website and is also communicated to students through email and notifications. Students are required to pay the fee within the given deadline to avoid any late fees or penalties."] 76 | 77 | }, 78 | { 79 | "tag": "exam_type", 80 | "patterns": ["What types of exams are held at VU?", "How are exams conducted at VU?"], 81 | "responses": ["Virtual University of Pakistan conducts two types of exams: Midterm and Final term exams."] 82 | }, 83 | { 84 | "tag": "exam_system", 85 | "patterns": ["How is the exam system at VU?", "What is the process of taking exams at VU?"], 86 | "responses": ["VU has an online examination system in which students take exams through their own Learning Management System (LMS). The exams are invigilated by VU's own proctors and are conducted in a secure environment."] 87 | 88 | }, 89 | { 90 | "tag": "midterm", 91 | "patterns": ["What is a midterm exam?", "How is the midterm exam conducted at VU?"], 92 | "responses": ["The Midterm exam is held in the middle of the semester and covers half of the course material. It is an online exam conducted through VU's LMS and is usually held for a duration of 2 hours."] 93 | 94 | }, 95 | { 96 | "tag": "finalterm", 97 | "patterns": ["What is a final term exam?", "How is the final term exam conducted at VU?"], 98 | "responses": ["The Final term exam is held at the end of the semester and covers the entire course material. It is an online exam conducted through VU's LMS and is usually held for a duration of 3 hours."] 99 | 100 | }, 101 | { 102 | "tag": "term", 103 | "patterns": ["How many terms are there in a year at VU?", "What is the duration of a term at VU?"], 104 | "responses": ["There are two terms in a year at VU: Spring and Fall. 
Each term is of 18 weeks duration."] 105 | 106 | }, 107 | { 108 | "tag": "plagiarism", 109 | "patterns": ["What is the plagiarism policy at VU?", "How does VU deal with plagiarism?"], 110 | "responses": ["VU has a strict policy against plagiarism. Students are expected to submit original work, and any form of plagiarism, including copying from online sources or other students' work, is considered a serious academic offence. VU uses various tools and technologies to detect plagiarism, and if a student is found to have committed plagiarism, they may face a range of penalties, including failing the assignment or the course, or even expulsion from the university."] 111 | 112 | }, 113 | { 114 | "tag": "avoid_plagiarism", 115 | "patterns": ["How can I avoid plagiarism at VU?", "What are some tips for avoiding plagiarism at VU?"], 116 | "responses": ["To avoid plagiarism at VU, students should always properly cite their sources, whether it's a direct quote or a paraphrase. They should also avoid copying and pasting from online sources, and instead use the sources to inform their own work. Students can also use plagiarism-checking tools to ensure that their work is original and properly cited."] 117 | 118 | }, 119 | { 120 | "tag": "plagiarism_consequences", 121 | "patterns": ["What are the consequences of plagiarism at VU?", "What happens if I am caught plagiarizing at VU?"], 122 | "responses": ["The consequences of plagiarism at VU can be serious. Depending on the severity of the offence, a student may fail the assignment, the course, or even be expelled from the university. Additionally, the offence will be recorded in the student's academic record, which can have long-term consequences for their academic and professional career."] 123 | 124 | }, 125 | { 126 | "tag": "grading_scheme", 127 | "patterns": ["What is the grading scheme at VU?", "How are grades calculated at VU?"], 128 | "responses": ["The grading scheme at VU is based on the percentage of marks obtained by the students in a course. The letter grades and their corresponding percentage ranges are as follows:\n\nA+ (90-100%)\nA (80-89%)\nB (70-79%)\nC (60-69%)\nD (50-59%)\nF (Below 50%)"] 129 | }, 130 | { 131 | "tag": "gpa", 132 | "patterns": ["How is GPA calculated at VU?", "What is the GPA system at VU?"], 133 | "responses": ["The GPA (Grade Point Average) at VU is calculated by multiplying the grade points obtained in a course with the credit hours assigned to that course, and then dividing the total grade points earned by the total credit hours attempted. The GPA scale ranges from 0.00 to 4.00."] 134 | 135 | }, 136 | { 137 | "tag": "cgpa", 138 | "patterns": ["What is CGPA?", "How is CGPA calculated at VU?"], 139 | "responses": ["CGPA (Cumulative Grade Point Average) is the overall GPA calculated by taking into account all the courses attempted by a student. The CGPA is calculated by multiplying the grade points earned in each course with the credit hours assigned to that course, adding up the total grade points earned and dividing the sum by the total credit hours attempted. The CGPA scale ranges from 0.00 to 4.00."] 140 | 141 | }, 142 | { 143 | "tag": "passing_criteria", 144 | "patterns": ["What is the passing criteria at VU?", "What is the minimum passing marks at VU?"], 145 | "responses": ["The passing criteria at VU is to obtain at least 50% marks in a course. 
Any student who fails to obtain 50% marks is considered to have failed that course."] 146 | 147 | }, 148 | { 149 | "tag": "quizzes", 150 | "patterns": ["How are quizzes conducted at VU?", "What is the weightage of quizzes at VU?"], 151 | "responses": ["Quizzes are a regular part of the learning process at VU. They are conducted online and their weightage varies from course to course. The weightage of quizzes is mentioned in the course overview and is also mentioned in the course syllabus."] 152 | 153 | }, 154 | { 155 | "tag": "assignments", 156 | "patterns": ["What is the assignment submission process at VU?", "How many assignments are there in a course at VU?"], 157 | "responses": ["Assignments are an important part of the learning process at VU. They are submitted online on the VU Learning Management System (LMS) within the given deadline. The number of assignments in a course varies from course to course and is mentioned in the course overview and also in the course syllabus."] 158 | 159 | }, 160 | { 161 | "tag": "gdb", 162 | "patterns": ["What is GDB at VU?", "How is GDB conducted at VU?"], 163 | "responses": ["GDB (Graded Discussion Board) is a type of assessment at VU where students are required to participate in an online discussion on a given topic. The GDB is graded and its weightage is mentioned in the course overview and the course syllabus. The GDB is conducted online and students are required to participate within the given deadline."] 164 | 165 | }, 166 | { 167 | "tag": "vu_professors", 168 | "patterns": ["What can you tell me about VU professors?", "How are the professors at VU?"], 169 | "responses": ["The professors at VU are highly qualified and experienced in their respective fields. They are experts in their subject matter and are committed to providing a quality education to their students. They are also available to provide support and guidance to students throughout the course. You can contact your professors through email or the VU Learning Management System (LMS) to ask questions or discuss course material."] 170 | }, 171 | { 172 | "tag": "contact_professor", 173 | "patterns": ["How can I contact my professor?", "What is the best way to reach out to my professor?"], 174 | "responses": ["You can contact your professor through email or the VU Learning Management System (LMS). Your professor's email address can be found on the course syllabus, and you can also access the LMS to send a message or post a question on the course forum. If you need additional help, you can also attend office hours or schedule a meeting with your professor."] 175 | }, 176 | { 177 | "tag": "professor_feedback", 178 | "patterns": ["Can I give feedback on my professor?", "How can I provide feedback on my professor?"], 179 | "responses": ["Yes, you can provide feedback on your professor through the VU Course Evaluation system. At the end of the course, you will receive a link to the course evaluation form, where you can provide feedback on your professor and the course. Your feedback is valuable and helps VU to continually improve the quality of education."] 180 | }, 181 | { 182 | "tag": "how_to_study", 183 | "patterns": ["How can I study effectively at VU?", "What are some tips for studying in VU?"], 184 | "responses": ["To study effectively at VU, it is important to have a clear understanding of the course material. You should attend all lectures and take detailed notes. You can also watch recorded lectures and access course material on the VU Learning Management System (LMS) at any time. 
Additionally, you can form study groups with other students to discuss course material and exchange ideas. It's also important to manage your time well and allocate sufficient time for studying."] 185 | 186 | }, 187 | { 188 | "tag": "study_tips", 189 | "patterns": ["What are some tips for studying at VU?", "How can I improve my performance at VU?"], 190 | "responses": ["Here are some study tips that may help you perform better at VU: 1) Make a study schedule and stick to it, 2) Take regular breaks to avoid burnout, 3) Use the VU Learning Management System (LMS) to access course materials and communicate with your professors, 4) Participate in online discussions and forums to engage with your classmates, 5) Attend virtual live sessions and review recordings of missed lectures, 6) Use past papers and quizzes to practice for exams, and 7) Get help from your professors or teaching assistants if you are struggling with course material."] 191 | 192 | }, 193 | { 194 | "tag": "time_management", 195 | "patterns": ["How can I manage my time better at VU?", "What are some time management tips for VU students?"], 196 | "responses": ["Here are some time management tips that may help you: 1) Prioritize your tasks and deadlines, 2) Use a planner or calendar to schedule your study time, 3) Minimize distractions by studying in a quiet place and turning off your phone or notifications, 4) Break down large assignments into smaller, manageable tasks, 5) Set goals and rewards for yourself to stay motivated, and 6) Avoid procrastination by starting your work early and staying on track."] 197 | 198 | }, 199 | { 200 | "tag": "support_services", 201 | "patterns": ["What support services are available to VU students?", "How can I get help if I'm struggling at VU?"], 202 | "responses": ["VU offers a variety of support services to help students succeed. You can access resources like the Virtual Writing Center for help with writing assignments, the Virtual Math Lab for help with math problems, and the Virtual Reference Desk for research assistance. Additionally, you can get support from your professors, teaching assistants, and classmates through the VU Learning Management System (LMS). If you need further assistance, you can also reach out to the VU Student Services department for help with academic, financial, or personal issues."] 203 | 204 | }, 205 | { 206 | "tag": "academic_support", 207 | "patterns": ["What academic support services does VU offer?"], 208 | "responses": ["VU offers a range of academic support services, including access to course instructors and teaching assistants, as well as tutoring services for specific courses. You can also attend review sessions and academic workshops to enhance your learning. 
Additionally, VU provides an online library with a range of resources and materials to support your studies."] 209 | 210 | }, 211 | { 212 | "tag": "support_system", 213 | "patterns": ["How can I get help at VU?", "What support services are available at VU?"], 214 | "responses": ["Virtual University of Pakistan offers various support services to its students, including: 1) Faculty support: you can contact your course instructors through email, discussion forums, or online chat, 2) Student support: VU has a dedicated student support team that can assist you with any questions or issues you may have, 3) Technical support: VU provides technical support to students who are experiencing issues with the online learning platform or other technical difficulties, and 4) Career services: VU offers career counseling, job placement assistance, and internship opportunities to help you prepare for your future career."] 215 | }, 216 | { 217 | "tag": "faculty_contact", 218 | "patterns": ["How can I contact my course instructor at VU?", "Is there a way to get in touch with the faculty at VU?"], 219 | "responses": ["You can contact your course instructor at Virtual University of Pakistan through email, online chat, or discussion forums. The contact information for your instructor can be found on the VU website or in the course syllabus."] 220 | }, 221 | { 222 | "tag": "student_support", 223 | "patterns": ["What kind of support is available for VU students?", "Where can I go for help as a VU student?"], 224 | "responses": ["VU has a dedicated student support team that can assist you with any questions or issues you may have. You can contact the student support team through the VU website or by phone. They can assist you with technical issues, academic concerns, or any other issues you may have."] 225 | }, 226 | { 227 | "tag": "technical_support", 228 | "patterns": ["How can I get technical support at VU?", "What do I do if I have technical issues with the online learning platform at VU?"], 229 | "responses": ["If you are experiencing technical issues with the online learning platform at Virtual University of Pakistan, you can contact the technical support team. The contact information for the technical support team can be found on the VU website or in the online learning platform itself. They can assist you with any technical difficulties you may be having."] 230 | }, 231 | { 232 | "tag": "career_services", 233 | "patterns": ["Does VU offer career services?", "How can I get help with my career goals as a VU student?"], 234 | "responses": ["Yes, Virtual University of Pakistan offers career counseling, job placement assistance, and internship opportunities to help you prepare for your future career. You can access these services through the VU website or by contacting the career services team directly."] 235 | }, 236 | { 237 | "tag": "vu_campuses", 238 | "patterns": ["Where are Virtual University of Pakistan's campuses located?", "Are there any physical campuses of VU?"], 239 | "responses": ["Virtual University of Pakistan has campuses in several cities across the country, including Lahore, Islamabad, Karachi, Faisalabad, Peshawar, and Rawalpindi. However, VU is primarily an online university, and most of the courses are conducted through an online platform. 
If you need to visit a physical campus for any reason, you can find the address and contact details of the nearest campus on the VU website."] 240 | }, 241 | { 242 | "tag": "virtual_campus", 243 | "patterns": ["What is the virtual campus of VU?", "How does the virtual campus work?"], 244 | "responses": ["The virtual campus of Virtual University of Pakistan is a web-based platform that provides students with access to course materials, lectures, discussion forums, and other resources. It allows students to participate in online classes, communicate with faculty and peers, submit assignments, and take exams. The virtual campus is available 24/7 and can be accessed from anywhere with an internet connection."] 245 | }, 246 | { 247 | "tag": "physical_campus", 248 | "patterns": ["Can I visit a physical campus of VU?", "What facilities are available at the VU physical campuses?"], 249 | "responses": ["Yes, you can visit a physical campus of Virtual University of Pakistan if you need to. The campuses have a range of facilities for students, including libraries, computer labs, lecture halls, and study areas. The campuses also have a student support center where you can get help with academic and administrative issues."] 250 | }, 251 | { 252 | "tag": "thanks", 253 | "patterns": ["Thank you", "Thanks for your help", "Appreciate your assistance"], 254 | "responses": ["You're welcome! Feel free to ask me any other questions you may have."] 255 | }, 256 | { 257 | "tag": "goodbye", 258 | "patterns": ["Bye", "See you later", "Goodbye"], 259 | "responses": ["See you later", "Have a nice day", "Bye! Come back again"] 260 | }, 261 | { 262 | "tag": "fallback", 263 | "patterns": [], 264 | "responses": ["I'm sorry, I didn't understand what you were asking. Can you please rephrase your question?"] 265 | } 266 | ] 267 | } 268 | -------------------------------------------------------------------------------- /data-visualization-bokeh-for-learning-purpose.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "a8d9a4f1", 7 | "metadata": { 8 | "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19", 9 | "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5", 10 | "execution": { 11 | "iopub.execute_input": "2023-05-15T09:21:23.424698Z", 12 | "iopub.status.busy": "2023-05-15T09:21:23.424071Z", 13 | "iopub.status.idle": "2023-05-15T09:21:23.435532Z", 14 | "shell.execute_reply": "2023-05-15T09:21:23.434353Z" 15 | }, 16 | "papermill": { 17 | "duration": 0.020962, 18 | "end_time": "2023-05-15T09:21:23.437948", 19 | "exception": false, 20 | "start_time": "2023-05-15T09:21:23.416986", 21 | "status": "completed" 22 | }, 23 | "tags": [] 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "# This Python 3 environment comes with many helpful analytics libraries installed\n", 28 | "# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python\n", 29 | "# For example, here's several helpful packages to load\n", 30 | "\n", 31 | "import numpy as np # linear algebra\n", 32 | "import pandas as pd # data processing, CSV file I/O (e.g. 
pd.read_csv)\n", 33 | "\n", 34 | "# Input data files are available in the read-only \"../input/\" directory\n", 35 | "# For example, running this (by clicking run or pressing Shift+Enter) will list all files under the input directory\n", 36 | "\n", 37 | "import os\n", 38 | "for dirname, _, filenames in os.walk('/kaggle/input'):\n", 39 | " for filename in filenames:\n", 40 | " print(os.path.join(dirname, filename))\n", 41 | "\n", 42 | "# You can write up to 20GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using \"Save & Run All\" \n", 43 | "# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "id": "a28c0045", 49 | "metadata": { 50 | "papermill": { 51 | "duration": 0.003218, 52 | "end_time": "2023-05-15T09:21:23.445106", 53 | "exception": false, 54 | "start_time": "2023-05-15T09:21:23.441888", 55 | "status": "completed" 56 | }, 57 | "tags": [] 58 | }, 59 | "source": [ 60 | "Data Visualization | Bokeh | For Learning Purpose | By Izhar Ul Haq" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 2, 66 | "id": "2f7e1248", 67 | "metadata": { 68 | "execution": { 69 | "iopub.execute_input": "2023-05-15T09:21:23.453883Z", 70 | "iopub.status.busy": "2023-05-15T09:21:23.453458Z", 71 | "iopub.status.idle": "2023-05-15T09:21:23.832086Z", 72 | "shell.execute_reply": "2023-05-15T09:21:23.831464Z" 73 | }, 74 | "papermill": { 75 | "duration": 0.385935, 76 | "end_time": "2023-05-15T09:21:23.834389", 77 | "exception": false, 78 | "start_time": "2023-05-15T09:21:23.448454", 79 | "status": "completed" 80 | }, 81 | "tags": [] 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "from bokeh.plotting import figure, show" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 3, 91 | "id": "94f1b9d1", 92 | "metadata": { 93 | "execution": { 94 | "iopub.execute_input": "2023-05-15T09:21:23.844498Z", 95 | "iopub.status.busy": "2023-05-15T09:21:23.843244Z", 96 | "iopub.status.idle": "2023-05-15T09:21:23.849083Z", 97 | "shell.execute_reply": "2023-05-15T09:21:23.848089Z" 98 | }, 99 | "papermill": { 100 | "duration": 0.012618, 101 | "end_time": "2023-05-15T09:21:23.850935", 102 | "exception": false, 103 | "start_time": "2023-05-15T09:21:23.838317", 104 | "status": "completed" 105 | }, 106 | "tags": [] 107 | }, 108 | "outputs": [], 109 | "source": [ 110 | "import bokeh" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": 4, 116 | "id": "8059fd85", 117 | "metadata": { 118 | "execution": { 119 | "iopub.execute_input": "2023-05-15T09:21:23.859424Z", 120 | "iopub.status.busy": "2023-05-15T09:21:23.859104Z", 121 | "iopub.status.idle": "2023-05-15T09:21:23.868608Z", 122 | "shell.execute_reply": "2023-05-15T09:21:23.867536Z" 123 | }, 124 | "papermill": { 125 | "duration": 0.015554, 126 | "end_time": "2023-05-15T09:21:23.870245", 127 | "exception": false, 128 | "start_time": "2023-05-15T09:21:23.854691", 129 | "status": "completed" 130 | }, 131 | "tags": [] 132 | }, 133 | "outputs": [], 134 | "source": [ 135 | "p = figure()\n", 136 | "# width = 500, height = 400" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 5, 142 | "id": "89c0068e", 143 | "metadata": { 144 | "execution": { 145 | "iopub.execute_input": "2023-05-15T09:21:23.879662Z", 146 | "iopub.status.busy": "2023-05-15T09:21:23.878655Z", 147 | "iopub.status.idle": "2023-05-15T09:21:34.872750Z", 148 | 
"shell.execute_reply": "2023-05-15T09:21:34.871375Z" 149 | }, 150 | "papermill": { 151 | "duration": 11.001761, 152 | "end_time": "2023-05-15T09:21:34.875652", 153 | "exception": false, 154 | "start_time": "2023-05-15T09:21:23.873891", 155 | "status": "completed" 156 | }, 157 | "tags": [] 158 | }, 159 | "outputs": [ 160 | { 161 | "name": "stdout", 162 | "output_type": "stream", 163 | "text": [ 164 | "Requirement already satisfied: bokeh in /opt/conda/lib/python3.10/site-packages (2.4.3)\r\n", 165 | "Requirement already satisfied: numpy>=1.11.3 in /opt/conda/lib/python3.10/site-packages (from bokeh) (1.23.5)\r\n", 166 | "Requirement already satisfied: pillow>=7.1.0 in /opt/conda/lib/python3.10/site-packages (from bokeh) (9.5.0)\r\n", 167 | "Requirement already satisfied: tornado>=5.1 in /opt/conda/lib/python3.10/site-packages (from bokeh) (6.2)\r\n", 168 | "Requirement already satisfied: Jinja2>=2.9 in /opt/conda/lib/python3.10/site-packages (from bokeh) (3.1.2)\r\n", 169 | "Requirement already satisfied: packaging>=16.8 in /opt/conda/lib/python3.10/site-packages (from bokeh) (21.3)\r\n", 170 | "Requirement already satisfied: typing-extensions>=3.10.0 in /opt/conda/lib/python3.10/site-packages (from bokeh) (4.5.0)\r\n", 171 | "Requirement already satisfied: PyYAML>=3.10 in /opt/conda/lib/python3.10/site-packages (from bokeh) (6.0)\r\n", 172 | "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.10/site-packages (from Jinja2>=2.9->bokeh) (2.1.2)\r\n", 173 | "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /opt/conda/lib/python3.10/site-packages (from packaging>=16.8->bokeh) (3.0.9)\r\n", 174 | "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\r\n", 175 | "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" 176 | ] 177 | } 178 | ], 179 | "source": [ 180 | "pip install bokeh" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 6, 186 | "id": "f3d4b0b9", 187 | "metadata": { 188 | "execution": { 189 | "iopub.execute_input": "2023-05-15T09:21:34.885363Z", 190 | "iopub.status.busy": "2023-05-15T09:21:34.885002Z", 191 | "iopub.status.idle": "2023-05-15T09:21:34.967017Z", 192 | "shell.execute_reply": "2023-05-15T09:21:34.966066Z" 193 | }, 194 | "papermill": { 195 | "duration": 0.089411, 196 | "end_time": "2023-05-15T09:21:34.969183", 197 | "exception": false, 198 | "start_time": "2023-05-15T09:21:34.879772", 199 | "status": "completed" 200 | }, 201 | "tags": [] 202 | }, 203 | "outputs": [], 204 | "source": [ 205 | "p.circle([1,2,3,4,5], [2,4,6,8,10],\n", 206 | " size = 5,\n", 207 | " color = \"red\",\n", 208 | " )\n", 209 | "show(p)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "id": "06ad68af", 216 | "metadata": { 217 | "papermill": { 218 | "duration": 0.003728, 219 | "end_time": "2023-05-15T09:21:34.976902", 220 | "exception": false, 221 | "start_time": "2023-05-15T09:21:34.973174", 222 | "status": "completed" 223 | }, 224 | "tags": [] 225 | }, 226 | "outputs": [], 227 | "source": [] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 7, 232 | "id": "61d6f746", 233 | "metadata": { 234 | "execution": { 235 | "iopub.execute_input": "2023-05-15T09:21:34.986111Z", 236 | "iopub.status.busy": "2023-05-15T09:21:34.985682Z", 237 | "iopub.status.idle": "2023-05-15T09:21:35.079767Z", 238 | "shell.execute_reply": "2023-05-15T09:21:35.077648Z" 239 | }, 240 | "papermill": { 241 | "duration": 0.101826, 242 | "end_time": "2023-05-15T09:21:35.082480", 243 | "exception": false, 244 | "start_time": "2023-05-15T09:21:34.980654", 245 | "status": "completed" 246 | }, 247 | "tags": [] 248 | }, 249 | "outputs": [], 250 | "source": [ 251 | "p.circle([1,2,3,4,5], [2,4,6,8,10],\n", 252 | " size = 5,\n", 253 | " color = \"red\",\n", 254 | " alpha = 0.7\n", 255 | " )\n", 256 | "show(p)" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 8, 262 | "id": "03c84eb2", 263 | "metadata": { 264 | "execution": { 265 | "iopub.execute_input": "2023-05-15T09:21:35.091968Z", 266 | "iopub.status.busy": "2023-05-15T09:21:35.091652Z", 267 | "iopub.status.idle": "2023-05-15T09:21:35.097660Z", 268 | "shell.execute_reply": "2023-05-15T09:21:35.096954Z" 269 | }, 270 | "papermill": { 271 | "duration": 0.012766, 272 | "end_time": "2023-05-15T09:21:35.099417", 273 | "exception": false, 274 | "start_time": "2023-05-15T09:21:35.086651", 275 | "status": "completed" 276 | }, 277 | "tags": [] 278 | }, 279 | "outputs": [ 280 | { 281 | "name": "stdout", 282 | "output_type": "stream", 283 | "text": [ 284 | "Hello World!!!\n" 285 | ] 286 | } 287 | ], 288 | "source": [ 289 | "print(\"Hello World!!!\")" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 9, 295 | "id": "7f251d60", 296 | "metadata": { 297 | "execution": { 298 | "iopub.execute_input": "2023-05-15T09:21:35.108452Z", 299 | "iopub.status.busy": "2023-05-15T09:21:35.108157Z", 300 | "iopub.status.idle": "2023-05-15T09:21:35.113149Z", 301 | "shell.execute_reply": "2023-05-15T09:21:35.111867Z" 302 | }, 303 | "papermill": { 304 | "duration": 0.011688, 305 | "end_time": 
"2023-05-15T09:21:35.114979", 306 | "exception": false, 307 | "start_time": "2023-05-15T09:21:35.103291", 308 | "status": "completed" 309 | }, 310 | "tags": [] 311 | }, 312 | "outputs": [ 313 | { 314 | "name": "stdout", 315 | "output_type": "stream", 316 | "text": [ 317 | "2.4.3\n" 318 | ] 319 | } 320 | ], 321 | "source": [ 322 | "import bokeh\n", 323 | "print(bokeh.__version__)" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": 10, 329 | "id": "ca7d1f0f", 330 | "metadata": { 331 | "execution": { 332 | "iopub.execute_input": "2023-05-15T09:21:35.124066Z", 333 | "iopub.status.busy": "2023-05-15T09:21:35.123777Z", 334 | "iopub.status.idle": "2023-05-15T09:21:35.207327Z", 335 | "shell.execute_reply": "2023-05-15T09:21:35.206194Z" 336 | }, 337 | "papermill": { 338 | "duration": 0.091137, 339 | "end_time": "2023-05-15T09:21:35.210013", 340 | "exception": false, 341 | "start_time": "2023-05-15T09:21:35.118876", 342 | "status": "completed" 343 | }, 344 | "tags": [] 345 | }, 346 | "outputs": [], 347 | "source": [ 348 | "show(p)" 349 | ] 350 | }, 351 | { 352 | "cell_type": "code", 353 | "execution_count": 11, 354 | "id": "3c8a08a9", 355 | "metadata": { 356 | "execution": { 357 | "iopub.execute_input": "2023-05-15T09:21:35.219639Z", 358 | "iopub.status.busy": "2023-05-15T09:21:35.219292Z", 359 | "iopub.status.idle": "2023-05-15T09:21:35.225627Z", 360 | "shell.execute_reply": "2023-05-15T09:21:35.224470Z" 361 | }, 362 | "papermill": { 363 | "duration": 0.014196, 364 | "end_time": "2023-05-15T09:21:35.228499", 365 | "exception": false, 366 | "start_time": "2023-05-15T09:21:35.214303", 367 | "status": "completed" 368 | }, 369 | "tags": [] 370 | }, 371 | "outputs": [ 372 | { 373 | "name": "stdout", 374 | "output_type": "stream", 375 | "text": [ 376 | "Figure(id='1002', ...)\n" 377 | ] 378 | } 379 | ], 380 | "source": [ 381 | "print(p)" 382 | ] 383 | }, 384 | { 385 | "cell_type": "code", 386 | "execution_count": 12, 387 | "id": "1ad3f3f5", 388 | "metadata": { 389 | "execution": { 390 | "iopub.execute_input": "2023-05-15T09:21:35.238956Z", 391 | "iopub.status.busy": "2023-05-15T09:21:35.238547Z", 392 | "iopub.status.idle": "2023-05-15T09:21:35.301521Z", 393 | "shell.execute_reply": "2023-05-15T09:21:35.300482Z" 394 | }, 395 | "papermill": { 396 | "duration": 0.070599, 397 | "end_time": "2023-05-15T09:21:35.303528", 398 | "exception": false, 399 | "start_time": "2023-05-15T09:21:35.232929", 400 | "status": "completed" 401 | }, 402 | "tags": [] 403 | }, 404 | "outputs": [ 405 | { 406 | "data": { 407 | "text/html": [ 408 | "
\n", 409 | " \n", 410 | " Loading BokehJS ...\n", 411 | "
\n" 412 | ] 413 | }, 414 | "metadata": {}, 415 | "output_type": "display_data" 416 | }, 417 | { 418 | "data": { 419 | "application/javascript": [ 420 | "(function(root) {\n", 421 | " function now() {\n", 422 | " return new Date();\n", 423 | " }\n", 424 | "\n", 425 | " const force = true;\n", 426 | "\n", 427 | " if (typeof root._bokeh_onload_callbacks === \"undefined\" || force === true) {\n", 428 | " root._bokeh_onload_callbacks = [];\n", 429 | " root._bokeh_is_loading = undefined;\n", 430 | " }\n", 431 | "\n", 432 | "const JS_MIME_TYPE = 'application/javascript';\n", 433 | " const HTML_MIME_TYPE = 'text/html';\n", 434 | " const EXEC_MIME_TYPE = 'application/vnd.bokehjs_exec.v0+json';\n", 435 | " const CLASS_NAME = 'output_bokeh rendered_html';\n", 436 | "\n", 437 | " /**\n", 438 | " * Render data to the DOM node\n", 439 | " */\n", 440 | " function render(props, node) {\n", 441 | " const script = document.createElement(\"script\");\n", 442 | " node.appendChild(script);\n", 443 | " }\n", 444 | "\n", 445 | " /**\n", 446 | " * Handle when an output is cleared or removed\n", 447 | " */\n", 448 | " function handleClearOutput(event, handle) {\n", 449 | " const cell = handle.cell;\n", 450 | "\n", 451 | " const id = cell.output_area._bokeh_element_id;\n", 452 | " const server_id = cell.output_area._bokeh_server_id;\n", 453 | " // Clean up Bokeh references\n", 454 | " if (id != null && id in Bokeh.index) {\n", 455 | " Bokeh.index[id].model.document.clear();\n", 456 | " delete Bokeh.index[id];\n", 457 | " }\n", 458 | "\n", 459 | " if (server_id !== undefined) {\n", 460 | " // Clean up Bokeh references\n", 461 | " const cmd_clean = \"from bokeh.io.state import curstate; print(curstate().uuid_to_server['\" + server_id + \"'].get_sessions()[0].document.roots[0]._id)\";\n", 462 | " cell.notebook.kernel.execute(cmd_clean, {\n", 463 | " iopub: {\n", 464 | " output: function(msg) {\n", 465 | " const id = msg.content.text.trim();\n", 466 | " if (id in Bokeh.index) {\n", 467 | " Bokeh.index[id].model.document.clear();\n", 468 | " delete Bokeh.index[id];\n", 469 | " }\n", 470 | " }\n", 471 | " }\n", 472 | " });\n", 473 | " // Destroy server and session\n", 474 | " const cmd_destroy = \"import bokeh.io.notebook as ion; ion.destroy_server('\" + server_id + \"')\";\n", 475 | " cell.notebook.kernel.execute(cmd_destroy);\n", 476 | " }\n", 477 | " }\n", 478 | "\n", 479 | " /**\n", 480 | " * Handle when a new output is added\n", 481 | " */\n", 482 | " function handleAddOutput(event, handle) {\n", 483 | " const output_area = handle.output_area;\n", 484 | " const output = handle.output;\n", 485 | "\n", 486 | " // limit handleAddOutput to display_data with EXEC_MIME_TYPE content only\n", 487 | " if ((output.output_type != \"display_data\") || (!Object.prototype.hasOwnProperty.call(output.data, EXEC_MIME_TYPE))) {\n", 488 | " return\n", 489 | " }\n", 490 | "\n", 491 | " const toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n", 492 | "\n", 493 | " if (output.metadata[EXEC_MIME_TYPE][\"id\"] !== undefined) {\n", 494 | " toinsert[toinsert.length - 1].firstChild.textContent = output.data[JS_MIME_TYPE];\n", 495 | " // store reference to embed id on output_area\n", 496 | " output_area._bokeh_element_id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n", 497 | " }\n", 498 | " if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n", 499 | " const bk_div = document.createElement(\"div\");\n", 500 | " bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n", 501 | " const script_attrs = 
bk_div.children[0].attributes;\n", 502 | " for (let i = 0; i < script_attrs.length; i++) {\n", 503 | " toinsert[toinsert.length - 1].firstChild.setAttribute(script_attrs[i].name, script_attrs[i].value);\n", 504 | " toinsert[toinsert.length - 1].firstChild.textContent = bk_div.children[0].textContent\n", 505 | " }\n", 506 | " // store reference to server id on output_area\n", 507 | " output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n", 508 | " }\n", 509 | " }\n", 510 | "\n", 511 | " function register_renderer(events, OutputArea) {\n", 512 | "\n", 513 | " function append_mime(data, metadata, element) {\n", 514 | " // create a DOM node to render to\n", 515 | " const toinsert = this.create_output_subarea(\n", 516 | " metadata,\n", 517 | " CLASS_NAME,\n", 518 | " EXEC_MIME_TYPE\n", 519 | " );\n", 520 | " this.keyboard_manager.register_events(toinsert);\n", 521 | " // Render to node\n", 522 | " const props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n", 523 | " render(props, toinsert[toinsert.length - 1]);\n", 524 | " element.append(toinsert);\n", 525 | " return toinsert\n", 526 | " }\n", 527 | "\n", 528 | " /* Handle when an output is cleared or removed */\n", 529 | " events.on('clear_output.CodeCell', handleClearOutput);\n", 530 | " events.on('delete.Cell', handleClearOutput);\n", 531 | "\n", 532 | " /* Handle when a new output is added */\n", 533 | " events.on('output_added.OutputArea', handleAddOutput);\n", 534 | "\n", 535 | " /**\n", 536 | " * Register the mime type and append_mime function with output_area\n", 537 | " */\n", 538 | " OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n", 539 | " /* Is output safe? */\n", 540 | " safe: true,\n", 541 | " /* Index of renderer in `output_area.display_order` */\n", 542 | " index: 0\n", 543 | " });\n", 544 | " }\n", 545 | "\n", 546 | " // register the mime type if in Jupyter Notebook environment and previously unregistered\n", 547 | " if (root.Jupyter !== undefined) {\n", 548 | " const events = require('base/js/events');\n", 549 | " const OutputArea = require('notebook/js/outputarea').OutputArea;\n", 550 | "\n", 551 | " if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n", 552 | " register_renderer(events, OutputArea);\n", 553 | " }\n", 554 | " }\n", 555 | " if (typeof (root._bokeh_timeout) === \"undefined\" || force === true) {\n", 556 | " root._bokeh_timeout = Date.now() + 5000;\n", 557 | " root._bokeh_failed_load = false;\n", 558 | " }\n", 559 | "\n", 560 | " const NB_LOAD_WARNING = {'data': {'text/html':\n", 561 | " \"
\\n\"+\n", 562 | " \"

\\n\"+\n", 563 | " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", 564 | " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", 565 | " \"

\\n\"+\n", 566 | " \"
    \\n\"+\n", 567 | " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", 568 | " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", 569 | " \"
\\n\"+\n", 570 | " \"\\n\"+\n", 571 | " \"from bokeh.resources import INLINE\\n\"+\n", 572 | " \"output_notebook(resources=INLINE)\\n\"+\n", 573 | " \"\\n\"+\n", 574 | " \"
\"}};\n", 575 | "\n", 576 | " function display_loaded() {\n", 577 | " const el = document.getElementById(\"1555\");\n", 578 | " if (el != null) {\n", 579 | " el.textContent = \"BokehJS is loading...\";\n", 580 | " }\n", 581 | " if (root.Bokeh !== undefined) {\n", 582 | " if (el != null) {\n", 583 | " el.textContent = \"BokehJS \" + root.Bokeh.version + \" successfully loaded.\";\n", 584 | " }\n", 585 | " } else if (Date.now() < root._bokeh_timeout) {\n", 586 | " setTimeout(display_loaded, 100)\n", 587 | " }\n", 588 | " }\n", 589 | "\n", 590 | " function run_callbacks() {\n", 591 | " try {\n", 592 | " root._bokeh_onload_callbacks.forEach(function(callback) {\n", 593 | " if (callback != null)\n", 594 | " callback();\n", 595 | " });\n", 596 | " } finally {\n", 597 | " delete root._bokeh_onload_callbacks\n", 598 | " }\n", 599 | " console.debug(\"Bokeh: all callbacks have finished\");\n", 600 | " }\n", 601 | "\n", 602 | " function load_libs(css_urls, js_urls, callback) {\n", 603 | " if (css_urls == null) css_urls = [];\n", 604 | " if (js_urls == null) js_urls = [];\n", 605 | "\n", 606 | " root._bokeh_onload_callbacks.push(callback);\n", 607 | " if (root._bokeh_is_loading > 0) {\n", 608 | " console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", 609 | " return null;\n", 610 | " }\n", 611 | " if (js_urls == null || js_urls.length === 0) {\n", 612 | " run_callbacks();\n", 613 | " return null;\n", 614 | " }\n", 615 | " console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", 616 | " root._bokeh_is_loading = css_urls.length + js_urls.length;\n", 617 | "\n", 618 | " function on_load() {\n", 619 | " root._bokeh_is_loading--;\n", 620 | " if (root._bokeh_is_loading === 0) {\n", 621 | " console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n", 622 | " run_callbacks()\n", 623 | " }\n", 624 | " }\n", 625 | "\n", 626 | " function on_error(url) {\n", 627 | " console.error(\"failed to load \" + url);\n", 628 | " }\n", 629 | "\n", 630 | " for (let i = 0; i < css_urls.length; i++) {\n", 631 | " const url = css_urls[i];\n", 632 | " const element = document.createElement(\"link\");\n", 633 | " element.onload = on_load;\n", 634 | " element.onerror = on_error.bind(null, url);\n", 635 | " element.rel = \"stylesheet\";\n", 636 | " element.type = \"text/css\";\n", 637 | " element.href = url;\n", 638 | " console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n", 639 | " document.body.appendChild(element);\n", 640 | " }\n", 641 | "\n", 642 | " for (let i = 0; i < js_urls.length; i++) {\n", 643 | " const url = js_urls[i];\n", 644 | " const element = document.createElement('script');\n", 645 | " element.onload = on_load;\n", 646 | " element.onerror = on_error.bind(null, url);\n", 647 | " element.async = false;\n", 648 | " element.src = url;\n", 649 | " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", 650 | " document.head.appendChild(element);\n", 651 | " }\n", 652 | " };\n", 653 | "\n", 654 | " function inject_raw_css(css) {\n", 655 | " const element = document.createElement(\"style\");\n", 656 | " element.appendChild(document.createTextNode(css));\n", 657 | " document.body.appendChild(element);\n", 658 | " }\n", 659 | "\n", 660 | " const js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-2.4.3.min.js\", 
\"https://cdn.bokeh.org/bokeh/release/bokeh-tables-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-2.4.3.min.js\"];\n", 661 | " const css_urls = [];\n", 662 | "\n", 663 | " const inline_js = [ function(Bokeh) {\n", 664 | " Bokeh.set_log_level(\"info\");\n", 665 | " },\n", 666 | "function(Bokeh) {\n", 667 | " }\n", 668 | " ];\n", 669 | "\n", 670 | " function run_inline_js() {\n", 671 | " if (root.Bokeh !== undefined || force === true) {\n", 672 | " for (let i = 0; i < inline_js.length; i++) {\n", 673 | " inline_js[i].call(root, root.Bokeh);\n", 674 | " }\n", 675 | "if (force === true) {\n", 676 | " display_loaded();\n", 677 | " }} else if (Date.now() < root._bokeh_timeout) {\n", 678 | " setTimeout(run_inline_js, 100);\n", 679 | " } else if (!root._bokeh_failed_load) {\n", 680 | " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", 681 | " root._bokeh_failed_load = true;\n", 682 | " } else if (force !== true) {\n", 683 | " const cell = $(document.getElementById(\"1555\")).parents('.cell').data().cell;\n", 684 | " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", 685 | " }\n", 686 | " }\n", 687 | "\n", 688 | " if (root._bokeh_is_loading === 0) {\n", 689 | " console.debug(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", 690 | " run_inline_js();\n", 691 | " } else {\n", 692 | " load_libs(css_urls, js_urls, function() {\n", 693 | " console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n", 694 | " run_inline_js();\n", 695 | " });\n", 696 | " }\n", 697 | "}(window));" 698 | ], 699 | "application/vnd.bokehjs_load.v0+json": "(function(root) {\n function now() {\n return new Date();\n }\n\n const force = true;\n\n if (typeof root._bokeh_onload_callbacks === \"undefined\" || force === true) {\n root._bokeh_onload_callbacks = [];\n root._bokeh_is_loading = undefined;\n }\n\n\n if (typeof (root._bokeh_timeout) === \"undefined\" || force === true) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n const NB_LOAD_WARNING = {'data': {'text/html':\n \"
\\n\"+\n \"

\\n\"+\n \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n \"

\\n\"+\n \"
    \\n\"+\n \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n \"
  • use INLINE resources instead, as so:
  • \\n\"+\n \"
\\n\"+\n \"\\n\"+\n \"from bokeh.resources import INLINE\\n\"+\n \"output_notebook(resources=INLINE)\\n\"+\n \"\\n\"+\n \"
\"}};\n\n function display_loaded() {\n const el = document.getElementById(\"1555\");\n if (el != null) {\n el.textContent = \"BokehJS is loading...\";\n }\n if (root.Bokeh !== undefined) {\n if (el != null) {\n el.textContent = \"BokehJS \" + root.Bokeh.version + \" successfully loaded.\";\n }\n } else if (Date.now() < root._bokeh_timeout) {\n setTimeout(display_loaded, 100)\n }\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n\n root._bokeh_onload_callbacks.push(callback);\n if (root._bokeh_is_loading > 0) {\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n }\n if (js_urls == null || js_urls.length === 0) {\n run_callbacks();\n return null;\n }\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n root._bokeh_is_loading = css_urls.length + js_urls.length;\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n\n function on_error(url) {\n console.error(\"failed to load \" + url);\n }\n\n for (let i = 0; i < css_urls.length; i++) {\n const url = css_urls[i];\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error.bind(null, url);\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n }\n\n for (let i = 0; i < js_urls.length; i++) {\n const url = js_urls[i];\n const element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error.bind(null, url);\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n const js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-2.4.3.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-2.4.3.min.js\"];\n const css_urls = [];\n\n const inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {\n }\n ];\n\n function run_inline_js() {\n if (root.Bokeh !== undefined || force === true) {\n for (let i = 0; i < inline_js.length; i++) {\n inline_js[i].call(root, root.Bokeh);\n }\nif (force === true) {\n display_loaded();\n }} else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n } else if (force !== true) {\n const cell = $(document.getElementById(\"1555\")).parents('.cell').data().cell;\n cell.output_area.append_execute_result(NB_LOAD_WARNING)\n }\n }\n\n if 
(root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: BokehJS loaded, going straight to plotting\");\n run_inline_js();\n } else {\n load_libs(css_urls, js_urls, function() {\n console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n run_inline_js();\n });\n }\n}(window));" 700 | }, 701 | "metadata": {}, 702 | "output_type": "display_data" 703 | }, 704 | { 705 | "data": { 706 | "text/html": [ 707 | "\n", 708 | "
\n" 709 | ] 710 | }, 711 | "metadata": {}, 712 | "output_type": "display_data" 713 | }, 714 | { 715 | "data": { 716 | "application/javascript": [ 717 | "(function(root) {\n", 718 | " function embed_document(root) {\n", 719 | " const docs_json = {\"53c8896b-8d16-4b30-b1fd-8c7fa7b4d692\":{\"defs\":[],\"roots\":{\"references\":[{\"attributes\":{\"below\":[{\"id\":\"1527\"}],\"center\":[{\"id\":\"1530\"},{\"id\":\"1534\"}],\"left\":[{\"id\":\"1531\"}],\"renderers\":[{\"id\":\"1553\"}],\"title\":{\"id\":\"1517\"},\"toolbar\":{\"id\":\"1542\"},\"x_range\":{\"id\":\"1519\"},\"x_scale\":{\"id\":\"1523\"},\"y_range\":{\"id\":\"1521\"},\"y_scale\":{\"id\":\"1525\"}},\"id\":\"1516\",\"subtype\":\"Figure\",\"type\":\"Plot\"},{\"attributes\":{\"source\":{\"id\":\"1549\"}},\"id\":\"1554\",\"type\":\"CDSView\"},{\"attributes\":{\"axis\":{\"id\":\"1531\"},\"coordinates\":null,\"dimension\":1,\"group\":null,\"ticker\":null},\"id\":\"1534\",\"type\":\"Grid\"},{\"attributes\":{\"axis_label\":\"Y\",\"coordinates\":null,\"formatter\":{\"id\":\"1558\"},\"group\":null,\"major_label_policy\":{\"id\":\"1559\"},\"ticker\":{\"id\":\"1532\"}},\"id\":\"1531\",\"type\":\"LinearAxis\"},{\"attributes\":{\"tools\":[{\"id\":\"1535\"},{\"id\":\"1536\"},{\"id\":\"1537\"},{\"id\":\"1538\"},{\"id\":\"1539\"},{\"id\":\"1540\"}]},\"id\":\"1542\",\"type\":\"Toolbar\"},{\"attributes\":{},\"id\":\"1559\",\"type\":\"AllLabels\"},{\"attributes\":{\"data\":{\"x\":[1,2,3,4,5],\"y\":[6,7,2,4,5]},\"selected\":{\"id\":\"1564\"},\"selection_policy\":{\"id\":\"1563\"}},\"id\":\"1549\",\"type\":\"ColumnDataSource\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.2},\"fill_color\":{\"value\":\"navy\"},\"hatch_alpha\":{\"value\":0.2},\"hatch_color\":{\"value\":\"navy\"},\"line_alpha\":{\"value\":0.2},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1552\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1540\",\"type\":\"HelpTool\"},{\"attributes\":{},\"id\":\"1519\",\"type\":\"DataRange1d\"},{\"attributes\":{},\"id\":\"1523\",\"type\":\"LinearScale\"},{\"attributes\":{},\"id\":\"1562\",\"type\":\"AllLabels\"},{\"attributes\":{},\"id\":\"1532\",\"type\":\"BasicTicker\"},{\"attributes\":{},\"id\":\"1521\",\"type\":\"DataRange1d\"},{\"attributes\":{},\"id\":\"1561\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{},\"id\":\"1539\",\"type\":\"ResetTool\"},{\"attributes\":{},\"id\":\"1564\",\"type\":\"Selection\"},{\"attributes\":{\"fill_color\":{\"value\":\"navy\"},\"hatch_color\":{\"value\":\"navy\"},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1550\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1535\",\"type\":\"PanTool\"},{\"attributes\":{},\"id\":\"1558\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{\"overlay\":{\"id\":\"1541\"}},\"id\":\"1537\",\"type\":\"BoxZoomTool\"},{\"attributes\":{},\"id\":\"1536\",\"type\":\"WheelZoomTool\"},{\"attributes\":{},\"id\":\"1563\",\"type\":\"UnionRenderers\"},{\"attributes\":{},\"id\":\"1538\",\"type\":\"SaveTool\"},{\"attributes\":{},\"id\":\"1525\",\"type\":\"LinearScale\"},{\"attributes\":{\"axis_label\":\"X\",\"coordinates\":null,\"formatter\":{\"id\":\"1561\"},\"group\":null,\"major_label_policy\":{\"id\":\"1562\"},\"ticker\":{\"id\":\"1528\"}},\"id\":\"1527\",\"type\":\"LinearAxis\"},{\"attributes\":{\"bottom_units\":\"screen\",\"coordinates\":null,\"fill_alpha\":0.5,\"fill_color\":\"lightgrey\",\"group\":null,\"left_units\":\"screen\",\"level\":\"overl
ay\",\"line_alpha\":1.0,\"line_color\":\"black\",\"line_dash\":[4,4],\"line_width\":2,\"right_units\":\"screen\",\"syncable\":false,\"top_units\":\"screen\"},\"id\":\"1541\",\"type\":\"BoxAnnotation\"},{\"attributes\":{\"axis\":{\"id\":\"1527\"},\"coordinates\":null,\"group\":null,\"ticker\":null},\"id\":\"1530\",\"type\":\"Grid\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.1},\"fill_color\":{\"value\":\"navy\"},\"hatch_alpha\":{\"value\":0.1},\"hatch_color\":{\"value\":\"navy\"},\"line_alpha\":{\"value\":0.1},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1551\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1528\",\"type\":\"BasicTicker\"},{\"attributes\":{\"coordinates\":null,\"group\":null,\"text\":\"Scatter Plot\"},\"id\":\"1517\",\"type\":\"Title\"},{\"attributes\":{\"coordinates\":null,\"data_source\":{\"id\":\"1549\"},\"glyph\":{\"id\":\"1550\"},\"group\":null,\"hover_glyph\":null,\"muted_glyph\":{\"id\":\"1552\"},\"nonselection_glyph\":{\"id\":\"1551\"},\"view\":{\"id\":\"1554\"}},\"id\":\"1553\",\"type\":\"GlyphRenderer\"}],\"root_ids\":[\"1516\"]},\"title\":\"Bokeh Application\",\"version\":\"2.4.3\"}};\n", 720 | " const render_items = [{\"docid\":\"53c8896b-8d16-4b30-b1fd-8c7fa7b4d692\",\"root_ids\":[\"1516\"],\"roots\":{\"1516\":\"5c6778d1-c981-4870-9a77-5a13730dd90a\"}}];\n", 721 | " root.Bokeh.embed.embed_items_notebook(docs_json, render_items);\n", 722 | " }\n", 723 | " if (root.Bokeh !== undefined) {\n", 724 | " embed_document(root);\n", 725 | " } else {\n", 726 | " let attempts = 0;\n", 727 | " const timer = setInterval(function(root) {\n", 728 | " if (root.Bokeh !== undefined) {\n", 729 | " clearInterval(timer);\n", 730 | " embed_document(root);\n", 731 | " } else {\n", 732 | " attempts++;\n", 733 | " if (attempts > 100) {\n", 734 | " clearInterval(timer);\n", 735 | " console.log(\"Bokeh: ERROR: Unable to run BokehJS code because BokehJS library is missing\");\n", 736 | " }\n", 737 | " }\n", 738 | " }, 10, root)\n", 739 | " }\n", 740 | "})(window);" 741 | ], 742 | "application/vnd.bokehjs_exec.v0+json": "" 743 | }, 744 | "metadata": { 745 | "application/vnd.bokehjs_exec.v0+json": { 746 | "id": "1516" 747 | } 748 | }, 749 | "output_type": "display_data" 750 | } 751 | ], 752 | "source": [ 753 | "import bokeh.io\n", 754 | "from bokeh.plotting import figure\n", 755 | "from bokeh.io import output_notebook, show\n", 756 | "\n", 757 | "\n", 758 | "# Generate some example data\n", 759 | "x = [1, 2, 3, 4, 5]\n", 760 | "y = [6, 7, 2, 4, 5]\n", 761 | "\n", 762 | "# Create a figure\n", 763 | "p = figure(title='Scatter Plot', x_axis_label='X', y_axis_label='Y')\n", 764 | "\n", 765 | "# Add data to the figure\n", 766 | "p.circle(x, y, size=10, color='navy')\n", 767 | "\n", 768 | "# Set notebook output mode\n", 769 | "output_notebook()\n", 770 | "show(p)" 771 | ] 772 | }, 773 | { 774 | "cell_type": "code", 775 | "execution_count": 13, 776 | "id": "a4d544e5", 777 | "metadata": { 778 | "execution": { 779 | "iopub.execute_input": "2023-05-15T09:21:35.315286Z", 780 | "iopub.status.busy": "2023-05-15T09:21:35.314909Z", 781 | "iopub.status.idle": "2023-05-15T09:21:35.352083Z", 782 | "shell.execute_reply": "2023-05-15T09:21:35.350568Z" 783 | }, 784 | "papermill": { 785 | "duration": 0.045904, 786 | "end_time": "2023-05-15T09:21:35.354678", 787 | "exception": false, 788 | "start_time": "2023-05-15T09:21:35.308774", 789 | "status": "completed" 790 | }, 791 | "tags": [] 792 | }, 793 | "outputs": [ 794 | { 795 | "data": { 
796 | "text/html": [ 797 | "\n", 798 | "
\n" 799 | ] 800 | }, 801 | "metadata": {}, 802 | "output_type": "display_data" 803 | }, 804 | { 805 | "data": { 806 | "application/javascript": [ 807 | "(function(root) {\n", 808 | " function embed_document(root) {\n", 809 | " const docs_json = {\"c7ec5773-40e6-4ced-8505-26921fb1e34d\":{\"defs\":[],\"roots\":{\"references\":[{\"attributes\":{\"below\":[{\"id\":\"1527\"}],\"center\":[{\"id\":\"1530\"},{\"id\":\"1534\"}],\"left\":[{\"id\":\"1531\"}],\"renderers\":[{\"id\":\"1553\"}],\"title\":{\"id\":\"1517\"},\"toolbar\":{\"id\":\"1542\"},\"x_range\":{\"id\":\"1519\"},\"x_scale\":{\"id\":\"1523\"},\"y_range\":{\"id\":\"1521\"},\"y_scale\":{\"id\":\"1525\"}},\"id\":\"1516\",\"subtype\":\"Figure\",\"type\":\"Plot\"},{\"attributes\":{\"source\":{\"id\":\"1549\"}},\"id\":\"1554\",\"type\":\"CDSView\"},{\"attributes\":{\"axis\":{\"id\":\"1531\"},\"coordinates\":null,\"dimension\":1,\"group\":null,\"ticker\":null},\"id\":\"1534\",\"type\":\"Grid\"},{\"attributes\":{\"axis_label\":\"Y\",\"coordinates\":null,\"formatter\":{\"id\":\"1558\"},\"group\":null,\"major_label_policy\":{\"id\":\"1559\"},\"ticker\":{\"id\":\"1532\"}},\"id\":\"1531\",\"type\":\"LinearAxis\"},{\"attributes\":{\"tools\":[{\"id\":\"1535\"},{\"id\":\"1536\"},{\"id\":\"1537\"},{\"id\":\"1538\"},{\"id\":\"1539\"},{\"id\":\"1540\"}]},\"id\":\"1542\",\"type\":\"Toolbar\"},{\"attributes\":{},\"id\":\"1559\",\"type\":\"AllLabels\"},{\"attributes\":{\"data\":{\"x\":[1,2,3,4,5],\"y\":[6,7,2,4,5]},\"selected\":{\"id\":\"1564\"},\"selection_policy\":{\"id\":\"1563\"}},\"id\":\"1549\",\"type\":\"ColumnDataSource\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.2},\"fill_color\":{\"value\":\"navy\"},\"hatch_alpha\":{\"value\":0.2},\"hatch_color\":{\"value\":\"navy\"},\"line_alpha\":{\"value\":0.2},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1552\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1540\",\"type\":\"HelpTool\"},{\"attributes\":{},\"id\":\"1519\",\"type\":\"DataRange1d\"},{\"attributes\":{},\"id\":\"1523\",\"type\":\"LinearScale\"},{\"attributes\":{},\"id\":\"1562\",\"type\":\"AllLabels\"},{\"attributes\":{},\"id\":\"1532\",\"type\":\"BasicTicker\"},{\"attributes\":{},\"id\":\"1521\",\"type\":\"DataRange1d\"},{\"attributes\":{},\"id\":\"1561\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{},\"id\":\"1539\",\"type\":\"ResetTool\"},{\"attributes\":{},\"id\":\"1564\",\"type\":\"Selection\"},{\"attributes\":{\"fill_color\":{\"value\":\"navy\"},\"hatch_color\":{\"value\":\"navy\"},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1550\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1535\",\"type\":\"PanTool\"},{\"attributes\":{},\"id\":\"1558\",\"type\":\"BasicTickFormatter\"},{\"attributes\":{\"overlay\":{\"id\":\"1541\"}},\"id\":\"1537\",\"type\":\"BoxZoomTool\"},{\"attributes\":{},\"id\":\"1536\",\"type\":\"WheelZoomTool\"},{\"attributes\":{},\"id\":\"1563\",\"type\":\"UnionRenderers\"},{\"attributes\":{},\"id\":\"1538\",\"type\":\"SaveTool\"},{\"attributes\":{},\"id\":\"1525\",\"type\":\"LinearScale\"},{\"attributes\":{\"axis_label\":\"X\",\"coordinates\":null,\"formatter\":{\"id\":\"1561\"},\"group\":null,\"major_label_policy\":{\"id\":\"1562\"},\"ticker\":{\"id\":\"1528\"}},\"id\":\"1527\",\"type\":\"LinearAxis\"},{\"attributes\":{\"bottom_units\":\"screen\",\"coordinates\":null,\"fill_alpha\":0.5,\"fill_color\":\"lightgrey\",\"group\":null,\"left_units\":\"screen\",\"level\":\"overl
ay\",\"line_alpha\":1.0,\"line_color\":\"black\",\"line_dash\":[4,4],\"line_width\":2,\"right_units\":\"screen\",\"syncable\":false,\"top_units\":\"screen\"},\"id\":\"1541\",\"type\":\"BoxAnnotation\"},{\"attributes\":{\"axis\":{\"id\":\"1527\"},\"coordinates\":null,\"group\":null,\"ticker\":null},\"id\":\"1530\",\"type\":\"Grid\"},{\"attributes\":{\"fill_alpha\":{\"value\":0.1},\"fill_color\":{\"value\":\"navy\"},\"hatch_alpha\":{\"value\":0.1},\"hatch_color\":{\"value\":\"navy\"},\"line_alpha\":{\"value\":0.1},\"line_color\":{\"value\":\"navy\"},\"size\":{\"value\":10},\"x\":{\"field\":\"x\"},\"y\":{\"field\":\"y\"}},\"id\":\"1551\",\"type\":\"Circle\"},{\"attributes\":{},\"id\":\"1528\",\"type\":\"BasicTicker\"},{\"attributes\":{\"coordinates\":null,\"group\":null,\"text\":\"Scatter Plot\"},\"id\":\"1517\",\"type\":\"Title\"},{\"attributes\":{\"coordinates\":null,\"data_source\":{\"id\":\"1549\"},\"glyph\":{\"id\":\"1550\"},\"group\":null,\"hover_glyph\":null,\"muted_glyph\":{\"id\":\"1552\"},\"nonselection_glyph\":{\"id\":\"1551\"},\"view\":{\"id\":\"1554\"}},\"id\":\"1553\",\"type\":\"GlyphRenderer\"}],\"root_ids\":[\"1516\"]},\"title\":\"Bokeh Application\",\"version\":\"2.4.3\"}};\n", 810 | " const render_items = [{\"docid\":\"c7ec5773-40e6-4ced-8505-26921fb1e34d\",\"root_ids\":[\"1516\"],\"roots\":{\"1516\":\"a0f4b57c-ea44-4b7b-aad3-93ba53bc6104\"}}];\n", 811 | " root.Bokeh.embed.embed_items_notebook(docs_json, render_items);\n", 812 | " }\n", 813 | " if (root.Bokeh !== undefined) {\n", 814 | " embed_document(root);\n", 815 | " } else {\n", 816 | " let attempts = 0;\n", 817 | " const timer = setInterval(function(root) {\n", 818 | " if (root.Bokeh !== undefined) {\n", 819 | " clearInterval(timer);\n", 820 | " embed_document(root);\n", 821 | " } else {\n", 822 | " attempts++;\n", 823 | " if (attempts > 100) {\n", 824 | " clearInterval(timer);\n", 825 | " console.log(\"Bokeh: ERROR: Unable to run BokehJS code because BokehJS library is missing\");\n", 826 | " }\n", 827 | " }\n", 828 | " }, 10, root)\n", 829 | " }\n", 830 | "})(window);" 831 | ], 832 | "application/vnd.bokehjs_exec.v0+json": "" 833 | }, 834 | "metadata": { 835 | "application/vnd.bokehjs_exec.v0+json": { 836 | "id": "1516" 837 | } 838 | }, 839 | "output_type": "display_data" 840 | } 841 | ], 842 | "source": [ 843 | "show(p)" 844 | ] 845 | }, 846 | { 847 | "cell_type": "code", 848 | "execution_count": null, 849 | "id": "8b6e2517", 850 | "metadata": { 851 | "papermill": { 852 | "duration": 0.005685, 853 | "end_time": "2023-05-15T09:21:35.365987", 854 | "exception": false, 855 | "start_time": "2023-05-15T09:21:35.360302", 856 | "status": "completed" 857 | }, 858 | "tags": [] 859 | }, 860 | "outputs": [], 861 | "source": [] 862 | } 863 | ], 864 | "metadata": { 865 | "kernelspec": { 866 | "display_name": "Python 3", 867 | "language": "python", 868 | "name": "python3" 869 | }, 870 | "language_info": { 871 | "codemirror_mode": { 872 | "name": "ipython", 873 | "version": 3 874 | }, 875 | "file_extension": ".py", 876 | "mimetype": "text/x-python", 877 | "name": "python", 878 | "nbconvert_exporter": "python", 879 | "pygments_lexer": "ipython3", 880 | "version": "3.10.10" 881 | }, 882 | "papermill": { 883 | "default_parameters": {}, 884 | "duration": 22.224493, 885 | "end_time": "2023-05-15T09:21:36.396410", 886 | "environment_variables": {}, 887 | "exception": null, 888 | "input_path": "__notebook__.ipynb", 889 | "output_path": "__notebook__.ipynb", 890 | "parameters": {}, 891 | "start_time": "2023-05-15T09:21:14.171917", 892 | 
"version": "2.4.0" 893 | } 894 | }, 895 | "nbformat": 4, 896 | "nbformat_minor": 5 897 | } 898 | --------------------------------------------------------------------------------