├── Deapdataset_emotion
├── Deap_Valance.ipynb
└── README.md
├── Epilepesy_Sezuire_dtection
├── LSTM for Epileptic Seizures .ipynb
├── LSTM_for_Epileptic_Seizures_Prediction_.ipynb
└── README.md
├── LSTM_EEG_Emotion
└── EEG_emotion_LSTM .ipynb
├── README.md
└── single_channel_eeg_emotion
├── eeg_classification_binary.ipynb
└── singlechannel_lstm_emotion_data.ipynb
/Deapdataset_emotion/README.md:
--------------------------------------------------------------------------------
1 | # STEPS
2 | ### 1. Download the preprocessed data from the link https://www.eecs.qmul.ac.uk/mmv/datasets/deap/ (you have to request permission to download)
3 | ### 2. Preprocess using either FFT, DFT or any other techniques to extract features
4 | ### 3. After feature extraction, you can directly do prediction
5 |
--------------------------------------------------------------------------------
/Epilepesy_Sezuire_dtection/README.md:
--------------------------------------------------------------------------------
1 | # Epilepsy Seizure Classification
2 |
--------------------------------------------------------------------------------
/LSTM_EEG_Emotion/EEG_emotion_LSTM .ipynb:
--------------------------------------------------------------------------------
1 | {"cells":[{"metadata":{},"cell_type":"markdown","source":"# EEG Classification using LSTM"},{"metadata":{},"cell_type":"markdown","source":"This notebook uses an LSTM model to classify electroencephalogram (EEG) brain signals and to predict human emotions. The notebook classifies data into 3 classes: negative, neutral and positive.\n\nThe dataset used for this notebook is freely available at the following link: [https://www.kaggle.com/birdy654/eeg-brainwave-dataset-feeling-emotions](https://www.kaggle.com/birdy654/eeg-brainwave-dataset-feeling-emotions)"},{"metadata":{"trusted":true},"cell_type":"markdown","source":"## load & read the dataset"},{"metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","trusted":true},"cell_type":"code","source":"# This Python 3 environment comes with many helpful analytics libraries installed\n# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python\n# For example, here's several helpful packages to load\n\nimport numpy as np # linear algebra\nimport pandas as pd # data processing, CSV file I/O (e.g. 
pd.read_csv)\n\n# Input data files are available in the read-only \"../input/\" directory\n# For example, running this (by clicking run or pressing Shift+Enter) will list all files under the input directory\n\nimport os\nfor dirname, _, filenames in os.walk('/kaggle/input'):\n for filename in filenames:\n print(os.path.join(dirname, filename))\n\n# You can write up to 5GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using \"Save & Run All\" \n# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session","execution_count":1,"outputs":[{"output_type":"stream","text":"/kaggle/input/eeg-brainwave-dataset-feeling-emotions/emotions.csv\n","name":"stdout"}]},{"metadata":{"_uuid":"d629ff2d2480ee46fbb7e2d37f6b5fab8052498a","_cell_guid":"79c7e3d0-c299-4dcb-8224-4455121ee9b0","trusted":true},"cell_type":"code","source":"df=pd.read_csv('/kaggle/input/eeg-brainwave-dataset-feeling-emotions/emotions.csv')\ndf.head()","execution_count":2,"outputs":[{"output_type":"execute_result","execution_count":2,"data":{"text/plain":" # mean_0_a mean_1_a mean_2_a mean_3_a mean_4_a mean_d_0_a mean_d_1_a \\\n0 4.62 30.3 -356.0 15.6 26.3 1.070 0.411 \n1 28.80 33.1 32.0 25.8 22.8 6.550 1.680 \n2 8.90 29.4 -416.0 16.7 23.7 79.900 3.360 \n3 14.90 31.6 -143.0 19.8 24.3 -0.584 -0.284 \n4 28.30 31.3 45.2 27.3 24.5 34.800 -5.790 \n\n mean_d_2_a mean_d_3_a mean_d_4_a ... fft_741_b fft_742_b fft_743_b \\\n0 -15.70 2.06 3.15 ... 23.5 20.3 20.3 \n1 2.88 3.83 -4.82 ... -23.3 -21.8 -21.8 \n2 90.20 89.90 2.03 ... 462.0 -233.0 -233.0 \n3 8.82 2.30 -1.97 ... 299.0 -243.0 -243.0 \n4 3.06 41.40 5.52 ... 
12.0 38.1 38.1 \n\n fft_744_b fft_745_b fft_746_b fft_747_b fft_748_b fft_749_b label \n0 23.5 -215.0 280.00 -162.00 -162.00 280.00 NEGATIVE \n1 -23.3 182.0 2.57 -31.60 -31.60 2.57 NEUTRAL \n2 462.0 -267.0 281.00 -148.00 -148.00 281.00 POSITIVE \n3 299.0 132.0 -12.40 9.53 9.53 -12.40 POSITIVE \n4 12.0 119.0 -17.60 23.90 23.90 -17.60 NEUTRAL \n\n[5 rows x 2549 columns]","text/html":"
\n\n
\n \n \n \n # mean_0_a \n mean_1_a \n mean_2_a \n mean_3_a \n mean_4_a \n mean_d_0_a \n mean_d_1_a \n mean_d_2_a \n mean_d_3_a \n mean_d_4_a \n ... \n fft_741_b \n fft_742_b \n fft_743_b \n fft_744_b \n fft_745_b \n fft_746_b \n fft_747_b \n fft_748_b \n fft_749_b \n label \n \n \n \n \n 0 \n 4.62 \n 30.3 \n -356.0 \n 15.6 \n 26.3 \n 1.070 \n 0.411 \n -15.70 \n 2.06 \n 3.15 \n ... \n 23.5 \n 20.3 \n 20.3 \n 23.5 \n -215.0 \n 280.00 \n -162.00 \n -162.00 \n 280.00 \n NEGATIVE \n \n \n 1 \n 28.80 \n 33.1 \n 32.0 \n 25.8 \n 22.8 \n 6.550 \n 1.680 \n 2.88 \n 3.83 \n -4.82 \n ... \n -23.3 \n -21.8 \n -21.8 \n -23.3 \n 182.0 \n 2.57 \n -31.60 \n -31.60 \n 2.57 \n NEUTRAL \n \n \n 2 \n 8.90 \n 29.4 \n -416.0 \n 16.7 \n 23.7 \n 79.900 \n 3.360 \n 90.20 \n 89.90 \n 2.03 \n ... \n 462.0 \n -233.0 \n -233.0 \n 462.0 \n -267.0 \n 281.00 \n -148.00 \n -148.00 \n 281.00 \n POSITIVE \n \n \n 3 \n 14.90 \n 31.6 \n -143.0 \n 19.8 \n 24.3 \n -0.584 \n -0.284 \n 8.82 \n 2.30 \n -1.97 \n ... \n 299.0 \n -243.0 \n -243.0 \n 299.0 \n 132.0 \n -12.40 \n 9.53 \n 9.53 \n -12.40 \n POSITIVE \n \n \n 4 \n 28.30 \n 31.3 \n 45.2 \n 27.3 \n 24.5 \n 34.800 \n -5.790 \n 3.06 \n 41.40 \n 5.52 \n ... \n 12.0 \n 38.1 \n 38.1 \n 12.0 \n 119.0 \n -17.60 \n 23.90 \n 23.90 \n -17.60 \n NEUTRAL \n \n \n
\n
5 rows × 2549 columns
\n
"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"df.info()","execution_count":3,"outputs":[{"output_type":"stream","text":"\nRangeIndex: 2132 entries, 0 to 2131\nColumns: 2549 entries, # mean_0_a to label\ndtypes: float64(2548), object(1)\nmemory usage: 41.5+ MB\n","name":"stdout"}]},{"metadata":{"trusted":true},"cell_type":"code","source":"import seaborn as sns\nsns.countplot(x='label', data=df)\n","execution_count":4,"outputs":[{"output_type":"execute_result","execution_count":4,"data":{"text/plain":""},"metadata":{}},{"output_type":"display_data","data":{"text/plain":"","image/png":"iVBORw0KGgoAAAANSUhEUgAAAYUAAAEGCAYAAACKB4k+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAV2klEQVR4nO3dfZBdd33f8fcHCRswcbDw2hGWjUSiQGQeDN6YJM4wMaa1krbIUBvkQlDAHaUTQ4EpaWwmTWlaNe5QWhjAmdGEBxGIjXi0QjskjoKhPBoZDLZsFAtsbGEhLSYM4Ukg8+0f97eH69XKurvS2ZW079fMnXvO7/zOud/ds7ufPQ/3d1NVSJIE8LD5LkCSdPQwFCRJHUNBktQxFCRJHUNBktRZPN8FHI5TTz21li9fPt9lSNIx5eabb/5WVY1Nt+yYDoXly5ezbdu2+S5Dko4pSb5+sGWePpIkdQwFSVLHUJAkdQwFSVLHUJAkdQwFSVLHUJAkdQwFSVLHUJAkdY7pdzRr4bjnT58y3yUc9876k1vnuwQdBXo7UkjyxCS3DD2+m+RVSZYkuSHJne35lKF1rkqyM8mOJBf1VZskaXq9HSlU1Q7gHIAki4BvAB8CrgS2VtXVSa5s83+UZBWwFjgbeBzwd0l+uaoeOBL1nPuH7zoSm9Eh3Pz6l8x3CToKnf/m8+e7hOPep17xqSOynbm6pnAh8NWq+jqwBtjU2jcBF7fpNcB1VbWvqu4CdgLnzVF9kiTmLhTWAte26dOrajdAez6ttZ8B3Du0zq7W9iBJ1ifZlmTbxMREjyVL0sLTeygkOQF4LvC+Q3Wdpq0OaKjaWFXjVTU+NjbtcOCSpFmaiyOF3wa+UFV72vyeJEsB2vPe1r4LOHNovWXAfXNQnySpmYtQuIyfnToC2AKsa9PrgOuH2tcmOTHJCmAlcNMc1CdJanp9n0KSRwH/DPj9oeargc1JLgfuAS4FqKrtSTYDtwP7gSuO1J1HkqTR9BoKVfUD4LFT2u5ncDfSdP03ABv6rEmSdHAOcyFJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqSOoSBJ6hgKkqROr6GQ5DFJ3p/kK0nuSPLrSZYkuSHJne35lKH+VyXZmWRHkov6rE2SdKC+jxTeBHy0qp4EPA24A7gS2FpVK4GtbZ4kq4C1wNnAauCaJIt6rk+SNKS3UEhyMvAs4G0AVfXjqvoOsAbY1LptAi5u02uA66pqX1XdBewEzuurPknSgfo8UngCMAG8I8kXk/xFkpOA06tqN0B7Pq
31PwO4d2j9Xa3tQZKsT7ItybaJiYkey5ekhafPUFgMPAP486p6OvB92qmig8g0bXVAQ9XGqhqvqvGxsbEjU6kkCeg3FHYBu6rqc23+/QxCYk+SpQDtee9Q/zOH1l8G3NdjfZKkKXoLhar6JnBvkie2pguB24EtwLrWtg64vk1vAdYmOTHJCmAlcFNf9UmSDrS45+2/AnhPkhOArwEvZRBEm5NcDtwDXApQVduTbGYQHPuBK6rqgZ7rkyQN6TUUquoWYHyaRRcepP8GYEOfNUmSDs53NEuSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKljKEiSOoaCJKnTaygkuTvJrUluSbKttS1JckOSO9vzKUP9r0qyM8mOJBf1WZsk6UBzcaRwQVWdU1Xjbf5KYGtVrQS2tnmSrALWAmcDq4Frkiyag/okSc18nD5aA2xq05uAi4far6uqfVV1F7ATOG8e6pOkBavvUCjgb5PcnGR9azu9qnYDtOfTWvsZwL1D6+5qbQ+SZH2SbUm2TUxM9Fi6JC08i3ve/vlVdV+S04AbknzlIfpmmrY6oKFqI7ARYHx8/IDlkqTZ6/VIoarua897gQ8xOB20J8lSgPa8t3XfBZw5tPoy4L4+65MkPVhvoZDkpCQ/NzkN/HPgNmALsK51Wwdc36a3AGuTnJhkBbASuKmv+iRJB+rz9NHpwIeSTL7OX1XVR5N8Htic5HLgHuBSgKranmQzcDuwH7iiqh7osT5J0hS9hUJVfQ142jTt9wMXHmSdDcCGvmqSJD0039EsSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeqMFApJto7SJkk6tj1kKCR5RJIlwKlJTkmypD2WA48b5QWSLEryxSQfafNLktyQ5M72fMpQ36uS7EyyI8lFs/+yJEmzcagjhd8Hbgae1J4nH9cDbx3xNV4J3DE0fyWwtapWAlvbPElWAWuBs4HVwDVJFo34GpKkI+AhQ6Gq3lRVK4DXVNUTqmpFezytqt5yqI0nWQb8C+AvhprXAJva9Cbg4qH266pqX1XdBewEzpvh1yNJOgyLR+lUVW9O8hvA8uF1qupdh1j1jcB/BH5uqO30qtrd1t+d5LTWfgbw2aF+u1rbgyRZD6wHOOuss0YpX5I0olEvNP8l8D+B3wR+tT3GD7HOvwT2VtXNI9aSadrqgIaqjVU1XlXjY2NjI25akjSKkY4UGATAqqo64I/0QzgfeG6S3wEeAZyc5N3AniRL21HCUmBv678LOHNo/WXAfTN4PUnSYRr1fQq3Ab8wkw1X1VVVtayqljO4gPz3VfViYAuwrnVbx+CiNa19bZITk6wAVgI3zeQ1JUmHZ9QjhVOB25PcBOybbKyq587iNa8GNie5HLgHuLRta3uSzcDtwH7giqp6YBbblyTN0qih8LrDeZGquhG4sU3fD1x4kH4bgA2H81qSpNkb9e6jj/ddiCRp/o0UCkn+iZ/dCXQC8HDg+1V1cl+FSZLm3qhHCsPvMyDJxfjGMkk67sxqlNSq+jDw7CNciyRpno16+uj5Q7MPY/C+hZm8Z0GSdAwY9e6jfzU0vR+4m8FYRZKk48io1xRe2nchkqT5N+rYR8uSfCjJ3iR7knygjYAqSTqOjHqh+R0MhqF4HIORS/+6tUmSjiOjhsJYVb2jqva3xzsBhyiVpOPMqKHwrSQvbh+tuSjJi4H7+yxMkjT3Rg2FlwEvAL4J7AYuAbz4LEnHmVFvSf2vwLqq+keAJEsYfOjOy/oqTJI090Y9UnjqZCAAVNW3gaf3U5Ikab6MGgoPS3LK5Ew7Uhj1KEOSdIwY9Q/7G4BPJ3k/g+EtXoCfeyBJx51R39H8riTbGA
yCF+D5VXV7r5VJkubcyKeAWggYBJJ0HJvV0NmSpOOToSBJ6vQWCkkekeSmJF9Ksj3Jf2ntS5LckOTO9jx8V9NVSXYm2ZHkor5qkyRNr88jhX3As6vqacA5wOokvwZcCWytqpXA1jZPklXAWuBsYDVwTZJFPdYnSZqit1Coge+12Ye3RzH4cJ5NrX0TcHGbXgNcV1X7quouYCd+DrQkzalerym0wfNuAfYCN1TV54DTq2o3QHs+rXU/A7h3aPVdrW3qNtcn2ZZk28TERJ/lS9KC02soVNUDVXUOsAw4L8mTH6J7ptvENNvcWFXjVTU+Nubo3ZJ0JM3J3UdV9R3gRgbXCvYkWQrQnve2bruAM4dWWwbcNxf1SZIG+rz7aCzJY9r0I4HnAF9h8Alu61q3dcD1bXoLsDbJiUlWACuBm/qqT5J0oD4HtVsKbGp3ED0M2FxVH0nyGWBzksuBe4BLAapqe5LNDN41vR+4oqoe6LE+SdIUvYVCVX2ZaYbXrqr7gQsPss4GHGhPkuaN72iWJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSx1CQJHUMBUlSp7dQSHJmko8luSPJ9iSvbO1LktyQ5M72fMrQOlcl2ZlkR5KL+qpNkjS9Po8U9gP/oap+Bfg14Iokq4Arga1VtRLY2uZpy9YCZwOrgWuSLOqxPknSFL2FQlXtrqovtOl/Au4AzgDWAJtat03AxW16DXBdVe2rqruAncB5fdUnSTrQnFxTSLIceDrwOeD0qtoNg+AATmvdzgDuHVptV2ubuq31SbYl2TYxMdFn2ZK04PQeCkkeDXwAeFVVffehuk7TVgc0VG2sqvGqGh8bGztSZUqS6DkUkjycQSC8p6o+2Jr3JFnali8F9rb2XcCZQ6svA+7rsz5J0oP1efdRgLcBd1TV/xpatAVY16bXAdcPta9NcmKSFcBK4Ka+6pMkHWhxj9s+H/hd4NYkt7S21wJXA5uTXA7cA1wKUFXbk2wGbmdw59IVVfVAj/VJkqboLRSq6pNMf50A4MKDrLMB2NBXTZKkh+Y7miVJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktTpLRSSvD3J3iS3DbUtSXJDkjvb8ylDy65KsjPJjiQX9VWXJOng+jxSeCewekrblcDWqloJbG3zJFkFrAXObutck2RRj7VJkqbRWyhU1SeAb09pXgNsatObgIuH2q+rqn1VdRewEzivr9okSdOb62sKp1fVboD2fFprPwO4d6jfrtZ2gCTrk2xLsm1iYqLXYiVpoTlaLjRnmraarmNVbayq8aoaHxsb67ksSVpY5joU9iRZCtCe97b2XcCZQ/2WAffNcW2StODNdShsAda16XXA9UPta5OcmGQFsBK4aY5rk6QFb3FfG05yLfBbwKlJdgH/Gbga2JzkcuAe4FKAqtqeZDNwO7AfuKKqHuirNknS9HoLhaq67CCLLjxI/w3Ahr7qkSQd2tFyoVmSdBQwFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJHUNBktQxFCRJnaMuFJKsTrIjyc4kV853PZK0kBxVoZBkEfBW4LeBVcBlSVbNb1WStHAcVaEAnAfsrKqvVdWPgeuANfNckyQtGKmq+a6hk+QSYHVV/ds2/7vAM6vq5UN91gPr2+wTgR1zXujcORX41nwXoVlz/x27jvd99/iqGptuweK5ruQQMk3bg1KrqjYCG+emnPmVZFtVjc93HZod99+xayHvu6Pt9NEu4Myh+WXAffNUiyQtOEdbKHweWJlkRZ
ITgLXAlnmuSZIWjKPq9FFV7U/ycuBvgEXA26tq+zyXNZ8WxGmy45j779i1YPfdUXWhWZI0v46200eSpHlkKEiSOobCYUhSSd4wNP+aJK9r069L8o0ktww9HtOWnZfkxiR3JvlCkv+T5ClTtv2lJNe26ZcObePHSW5t01cn+b0kb0nyW0k+M2Ubi5PsSbI0yTuT3DW0nU/3/g06RsxmP05+36ds58Yk40k+1/rdk2RiaL3lSe5u++/LST6e5PFTtnH9NPvxdUle0+O34JiW5IH2/b0tyfuSPKq1L2vfzzuTfDXJm9oNLCR5VJL3tH1xW5JPJnl0W/a9JE8Z2m/fHvrd+bu2H29LclKS+5P8/JR6PpzkBe1nZHj/33IsjNBgKByefcDzk5x6kOX/u6rOGXp8J8npwGbgtVW1sqqeAfwZ8IuTKyX5FQb75llJTqqqd0xug8Etuhe0+eGxoT4BLEuyfKjtOcBtVbW7zf/hUC2/cQS+/uPFjPfjQ22sqp7Z9tWfAO8dWu/u1uWCqnoqcCPwx5PrtX8angE8JsmKw/yaFpIftu/vk4EfA/8uSYAPAh+uqpXALwOPBja0dV4J7Kmqp7T1Lgd+MrnBqrp16HduCz/73XnOUJ/vA38LXDzZ1gLiN4GPtKb3TvnZub2fb8GRYygcnv0M7lJ49QzWeTmwqaq6/9Sr6pNV9eGhPv8G+EsGP3DPHWWjVfVT4H3AC4ea1wLXzqC2hWo2+/FI+AxwxtD8vwb+msHwLmvnuJbjxf8Dfgl4NvCjqnoHQFU9wGD/vqwdSSwFvjG5UlXtqKp9s3i9a3nwvnoe8NGq+sEs6593hsLheyvwoqmHkM2rhw4bP9bazga+cIhtvhB4L4MfuMtmUEv3A5rkROB3gA8MLX/9UD3vmcF2F4KZ7scjYTUw/M/AZQz24Uz3uxicLmUwmOatDH7Pbh5eXlXfBe5hEBpvB/4oyWeS/LckK2f5sh8Fzk3y2DY/9R+xF045ffTIWb7OnDEUDlP7QXsX8O+nWTx82uGC6dZv55/vSPKmNv+rwERVfR3YCjwjySkj1vJ54NFJnsjgl+OzVfWPQ12GTx+9aPSv8vg3i/14sHu5R7nH+2NJ9jI4vfdXAO204i8Bn6yqfwD2J3nyjL6IheuRSW4BtjH4o/82BkPmTLcvAlRV3QI8AXg9sAT4fDttOyNt4M4twCXt9OM5DI7wJ009ffTDmb7GXDMUjow3MjgnedIIfbczOG8MDM4/A/8JmPwP9TLgSUnuBr4KnMzgtMKoJk89eOpo5mayH+8Hpob1EkYbRO0C4PEMfhb+tLW9sG3vrrbvl+MppFH9cOiP7ivaH+rtwIPGLkpyMoNhdL4KUFXfq6oPVtUfAO9mcGQ9G5NH6JcA11fVTw7R/6hmKBwBVfVtBhePLx+h+1uB30syfKF38m6JhwGXAk+tquVVtZzB0OEzPYX0YgbnVB0iZAZmuB8/D5yf5BcAkowDJwL3jvhaPwReBbwkyRIG+3j10H4/F0PhcGwFHpXkJdB9VssbgHdW1Q+SnD95BN7uSFoFfH2Wr/UxYCVwBcfBP2KGwpHzBgbD7Q579ZTzicur6psM/iv8sww+Xe7TDP7DeAvwLOAbVfWNoW18AliVZOkoRbS7G34A/H27O2LY66fUc8Isvs7j3aj7cQ+DO1j+bzt18UbgsnbBfyTtrrBrGfwxOQv47NCyu4DvJnlma/rjJLsmH7P/8haGGgzV8Dzg0iR3Av8A/Ah4bevyi8DHk9wKfJHBqacPTLetEV7rp23dxzL4fR029ZrCUX/Xn8NcSJI6HilIkjqGgiSpYyhIkjqGgiSpYyhIkjqGgjQDSb53iOXLk9w2w22+M8klh1eZdGQYCpKkjqEgzUKSRyfZmsHnYdyaZM3Q4sVJNmXwmQnvz8/G9z83g89QuDnJ34z6hkRpLhkK0uz8CHhe+zyMC4A3tDH8AZ4IbGyfmfBd4A+SPBx4M3BJVZ3LYJTODdNsV5
pXi+e7AOkYFeC/J3kW8FMGn4twelt2b1V9qk2/m8HIqx8Fngzc0LJjEbAb6ShjKEiz8yJgDDi3qn7SRjZ9RFs2deyYYhAi26vq1+euRGnmPH0kzc7PA3tbIEwOhT3prCSTf/wvAz4J7ADGJtuTPDzJ2XNasTQCQ0GanfcA40m2MThq+MrQsjuAdUm+zOAzFv68jfF/CfA/knwJuAU46kfM1MLjKKmSpI5HCpKkjqEgSeoYCpKkjqEgSeoYCpKkjqEgSeoYCpKkzv8H9ZmP9bQQBMcAAAAASUVORK5CYII=\n"},"metadata":{"needs_background":"light"}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"df.isnull().sum()\n#no missing values","execution_count":5,"outputs":[{"output_type":"execute_result","execution_count":5,"data":{"text/plain":"# mean_0_a 0\nmean_1_a 0\nmean_2_a 0\nmean_3_a 0\nmean_4_a 0\n ..\nfft_746_b 0\nfft_747_b 0\nfft_748_b 0\nfft_749_b 0\nlabel 0\nLength: 2549, dtype: int64"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"encode = ({'NEUTRAL': 0, 'POSITIVE': 1, 'NEGATIVE': 2} )\n#new dataset with replaced values\ndf_encoded = df.replace(encode)\n\nprint(df_encoded.head())\nprint(df_encoded['label'].value_counts())","execution_count":6,"outputs":[{"output_type":"stream","text":" # mean_0_a mean_1_a mean_2_a mean_3_a mean_4_a mean_d_0_a mean_d_1_a \\\n0 4.62 30.3 -356.0 15.6 26.3 1.070 0.411 \n1 28.80 33.1 32.0 25.8 22.8 6.550 1.680 \n2 8.90 29.4 -416.0 16.7 23.7 79.900 3.360 \n3 14.90 31.6 -143.0 19.8 24.3 -0.584 -0.284 \n4 28.30 31.3 45.2 27.3 24.5 34.800 -5.790 \n\n mean_d_2_a mean_d_3_a mean_d_4_a ... fft_741_b fft_742_b fft_743_b \\\n0 -15.70 2.06 3.15 ... 23.5 20.3 20.3 \n1 2.88 3.83 -4.82 ... -23.3 -21.8 -21.8 \n2 90.20 89.90 2.03 ... 462.0 -233.0 -233.0 \n3 8.82 2.30 -1.97 ... 299.0 -243.0 -243.0 \n4 3.06 41.40 5.52 ... 
12.0 38.1 38.1 \n\n fft_744_b fft_745_b fft_746_b fft_747_b fft_748_b fft_749_b label \n0 23.5 -215.0 280.00 -162.00 -162.00 280.00 2 \n1 -23.3 182.0 2.57 -31.60 -31.60 2.57 0 \n2 462.0 -267.0 281.00 -148.00 -148.00 281.00 1 \n3 299.0 132.0 -12.40 9.53 9.53 -12.40 1 \n4 12.0 119.0 -17.60 23.90 23.90 -17.60 0 \n\n[5 rows x 2549 columns]\n0 716\n1 708\n2 708\nName: label, dtype: int64\n","name":"stdout"}]},{"metadata":{"trusted":true},"cell_type":"code","source":"df_encoded['label'].unique()","execution_count":7,"outputs":[{"output_type":"execute_result","execution_count":7,"data":{"text/plain":"array([2, 0, 1])"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"df_encoded.head()","execution_count":8,"outputs":[{"output_type":"execute_result","execution_count":8,"data":{"text/plain":" # mean_0_a mean_1_a mean_2_a mean_3_a mean_4_a mean_d_0_a mean_d_1_a \\\n0 4.62 30.3 -356.0 15.6 26.3 1.070 0.411 \n1 28.80 33.1 32.0 25.8 22.8 6.550 1.680 \n2 8.90 29.4 -416.0 16.7 23.7 79.900 3.360 \n3 14.90 31.6 -143.0 19.8 24.3 -0.584 -0.284 \n4 28.30 31.3 45.2 27.3 24.5 34.800 -5.790 \n\n mean_d_2_a mean_d_3_a mean_d_4_a ... fft_741_b fft_742_b fft_743_b \\\n0 -15.70 2.06 3.15 ... 23.5 20.3 20.3 \n1 2.88 3.83 -4.82 ... -23.3 -21.8 -21.8 \n2 90.20 89.90 2.03 ... 462.0 -233.0 -233.0 \n3 8.82 2.30 -1.97 ... 299.0 -243.0 -243.0 \n4 3.06 41.40 5.52 ... 12.0 38.1 38.1 \n\n fft_744_b fft_745_b fft_746_b fft_747_b fft_748_b fft_749_b label \n0 23.5 -215.0 280.00 -162.00 -162.00 280.00 2 \n1 -23.3 182.0 2.57 -31.60 -31.60 2.57 0 \n2 462.0 -267.0 281.00 -148.00 -148.00 281.00 1 \n3 299.0 132.0 -12.40 9.53 9.53 -12.40 1 \n4 12.0 119.0 -17.60 23.90 23.90 -17.60 0 \n\n[5 rows x 2549 columns]","text/html":"\n\n
\n \n \n \n # mean_0_a \n mean_1_a \n mean_2_a \n mean_3_a \n mean_4_a \n mean_d_0_a \n mean_d_1_a \n mean_d_2_a \n mean_d_3_a \n mean_d_4_a \n ... \n fft_741_b \n fft_742_b \n fft_743_b \n fft_744_b \n fft_745_b \n fft_746_b \n fft_747_b \n fft_748_b \n fft_749_b \n label \n \n \n \n \n 0 \n 4.62 \n 30.3 \n -356.0 \n 15.6 \n 26.3 \n 1.070 \n 0.411 \n -15.70 \n 2.06 \n 3.15 \n ... \n 23.5 \n 20.3 \n 20.3 \n 23.5 \n -215.0 \n 280.00 \n -162.00 \n -162.00 \n 280.00 \n 2 \n \n \n 1 \n 28.80 \n 33.1 \n 32.0 \n 25.8 \n 22.8 \n 6.550 \n 1.680 \n 2.88 \n 3.83 \n -4.82 \n ... \n -23.3 \n -21.8 \n -21.8 \n -23.3 \n 182.0 \n 2.57 \n -31.60 \n -31.60 \n 2.57 \n 0 \n \n \n 2 \n 8.90 \n 29.4 \n -416.0 \n 16.7 \n 23.7 \n 79.900 \n 3.360 \n 90.20 \n 89.90 \n 2.03 \n ... \n 462.0 \n -233.0 \n -233.0 \n 462.0 \n -267.0 \n 281.00 \n -148.00 \n -148.00 \n 281.00 \n 1 \n \n \n 3 \n 14.90 \n 31.6 \n -143.0 \n 19.8 \n 24.3 \n -0.584 \n -0.284 \n 8.82 \n 2.30 \n -1.97 \n ... \n 299.0 \n -243.0 \n -243.0 \n 299.0 \n 132.0 \n -12.40 \n 9.53 \n 9.53 \n -12.40 \n 1 \n \n \n 4 \n 28.30 \n 31.3 \n 45.2 \n 27.3 \n 24.5 \n 34.800 \n -5.790 \n 3.06 \n 41.40 \n 5.52 \n ... \n 12.0 \n 38.1 \n 38.1 \n 12.0 \n 119.0 \n -17.60 \n 23.90 \n 23.90 \n -17.60 \n 0 \n \n \n
\n
5 rows × 2549 columns
\n
"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"x=df_encoded.drop([\"label\"] ,axis=1)\nx.shape","execution_count":9,"outputs":[{"output_type":"execute_result","execution_count":9,"data":{"text/plain":"(2132, 2548)"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"y = df_encoded.loc[:,'label'].values\ny.shape\n","execution_count":10,"outputs":[{"output_type":"execute_result","execution_count":10,"data":{"text/plain":"(2132,)"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"from sklearn.preprocessing import StandardScaler\nscaler = StandardScaler()\nscaler.fit(x)\nx = scaler.transform(x)\nfrom keras.utils import to_categorical\ny = to_categorical(y)\ny","execution_count":11,"outputs":[{"output_type":"stream","text":"Using TensorFlow backend.\n","name":"stderr"},{"output_type":"execute_result","execution_count":11,"data":{"text/plain":"array([[0., 0., 1.],\n [1., 0., 0.],\n [0., 1., 0.],\n ...,\n [0., 0., 1.],\n [0., 0., 1.],\n [1., 0., 0.]], dtype=float32)"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"y","execution_count":12,"outputs":[{"output_type":"execute_result","execution_count":12,"data":{"text/plain":"array([[0., 0., 1.],\n [1., 0., 0.],\n [0., 1., 0.],\n ...,\n [0., 0., 1.],\n [0., 0., 1.],\n [1., 0., 0.]], dtype=float32)"},"metadata":{}}]},{"metadata":{"trusted":true},"cell_type":"code","source":"from sklearn.model_selection import train_test_split\nx_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state = 4)","execution_count":14,"outputs":[]},{"metadata":{"trusted":true},"cell_type":"code","source":"x_train = np.reshape(x_train, (x_train.shape[0],1,x.shape[1]))\nx_test = np.reshape(x_test, (x_test.shape[0],1,x.shape[1]))\n","execution_count":15,"outputs":[]},{"metadata":{"trusted":true},"cell_type":"code","source":"import tensorflow as tf\nfrom tensorflow.keras import Sequential\n\nfrom 
tensorflow.keras.layers import Dense, Dropout\nfrom tensorflow.keras.layers import Embedding\nfrom tensorflow.keras.layers import LSTM\ntf.keras.backend.clear_session()\n\nmodel = Sequential()\nmodel.add(LSTM(64, input_shape=(1,2548),activation=\"relu\",return_sequences=True))\nmodel.add(Dropout(0.2))\nmodel.add(LSTM(32,activation=\"sigmoid\"))\nmodel.add(Dropout(0.2))\n#model.add(LSTM(100,return_sequences=True))\n#model.add(Dropout(0.2))\n#model.add(LSTM(50))\n#model.add(Dropout(0.2))\nmodel.add(Dense(3, activation='sigmoid'))\nfrom keras.optimizers import SGD\nmodel.compile(loss = 'categorical_crossentropy', optimizer = \"adam\", metrics = ['accuracy'])\nmodel.summary()","execution_count":18,"outputs":[{"output_type":"stream","text":"Model: \"sequential\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nlstm (LSTM) (None, 1, 64) 668928 \n_________________________________________________________________\ndropout (Dropout) (None, 1, 64) 0 \n_________________________________________________________________\nlstm_1 (LSTM) (None, 32) 12416 \n_________________________________________________________________\ndropout_1 (Dropout) (None, 32) 0 \n_________________________________________________________________\ndense (Dense) (None, 3) 99 \n=================================================================\nTotal params: 681,443\nTrainable params: 681,443\nNon-trainable params: 0\n_________________________________________________________________\n","name":"stdout"}]},{"metadata":{"trusted":true},"cell_type":"code","source":"history = model.fit(x_train, y_train, epochs = 100, validation_data= (x_test, y_test))\nscore, acc = model.evaluate(x_test, y_test)\n","execution_count":19,"outputs":[{"output_type":"stream","text":"Train on 1705 samples, validate on 427 samples\nEpoch 1/50\n1705/1705 [==============================] - 3s 2ms/sample - loss: 0.6955 - 
accuracy: 0.8041 - val_loss: 0.4640 - val_accuracy: 0.9157\nEpoch 2/50\n1705/1705 [==============================] - 0s 223us/sample - loss: 0.4137 - accuracy: 0.9331 - val_loss: 0.3570 - val_accuracy: 0.9251\nEpoch 3/50\n1705/1705 [==============================] - 0s 212us/sample - loss: 0.3213 - accuracy: 0.9372 - val_loss: 0.3024 - val_accuracy: 0.9180\nEpoch 4/50\n1705/1705 [==============================] - 0s 262us/sample - loss: 0.2528 - accuracy: 0.9460 - val_loss: 0.2562 - val_accuracy: 0.9251\nEpoch 5/50\n1705/1705 [==============================] - 0s 257us/sample - loss: 0.2116 - accuracy: 0.9537 - val_loss: 0.2363 - val_accuracy: 0.9274\nEpoch 6/50\n1705/1705 [==============================] - 0s 243us/sample - loss: 0.1819 - accuracy: 0.9554 - val_loss: 0.1987 - val_accuracy: 0.9344\nEpoch 7/50\n1705/1705 [==============================] - 0s 209us/sample - loss: 0.1333 - accuracy: 0.9795 - val_loss: 0.1729 - val_accuracy: 0.9485\nEpoch 8/50\n1705/1705 [==============================] - 0s 212us/sample - loss: 0.1065 - accuracy: 0.9836 - val_loss: 0.1561 - val_accuracy: 0.9485\nEpoch 9/50\n1705/1705 [==============================] - 0s 241us/sample - loss: 0.0871 - accuracy: 0.9918 - val_loss: 0.1248 - val_accuracy: 0.9625\nEpoch 10/50\n1705/1705 [==============================] - 0s 219us/sample - loss: 0.0807 - accuracy: 0.9853 - val_loss: 0.1425 - val_accuracy: 0.9555\nEpoch 11/50\n1705/1705 [==============================] - 0s 220us/sample - loss: 0.0676 - accuracy: 0.9889 - val_loss: 0.1286 - val_accuracy: 0.9602\nEpoch 12/50\n1705/1705 [==============================] - 0s 217us/sample - loss: 0.0457 - accuracy: 0.9965 - val_loss: 0.1002 - val_accuracy: 0.9719\nEpoch 13/50\n1705/1705 [==============================] - 0s 210us/sample - loss: 0.0558 - accuracy: 0.9900 - val_loss: 0.1102 - val_accuracy: 0.9696\nEpoch 14/50\n1705/1705 [==============================] - 0s 223us/sample - loss: 0.0411 - accuracy: 0.9947 - val_loss: 0.1368 - 
val_accuracy: 0.9602\nEpoch 15/50\n1705/1705 [==============================] - 0s 221us/sample - loss: 0.0372 - accuracy: 0.9947 - val_loss: 0.0908 - val_accuracy: 0.9742\nEpoch 16/50\n1705/1705 [==============================] - 0s 211us/sample - loss: 0.0372 - accuracy: 0.9941 - val_loss: 0.0856 - val_accuracy: 0.9789\nEpoch 17/50\n1705/1705 [==============================] - 0s 214us/sample - loss: 0.0350 - accuracy: 0.9941 - val_loss: 0.1038 - val_accuracy: 0.9719\nEpoch 18/50\n1705/1705 [==============================] - 0s 213us/sample - loss: 0.0276 - accuracy: 0.9971 - val_loss: 0.1602 - val_accuracy: 0.9602\nEpoch 19/50\n1705/1705 [==============================] - 0s 235us/sample - loss: 0.0280 - accuracy: 0.9977 - val_loss: 0.1004 - val_accuracy: 0.9766\nEpoch 20/50\n1705/1705 [==============================] - 0s 215us/sample - loss: 0.0243 - accuracy: 0.9977 - val_loss: 0.1437 - val_accuracy: 0.9578\nEpoch 21/50\n1705/1705 [==============================] - 0s 210us/sample - loss: 0.0206 - accuracy: 0.9988 - val_loss: 0.1247 - val_accuracy: 0.9696\nEpoch 22/50\n1705/1705 [==============================] - 0s 221us/sample - loss: 0.0142 - accuracy: 1.0000 - val_loss: 0.1269 - val_accuracy: 0.9672\nEpoch 23/50\n1705/1705 [==============================] - 0s 212us/sample - loss: 0.0152 - accuracy: 0.9994 - val_loss: 0.1172 - val_accuracy: 0.9649\nEpoch 24/50\n1705/1705 [==============================] - 0s 219us/sample - loss: 0.0125 - accuracy: 1.0000 - val_loss: 0.1234 - val_accuracy: 0.9672\nEpoch 25/50\n1705/1705 [==============================] - 0s 216us/sample - loss: 0.0137 - accuracy: 0.9994 - val_loss: 0.1220 - val_accuracy: 0.9742\nEpoch 26/50\n1705/1705 [==============================] - 0s 209us/sample - loss: 0.0125 - accuracy: 0.9988 - val_loss: 0.1579 - val_accuracy: 0.9625\nEpoch 27/50\n1705/1705 [==============================] - 0s 207us/sample - loss: 0.0245 - accuracy: 0.9947 - val_loss: 0.1268 - val_accuracy: 0.9672\nEpoch 
28/50\n1705/1705 [==============================] - 0s 224us/sample - loss: 0.0219 - accuracy: 0.9965 - val_loss: 0.1305 - val_accuracy: 0.9649\nEpoch 29/50\n1705/1705 [==============================] - 0s 213us/sample - loss: 0.0246 - accuracy: 0.9947 - val_loss: 0.1831 - val_accuracy: 0.9461\nEpoch 30/50\n1705/1705 [==============================] - 0s 219us/sample - loss: 0.0364 - accuracy: 0.9924 - val_loss: 0.1056 - val_accuracy: 0.9719\nEpoch 31/50\n1705/1705 [==============================] - 0s 215us/sample - loss: 0.0244 - accuracy: 0.9930 - val_loss: 0.1247 - val_accuracy: 0.9649\nEpoch 32/50\n1705/1705 [==============================] - 0s 214us/sample - loss: 0.0344 - accuracy: 0.9906 - val_loss: 0.0854 - val_accuracy: 0.9789\nEpoch 33/50\n1705/1705 [==============================] - 0s 220us/sample - loss: 0.0236 - accuracy: 0.9947 - val_loss: 0.1368 - val_accuracy: 0.9649\nEpoch 34/50\n1705/1705 [==============================] - 0s 220us/sample - loss: 0.0244 - accuracy: 0.9941 - val_loss: 0.1127 - val_accuracy: 0.9672\nEpoch 35/50\n1705/1705 [==============================] - 0s 226us/sample - loss: 0.0215 - accuracy: 0.9935 - val_loss: 0.0891 - val_accuracy: 0.9766\nEpoch 36/50\n1705/1705 [==============================] - 0s 234us/sample - loss: 0.0219 - accuracy: 0.9941 - val_loss: 0.1685 - val_accuracy: 0.9602\nEpoch 37/50\n1705/1705 [==============================] - 0s 201us/sample - loss: 0.0115 - accuracy: 0.9977 - val_loss: 0.1021 - val_accuracy: 0.9696\nEpoch 38/50\n1705/1705 [==============================] - 0s 212us/sample - loss: 0.0090 - accuracy: 1.0000 - val_loss: 0.1278 - val_accuracy: 0.9696\nEpoch 39/50\n1705/1705 [==============================] - 0s 212us/sample - loss: 0.0202 - accuracy: 0.9959 - val_loss: 0.1123 - val_accuracy: 0.9766\nEpoch 40/50\n1705/1705 [==============================] - 0s 205us/sample - loss: 0.0136 - accuracy: 0.9971 - val_loss: 0.1532 - val_accuracy: 0.9696\nEpoch 41/50\n1705/1705 
[==============================] - 0s 224us/sample - loss: 0.0104 - accuracy: 0.9988 - val_loss: 0.1074 - val_accuracy: 0.9742\nEpoch 42/50\n1705/1705 [==============================] - 0s 217us/sample - loss: 0.0103 - accuracy: 0.9982 - val_loss: 0.1122 - val_accuracy: 0.9766\nEpoch 43/50\n1705/1705 [==============================] - 0s 211us/sample - loss: 0.0199 - accuracy: 0.9947 - val_loss: 0.1564 - val_accuracy: 0.9602\nEpoch 44/50\n1705/1705 [==============================] - 0s 208us/sample - loss: 0.0098 - accuracy: 0.9982 - val_loss: 0.1403 - val_accuracy: 0.9742\nEpoch 45/50\n1705/1705 [==============================] - 0s 208us/sample - loss: 0.0062 - accuracy: 0.9994 - val_loss: 0.1132 - val_accuracy: 0.9766\nEpoch 46/50\n1705/1705 [==============================] - 0s 205us/sample - loss: 0.0115 - accuracy: 0.9982 - val_loss: 0.1920 - val_accuracy: 0.9602\nEpoch 47/50\n1705/1705 [==============================] - 0s 206us/sample - loss: 0.0108 - accuracy: 0.9982 - val_loss: 0.1171 - val_accuracy: 0.9742\nEpoch 48/50\n1705/1705 [==============================] - 0s 210us/sample - loss: 0.0082 - accuracy: 0.9988 - val_loss: 0.1393 - val_accuracy: 0.9696\nEpoch 49/50\n1705/1705 [==============================] - 0s 209us/sample - loss: 0.0055 - accuracy: 0.9994 - val_loss: 0.1482 - val_accuracy: 0.9625\nEpoch 50/50\n1705/1705 [==============================] - 0s 213us/sample - loss: 0.0053 - accuracy: 0.9994 - val_loss: 0.1093 - val_accuracy: 0.9719\n427/427 [==============================] - 0s 72us/sample - loss: 0.1093 - accuracy: 0.9719\n","name":"stdout"}]},{"metadata":{"trusted":true},"cell_type":"code","source":"from sklearn.metrics import accuracy_score\npred = model.predict(x_test)\npredict_classes = np.argmax(pred,axis=1)\nexpected_classes = np.argmax(y_test,axis=1)\nprint(expected_classes.shape)\nprint(predict_classes.shape)\ncorrect = accuracy_score(expected_classes,predict_classes)\nprint(f\"Training Accuracy: 
{correct}\")","execution_count":21,"outputs":[{"output_type":"stream","text":"(427,)\n(427,)\nTraining Accuracy: 0.9718969555035128\n","name":"stdout"}]}],"metadata":{"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"language_info":{"name":"python","version":"3.7.6","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat":4,"nbformat_minor":4}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # EEG_Classification_Deeplearning
--------------------------------------------------------------------------------
/single_channel_eeg_emotion/eeg_classification_binary.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "eeg-classification-binary.ipynb",
7 | "provenance": [],
8 | "authorship_tag": "ABX9TyNo0t3tYuPY2RAmGRDsESzC",
9 | "include_colab_link": true
10 | },
11 | "kernelspec": {
12 | "name": "python3",
13 | "display_name": "Python 3"
14 | }
15 | },
16 | "cells": [
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {
20 | "id": "view-in-github",
21 | "colab_type": "text"
22 | },
23 | "source": [
24 | " "
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "metadata": {
30 | "id": "3AAQXuDq19MI",
31 | "colab_type": "code",
32 | "outputId": "4f066411-2a6d-42dd-d8c5-47023e8b6f9a",
33 | "colab": {
34 | "base_uri": "https://localhost:8080/",
35 | "height": 34
36 | }
37 | },
38 | "source": [
39 | "from google.colab import drive\n",
40 | "drive.mount('/content/gdrive')\n"
41 | ],
42 | "execution_count": 0,
43 | "outputs": [
44 | {
45 | "output_type": "stream",
46 | "text": [
47 | "Drive already mounted at /content/gdrive; to attempt to forcibly remount, call drive.mount(\"/content/gdrive\", force_remount=True).\n"
48 | ],
49 | "name": "stdout"
50 | }
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "metadata": {
56 | "id": "T-Pis1xM2Ly2",
57 | "colab_type": "code",
58 | "colab": {}
59 | },
60 | "source": [
61 | "import pandas as pd\n",
62 | "import tensorflow as tf\n",
63 | "\n",
64 | "\n",
65 | "import numpy as np\n",
66 | "import random\n",
67 | "random.seed(72)"
68 | ],
69 | "execution_count": 0,
70 | "outputs": []
71 | },
72 | {
73 | "cell_type": "code",
74 | "metadata": {
75 | "id": "UcSJO3QY2bdn",
76 | "colab_type": "code",
77 | "outputId": "e609c827-13a3-4959-f17d-4bebf53ae900",
78 | "colab": {
79 | "base_uri": "https://localhost:8080/",
80 | "height": 402
81 | }
82 | },
83 | "source": [
84 | "df=pd.read_csv(\"/content/gdrive/My Drive/data/emotion1channel.csv\")\n",
85 | "df"
86 | ],
87 | "execution_count": 0,
88 | "outputs": [
89 | {
90 | "output_type": "execute_result",
91 | "data": {
92 | "text/html": [
93 | "\n",
94 | "\n",
107 | "
\n",
108 | " \n",
109 | " \n",
110 | " \n",
111 | " attention \n",
112 | " meditation \n",
113 | " delta \n",
114 | " theta \n",
115 | " lowAplha \n",
116 | " highAlpha \n",
117 | " lowBeta \n",
118 | " highBeta \n",
119 | " lowGamma \n",
120 | " highGamma \n",
121 | " class \n",
122 | " \n",
123 | " \n",
124 | " \n",
125 | " \n",
126 | " 0 \n",
127 | " 88 \n",
128 | " 17 \n",
129 | " 1290697 \n",
130 | " 18187 \n",
131 | " 6345 \n",
132 | " 7462 \n",
133 | " 7266 \n",
134 | " 4278 \n",
135 | " 2589 \n",
136 | " 1228 \n",
137 | " 7 \n",
138 | " \n",
139 | " \n",
140 | " 1 \n",
141 | " 88 \n",
142 | " 17 \n",
143 | " 105432 \n",
144 | " 21344 \n",
145 | " 8323 \n",
146 | " 4496 \n",
147 | " 4784 \n",
148 | " 12071 \n",
149 | " 2024 \n",
150 | " 1123 \n",
151 | " 7 \n",
152 | " \n",
153 | " \n",
154 | " 2 \n",
155 | " 83 \n",
156 | " 29 \n",
157 | " 732143 \n",
158 | " 37527 \n",
159 | " 48422 \n",
160 | " 10286 \n",
161 | " 9499 \n",
162 | " 6050 \n",
163 | " 2362 \n",
164 | " 4157 \n",
165 | " 7 \n",
166 | " \n",
167 | " \n",
168 | " 3 \n",
169 | " 80 \n",
170 | " 26 \n",
171 | " 21265 \n",
172 | " 24517 \n",
173 | " 7051 \n",
174 | " 1790 \n",
175 | " 9106 \n",
176 | " 5771 \n",
177 | " 1977 \n",
178 | " 3265 \n",
179 | " 7 \n",
180 | " \n",
181 | " \n",
182 | " 4 \n",
183 | " 69 \n",
184 | " 20 \n",
185 | " 349390 \n",
186 | " 145647 \n",
187 | " 10068 \n",
188 | " 21707 \n",
189 | " 11878 \n",
190 | " 19883 \n",
191 | " 9971 \n",
192 | " 6592 \n",
193 | " 7 \n",
194 | " \n",
195 | " \n",
196 | " ... \n",
197 | " ... \n",
198 | " ... \n",
199 | " ... \n",
200 | " ... \n",
201 | " ... \n",
202 | " ... \n",
203 | " ... \n",
204 | " ... \n",
205 | " ... \n",
206 | " ... \n",
207 | " ... \n",
208 | " \n",
209 | " \n",
210 | " 13364 \n",
211 | " 66 \n",
212 | " 61 \n",
213 | " 36288 \n",
214 | " 10942 \n",
215 | " 15819 \n",
216 | " 4781 \n",
217 | " 10488 \n",
218 | " 11690 \n",
219 | " 7245 \n",
220 | " 2287 \n",
221 | " 1 \n",
222 | " \n",
223 | " \n",
224 | " 13365 \n",
225 | " 63 \n",
226 | " 81 \n",
227 | " 434483 \n",
228 | " 16548 \n",
229 | " 25760 \n",
230 | " 10239 \n",
231 | " 7332 \n",
232 | " 3822 \n",
233 | " 9214 \n",
234 | " 5527 \n",
235 | " 1 \n",
236 | " \n",
237 | " \n",
238 | " 13366 \n",
239 | " 61 \n",
240 | " 91 \n",
241 | " 11198 \n",
242 | " 21200 \n",
243 | " 18905 \n",
244 | " 20228 \n",
245 | " 4850 \n",
246 | " 12856 \n",
247 | " 5412 \n",
248 | " 7044 \n",
249 | " 1 \n",
250 | " \n",
251 | " \n",
252 | " 13367 \n",
253 | " 56 \n",
254 | " 88 \n",
255 | " 537338 \n",
256 | " 31723 \n",
257 | " 1915 \n",
258 | " 13033 \n",
259 | " 10876 \n",
260 | " 8500 \n",
261 | " 7453 \n",
262 | " 3461 \n",
263 | " 1 \n",
264 | " \n",
265 | " \n",
266 | " 13368 \n",
267 | " 51 \n",
268 | " 90 \n",
269 | " 534966 \n",
270 | " 54906 \n",
271 | " 30588 \n",
272 | " 22906 \n",
273 | " 14624 \n",
274 | " 17227 \n",
275 | " 14293 \n",
276 | " 3204 \n",
277 | " 1 \n",
278 | " \n",
279 | " \n",
280 | "
\n",
281 | "
13369 rows × 11 columns
\n",
282 | "
"
283 | ],
284 | "text/plain": [
285 | " attention meditation delta ... lowGamma highGamma class\n",
286 | "0 88 17 1290697 ... 2589 1228 7\n",
287 | "1 88 17 105432 ... 2024 1123 7\n",
288 | "2 83 29 732143 ... 2362 4157 7\n",
289 | "3 80 26 21265 ... 1977 3265 7\n",
290 | "4 69 20 349390 ... 9971 6592 7\n",
291 | "... ... ... ... ... ... ... ...\n",
292 | "13364 66 61 36288 ... 7245 2287 1\n",
293 | "13365 63 81 434483 ... 9214 5527 1\n",
294 | "13366 61 91 11198 ... 5412 7044 1\n",
295 | "13367 56 88 537338 ... 7453 3461 1\n",
296 | "13368 51 90 534966 ... 14293 3204 1\n",
297 | "\n",
298 | "[13369 rows x 11 columns]"
299 | ]
300 | },
301 | "metadata": {
302 | "tags": []
303 | },
304 | "execution_count": 18
305 | }
306 | ]
307 | },
308 | {
309 | "cell_type": "code",
310 | "metadata": {
311 | "id": "JLixuhjh2g6a",
312 | "colab_type": "code",
313 | "outputId": "deac9350-8345-4dee-9f66-621fb1d46a2b",
314 | "colab": {
315 | "base_uri": "https://localhost:8080/",
316 | "height": 34
317 | }
318 | },
319 | "source": [
320 | "\n",
321 | "df[\"class\"].unique()\n",
322 | "\n"
323 | ],
324 | "execution_count": 0,
325 | "outputs": [
326 | {
327 | "output_type": "execute_result",
328 | "data": {
329 | "text/plain": [
330 | "array([7, 5, 3, 4, 0, 2, 1, 6])"
331 | ]
332 | },
333 | "metadata": {
334 | "tags": []
335 | },
336 | "execution_count": 19
337 | }
338 | ]
339 | },
340 | {
341 | "cell_type": "code",
342 | "metadata": {
343 | "id": "nELeZtus2owd",
344 | "colab_type": "code",
345 | "outputId": "6d6d9fd4-e76f-4d12-a220-77f2829c8596",
346 | "colab": {
347 | "base_uri": "https://localhost:8080/",
348 | "height": 195
349 | }
350 | },
351 | "source": [
352 | "df.loc[df[\"class\"] == 1, \"class\"] = 0\n",
353 | "df.loc[df[\"class\"] == 2, \"class\"] = 0\n",
354 | "df.loc[df[\"class\"] == 3, \"class\"] = 0\n",
355 | "df.loc[df[\"class\"] == 4, \"class\"] = 1\n",
356 | "df.loc[df[\"class\"] == 5, \"class\"] = 1\n",
357 | "df.loc[df[\"class\"] == 6, \"class\"] = 1\n",
358 | "df.loc[df[\"class\"] == 7, \"class\"] = 1\n",
359 | "df.head()"
360 | ],
361 | "execution_count": 0,
362 | "outputs": [
363 | {
364 | "output_type": "execute_result",
365 | "data": {
366 | "text/html": [
367 | "\n",
368 | "\n",
381 | "
\n",
382 | " \n",
383 | " \n",
384 | " \n",
385 | " attention \n",
386 | " meditation \n",
387 | " delta \n",
388 | " theta \n",
389 | " lowAplha \n",
390 | " highAlpha \n",
391 | " lowBeta \n",
392 | " highBeta \n",
393 | " lowGamma \n",
394 | " highGamma \n",
395 | " class \n",
396 | " \n",
397 | " \n",
398 | " \n",
399 | " \n",
400 | " 0 \n",
401 | " 88 \n",
402 | " 17 \n",
403 | " 1290697 \n",
404 | " 18187 \n",
405 | " 6345 \n",
406 | " 7462 \n",
407 | " 7266 \n",
408 | " 4278 \n",
409 | " 2589 \n",
410 | " 1228 \n",
411 | " 1 \n",
412 | " \n",
413 | " \n",
414 | " 1 \n",
415 | " 88 \n",
416 | " 17 \n",
417 | " 105432 \n",
418 | " 21344 \n",
419 | " 8323 \n",
420 | " 4496 \n",
421 | " 4784 \n",
422 | " 12071 \n",
423 | " 2024 \n",
424 | " 1123 \n",
425 | " 1 \n",
426 | " \n",
427 | " \n",
428 | " 2 \n",
429 | " 83 \n",
430 | " 29 \n",
431 | " 732143 \n",
432 | " 37527 \n",
433 | " 48422 \n",
434 | " 10286 \n",
435 | " 9499 \n",
436 | " 6050 \n",
437 | " 2362 \n",
438 | " 4157 \n",
439 | " 1 \n",
440 | " \n",
441 | " \n",
442 | " 3 \n",
443 | " 80 \n",
444 | " 26 \n",
445 | " 21265 \n",
446 | " 24517 \n",
447 | " 7051 \n",
448 | " 1790 \n",
449 | " 9106 \n",
450 | " 5771 \n",
451 | " 1977 \n",
452 | " 3265 \n",
453 | " 1 \n",
454 | " \n",
455 | " \n",
456 | " 4 \n",
457 | " 69 \n",
458 | " 20 \n",
459 | " 349390 \n",
460 | " 145647 \n",
461 | " 10068 \n",
462 | " 21707 \n",
463 | " 11878 \n",
464 | " 19883 \n",
465 | " 9971 \n",
466 | " 6592 \n",
467 | " 1 \n",
468 | " \n",
469 | " \n",
470 | "
\n",
471 | "
"
472 | ],
473 | "text/plain": [
474 | " attention meditation delta theta ... highBeta lowGamma highGamma class\n",
475 | "0 88 17 1290697 18187 ... 4278 2589 1228 1\n",
476 | "1 88 17 105432 21344 ... 12071 2024 1123 1\n",
477 | "2 83 29 732143 37527 ... 6050 2362 4157 1\n",
478 | "3 80 26 21265 24517 ... 5771 1977 3265 1\n",
479 | "4 69 20 349390 145647 ... 19883 9971 6592 1\n",
480 | "\n",
481 | "[5 rows x 11 columns]"
482 | ]
483 | },
484 | "metadata": {
485 | "tags": []
486 | },
487 | "execution_count": 20
488 | }
489 | ]
490 | },
491 | {
492 | "cell_type": "code",
493 | "metadata": {
494 | "id": "9h6DbAJR2yrZ",
495 | "colab_type": "code",
496 | "colab": {}
497 | },
498 | "source": [
499 | "x=df.drop([\"class\",\"attention\",\"meditation\"] ,axis=1)\n"
500 | ],
501 | "execution_count": 0,
502 | "outputs": []
503 | },
504 | {
505 | "cell_type": "code",
506 | "metadata": {
507 | "id": "gh-pmcOAjqvK",
508 | "colab_type": "code",
509 | "outputId": "e0c82566-8d3c-44a8-b1e2-c42465c8106d",
510 | "colab": {
511 | "base_uri": "https://localhost:8080/",
512 | "height": 151
513 | }
514 | },
515 | "source": [
516 | "y = df.loc[:,'class'].values\n",
517 | "print(y)\n",
518 | "print(x.values)\n",
519 | "x=x.values\n"
520 | ],
521 | "execution_count": 0,
522 | "outputs": [
523 | {
524 | "output_type": "stream",
525 | "text": [
526 | "[1 1 1 ... 0 0 0]\n",
527 | "[[1290697 18187 6345 ... 4278 2589 1228]\n",
528 | " [ 105432 21344 8323 ... 12071 2024 1123]\n",
529 | " [ 732143 37527 48422 ... 6050 2362 4157]\n",
530 | " ...\n",
531 | " [ 11198 21200 18905 ... 12856 5412 7044]\n",
532 | " [ 537338 31723 1915 ... 8500 7453 3461]\n",
533 | " [ 534966 54906 30588 ... 17227 14293 3204]]\n"
534 | ],
535 | "name": "stdout"
536 | }
537 | ]
538 | },
539 | {
540 | "cell_type": "code",
541 | "metadata": {
542 | "id": "7vVS-2t12-2t",
543 | "colab_type": "code",
544 | "outputId": "e815ad3f-4f58-46ac-ca6b-57e92e461595",
545 | "colab": {
546 | "base_uri": "https://localhost:8080/",
547 | "height": 134
548 | }
549 | },
550 | "source": [
551 | "from sklearn.preprocessing import StandardScaler\n",
552 | "scaler = StandardScaler()\n",
553 | "scaler.fit(x)\n",
554 | "x = scaler.transform(x)\n",
555 | "from keras.utils import to_categorical\n",
556 | "y = to_categorical(y)\n",
557 | "y"
558 | ],
559 | "execution_count": 0,
560 | "outputs": [
561 | {
562 | "output_type": "execute_result",
563 | "data": {
564 | "text/plain": [
565 | "array([[0., 1.],\n",
566 | " [0., 1.],\n",
567 | " [0., 1.],\n",
568 | " ...,\n",
569 | " [1., 0.],\n",
570 | " [1., 0.],\n",
571 | " [1., 0.]], dtype=float32)"
572 | ]
573 | },
574 | "metadata": {
575 | "tags": []
576 | },
577 | "execution_count": 23
578 | }
579 | ]
580 | },
581 | {
582 | "cell_type": "code",
583 | "metadata": {
584 | "id": "-9_NdI2G3Fga",
585 | "colab_type": "code",
586 | "colab": {}
587 | },
588 | "source": [
589 | "x = np.reshape(x, (x.shape[0],1,x.shape[1]))\n"
590 | ],
591 | "execution_count": 0,
592 | "outputs": []
593 | },
594 | {
595 | "cell_type": "code",
596 | "metadata": {
597 | "id": "OtcPe53W3kGL",
598 | "colab_type": "code",
599 | "colab": {}
600 | },
601 | "source": [
602 | "from sklearn.model_selection import train_test_split\n",
603 | "x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state = 155)"
604 | ],
605 | "execution_count": 0,
606 | "outputs": []
607 | },
608 | {
609 | "cell_type": "code",
610 | "metadata": {
611 | "id": "k5ddLVk13NjQ",
612 | "colab_type": "code",
613 | "outputId": "d196481f-7173-4f2c-faf4-4ab87a3ec63a",
614 | "colab": {
615 | "base_uri": "https://localhost:8080/",
616 | "height": 54
617 | }
618 | },
619 | "source": [
620 | "\"\"\"from tensorflow.keras import Sequential\n",
621 | "\n",
622 | "from tensorflow.keras.layers import Dense, Dropout,BatchNormalization\n",
623 | "from tensorflow.keras.layers import Embedding,Dropout\n",
624 | "from tensorflow.keras.layers import LSTM\n",
625 | "tf.keras.backend.clear_session()\n",
626 | "\n",
627 | "\n",
628 | "model=Sequential()\n",
629 | "model.add(LSTM(128, return_sequences=True,activation=\"relu\",input_shape=(1,8))) # returns a sequence of vectors of dimension 128\n",
630 | "model.add(BatchNormalization())\n",
631 | "model.add(Dropout(0.2))\n",
632 | "model.add(LSTM(30, return_sequences=True,activation=\"relu\"))\n",
633 | "model.add(BatchNormalization())\n",
634 | "model.add(LSTM(30, return_sequences=True,activation=\"relu\"))\n",
635 | "\n",
636 | "model.add(Dropout(0.2)) # dropout for regularization\n",
637 | "model.add(LSTM(30)) # return a single vector of dimension 30\n",
638 | "model.add(Dense(2, activation='sigmoid'))\n",
639 | "\n",
640 | "model.compile(loss='binary_crossentropy',\n",
641 | " optimizer='adam',\n",
642 | " metrics=['accuracy'])\n",
643 | "\n",
644 | "\n",
645 | "\n",
646 | "model.summary()\n",
647 | "model.fit(x_train, y_train, batch_size = 10, epochs = 100, validation_data=(x_test,y_test))\n",
648 | "\"\"\""
649 | ],
650 | "execution_count": 0,
651 | "outputs": [
652 | {
653 | "output_type": "execute_result",
654 | "data": {
655 | "text/plain": [
656 | "'from tensorflow.keras import Sequential\\n\\nfrom tensorflow.keras.layers import Dense, Dropout,BatchNormalization\\nfrom tensorflow.keras.layers import Embedding,Dropout\\nfrom tensorflow.keras.layers import LSTM\\ntf.keras.backend.clear_session()\\n\\n\\nmodel=Sequential()\\nmodel.add(LSTM(128, return_sequences=True,activation=\"relu\",input_shape=(1,8))) # returns a sequence of vectors of dimension 30\\nmodel.add(BatchNormalization())\\nmodel.add(Dropout(0.2))\\nmodel.add(LSTM(30, return_sequences=True,activation=\"relu\"))\\nmodel.add(BatchNormalization())\\nmodel.add(LSTM(30, return_sequences=True,activation=\"relu\"))\\n\\nmodel.add(Dropout(0.2)) # returns a sequence of vectors of dimension 30\\nmodel.add(LSTM(30)) # return a single vector of dimension 30\\nmodel.add(Dense(2, activation=\\'sigmoid\\'))\\n\\nmodel.compile(loss=\\'binary_crossentropy\\',\\n optimizer=\\'adam\\',\\n metrics=[\\'accuracy\\'])\\n\\n\\n\\nmodel.summary()\\nmodel.fit(x_train, y_train, batch_size = 10, epochs = 100, validation_data=(x_test,y_test))\\n'"
657 | ]
658 | },
659 | "metadata": {
660 | "tags": []
661 | },
662 | "execution_count": 11
663 | }
664 | ]
665 | },
666 | {
667 | "cell_type": "code",
668 | "metadata": {
669 | "id": "wdBgNMGu9QQs",
670 | "colab_type": "code",
671 | "outputId": "735c8af8-4af3-4d9e-bcd1-8fd04a111a6e",
672 | "colab": {
673 | "base_uri": "https://localhost:8080/",
674 | "height": 857
675 | }
676 | },
677 | "source": [
678 | "import keras\n",
679 | "import keras.backend as K\n",
680 | "from keras.models import Sequential\n",
681 | "from keras.layers import Dense, Activation, LSTM, Dropout, BatchNormalization\n",
682 | "model = Sequential()\n",
683 | "model.add(LSTM(512, input_shape = (1,8),activation=\"relu\",return_sequences=True))\n",
684 | "\n",
685 | "#model.add(LSTM(100, batch_input_shape = (None, None, x.shape[2])))\n",
686 | "model.add(BatchNormalization())\n",
687 | "model.add(Dropout(0.3))\n",
688 | "\n",
689 | "model.add(LSTM(256,activation=\"relu\",return_sequences=True))\n",
690 | "model.add(BatchNormalization())\n",
691 | "model.add(Dropout(0.5))\n",
692 | "\n",
693 | "\n",
694 | "model.add(LSTM(128,activation=\"relu\",return_sequences=True))\n",
695 | "model.add(BatchNormalization())\n",
696 | "model.add(Dropout(0.3))\n",
697 | "\n",
698 | "model.add(LSTM(64,activation=\"relu\",return_sequences=True))\n",
699 | "model.add(BatchNormalization())\n",
700 | "model.add(Dropout(0.3))\n",
701 | "\n",
702 | "\n",
703 | "model.add(LSTM(32,activation=\"relu\"))\n",
704 | "model.add(BatchNormalization())\n",
705 | "model.add(Dropout(0.2))\n",
706 | "\n",
707 | "\n",
708 | "\n",
709 | "model.add(Dense(700))\n",
710 | "model.add(BatchNormalization())\n",
711 | "model.add(Activation('relu'))\n",
712 | "model.add(Dropout(0.2))\n",
713 | "\n",
714 | "model.add(Dense(2))\n",
715 | "model.add(Activation('sigmoid'))\n",
716 | "\n",
717 | "rmsprop =keras.optimizers.RMSprop(lr=0.009, rho=0.9, epsilon=1e-08)\n",
718 | "model.compile(loss='mean_squared_error',\n",
719 | " optimizer=rmsprop,\n",
720 | " metrics=['accuracy'])\n",
721 | "#adam = keras.optimizers.Adam(lr=0.5, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)\n",
722 | "#model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
723 | "model.summary()"
724 | ],
725 | "execution_count": 0,
726 | "outputs": [
727 | {
728 | "output_type": "stream",
729 | "text": [
730 | "Model: \"sequential_6\"\n",
731 | "_________________________________________________________________\n",
732 | "Layer (type) Output Shape Param # \n",
733 | "=================================================================\n",
734 | "lstm_24 (LSTM) (None, 1, 512) 1067008 \n",
735 | "_________________________________________________________________\n",
736 | "batch_normalization_28 (Batc (None, 1, 512) 2048 \n",
737 | "_________________________________________________________________\n",
738 | "dropout_28 (Dropout) (None, 1, 512) 0 \n",
739 | "_________________________________________________________________\n",
740 | "lstm_25 (LSTM) (None, 1, 256) 787456 \n",
741 | "_________________________________________________________________\n",
742 | "batch_normalization_29 (Batc (None, 1, 256) 1024 \n",
743 | "_________________________________________________________________\n",
744 | "dropout_29 (Dropout) (None, 1, 256) 0 \n",
745 | "_________________________________________________________________\n",
746 | "lstm_26 (LSTM) (None, 1, 128) 197120 \n",
747 | "_________________________________________________________________\n",
748 | "batch_normalization_30 (Batc (None, 1, 128) 512 \n",
749 | "_________________________________________________________________\n",
750 | "dropout_30 (Dropout) (None, 1, 128) 0 \n",
751 | "_________________________________________________________________\n",
752 | "lstm_27 (LSTM) (None, 1, 64) 49408 \n",
753 | "_________________________________________________________________\n",
754 | "batch_normalization_31 (Batc (None, 1, 64) 256 \n",
755 | "_________________________________________________________________\n",
756 | "dropout_31 (Dropout) (None, 1, 64) 0 \n",
757 | "_________________________________________________________________\n",
758 | "lstm_28 (LSTM) (None, 32) 12416 \n",
759 | "_________________________________________________________________\n",
760 | "batch_normalization_32 (Batc (None, 32) 128 \n",
761 | "_________________________________________________________________\n",
762 | "dropout_32 (Dropout) (None, 32) 0 \n",
763 | "_________________________________________________________________\n",
764 | "dense_10 (Dense) (None, 700) 23100 \n",
765 | "_________________________________________________________________\n",
766 | "batch_normalization_33 (Batc (None, 700) 2800 \n",
767 | "_________________________________________________________________\n",
768 | "activation_10 (Activation) (None, 700) 0 \n",
769 | "_________________________________________________________________\n",
770 | "dropout_33 (Dropout) (None, 700) 0 \n",
771 | "_________________________________________________________________\n",
772 | "dense_11 (Dense) (None, 2) 1402 \n",
773 | "_________________________________________________________________\n",
774 | "activation_11 (Activation) (None, 2) 0 \n",
775 | "=================================================================\n",
776 | "Total params: 2,144,678\n",
777 | "Trainable params: 2,141,294\n",
778 | "Non-trainable params: 3,384\n",
779 | "_________________________________________________________________\n"
780 | ],
781 | "name": "stdout"
782 | }
783 | ]
784 | },
785 | {
786 | "cell_type": "code",
787 | "metadata": {
788 | "id": "APRGM1Dv9StV",
789 | "colab_type": "code",
790 | "outputId": "a89c54bd-4ea3-4294-9e77-1c39b500535d",
791 | "colab": {
792 | "base_uri": "https://localhost:8080/",
793 | "height": 1000
794 | }
795 | },
796 | "source": [
797 | "model.fit(x_train, y_train, batch_size = 100, epochs = 100, validation_data=(x_test,y_test))\n"
798 | ],
799 | "execution_count": 0,
800 | "outputs": [
801 | {
802 | "output_type": "stream",
803 | "text": [
804 | "Train on 10695 samples, validate on 2674 samples\n",
805 | "Epoch 1/100\n",
806 | "10695/10695 [==============================] - 11s 1ms/step - loss: 0.2240 - accuracy: 0.7020 - val_loss: 0.2114 - val_accuracy: 0.7057\n",
807 | "Epoch 2/100\n",
808 | "10695/10695 [==============================] - 8s 742us/step - loss: 0.2121 - accuracy: 0.7069 - val_loss: 0.2084 - val_accuracy: 0.7053\n",
809 | "Epoch 3/100\n",
810 | "10695/10695 [==============================] - 8s 721us/step - loss: 0.2097 - accuracy: 0.7066 - val_loss: 0.2120 - val_accuracy: 0.7057\n",
811 | "Epoch 4/100\n",
812 | "10695/10695 [==============================] - 8s 723us/step - loss: 0.2114 - accuracy: 0.7025 - val_loss: 0.2109 - val_accuracy: 0.6990\n",
813 | "Epoch 5/100\n",
814 | "10695/10695 [==============================] - 8s 728us/step - loss: 0.2105 - accuracy: 0.7058 - val_loss: 0.2079 - val_accuracy: 0.7046\n",
815 | "Epoch 6/100\n",
816 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2111 - accuracy: 0.7054 - val_loss: 0.2069 - val_accuracy: 0.7057\n",
817 | "Epoch 7/100\n",
818 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2079 - accuracy: 0.7075 - val_loss: 0.2067 - val_accuracy: 0.7057\n",
819 | "Epoch 8/100\n",
820 | "10695/10695 [==============================] - 8s 749us/step - loss: 0.2082 - accuracy: 0.7071 - val_loss: 0.2086 - val_accuracy: 0.7019\n",
821 | "Epoch 9/100\n",
822 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2107 - accuracy: 0.7020 - val_loss: 0.2068 - val_accuracy: 0.7042\n",
823 | "Epoch 10/100\n",
824 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2102 - accuracy: 0.7052 - val_loss: 0.2050 - val_accuracy: 0.7064\n",
825 | "Epoch 11/100\n",
826 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2079 - accuracy: 0.7072 - val_loss: 0.2050 - val_accuracy: 0.7057\n",
827 | "Epoch 12/100\n",
828 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2068 - accuracy: 0.7084 - val_loss: 0.2075 - val_accuracy: 0.7061\n",
829 | "Epoch 13/100\n",
830 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2090 - accuracy: 0.7059 - val_loss: 0.2050 - val_accuracy: 0.7061\n",
831 | "Epoch 14/100\n",
832 | "10695/10695 [==============================] - 8s 724us/step - loss: 0.2078 - accuracy: 0.7082 - val_loss: 0.2074 - val_accuracy: 0.7057\n",
833 | "Epoch 15/100\n",
834 | "10695/10695 [==============================] - 8s 720us/step - loss: 0.2073 - accuracy: 0.7077 - val_loss: 0.2052 - val_accuracy: 0.7064\n",
835 | "Epoch 16/100\n",
836 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2083 - accuracy: 0.7072 - val_loss: 0.2053 - val_accuracy: 0.7064\n",
837 | "Epoch 17/100\n",
838 | "10695/10695 [==============================] - 8s 718us/step - loss: 0.2083 - accuracy: 0.7067 - val_loss: 0.2060 - val_accuracy: 0.7072\n",
839 | "Epoch 18/100\n",
840 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2079 - accuracy: 0.7072 - val_loss: 0.2064 - val_accuracy: 0.7061\n",
841 | "Epoch 19/100\n",
842 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2076 - accuracy: 0.7067 - val_loss: 0.2062 - val_accuracy: 0.7057\n",
843 | "Epoch 20/100\n",
844 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2072 - accuracy: 0.7078 - val_loss: 0.2066 - val_accuracy: 0.7057\n",
845 | "Epoch 21/100\n",
846 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2080 - accuracy: 0.7077 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
847 | "Epoch 22/100\n",
848 | "10695/10695 [==============================] - 8s 728us/step - loss: 0.2073 - accuracy: 0.7077 - val_loss: 0.2061 - val_accuracy: 0.7057\n",
849 | "Epoch 23/100\n",
850 | "10695/10695 [==============================] - 8s 727us/step - loss: 0.2073 - accuracy: 0.7075 - val_loss: 0.2060 - val_accuracy: 0.7057\n",
851 | "Epoch 24/100\n",
852 | "10695/10695 [==============================] - 8s 723us/step - loss: 0.2058 - accuracy: 0.7072 - val_loss: 0.2059 - val_accuracy: 0.7057\n",
853 | "Epoch 25/100\n",
854 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2060 - accuracy: 0.7076 - val_loss: 0.2057 - val_accuracy: 0.7057\n",
855 | "Epoch 26/100\n",
856 | "10695/10695 [==============================] - 8s 724us/step - loss: 0.2078 - accuracy: 0.7066 - val_loss: 0.2063 - val_accuracy: 0.7057\n",
857 | "Epoch 27/100\n",
858 | "10695/10695 [==============================] - 8s 721us/step - loss: 0.2076 - accuracy: 0.7076 - val_loss: 0.2060 - val_accuracy: 0.7057\n",
859 | "Epoch 28/100\n",
860 | "10695/10695 [==============================] - 8s 740us/step - loss: 0.2069 - accuracy: 0.7078 - val_loss: 0.2056 - val_accuracy: 0.7057\n",
861 | "Epoch 29/100\n",
862 | "10695/10695 [==============================] - 8s 720us/step - loss: 0.2062 - accuracy: 0.7072 - val_loss: 0.2060 - val_accuracy: 0.7057\n",
863 | "Epoch 30/100\n",
864 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2052 - accuracy: 0.7078 - val_loss: 0.2057 - val_accuracy: 0.7057\n",
865 | "Epoch 31/100\n",
866 | "10695/10695 [==============================] - 8s 723us/step - loss: 0.2056 - accuracy: 0.7076 - val_loss: 0.2056 - val_accuracy: 0.7057\n",
867 | "Epoch 32/100\n",
868 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2058 - accuracy: 0.7072 - val_loss: 0.2053 - val_accuracy: 0.7057\n",
869 | "Epoch 33/100\n",
870 | "10695/10695 [==============================] - 8s 721us/step - loss: 0.2064 - accuracy: 0.7069 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
871 | "Epoch 34/100\n",
872 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2057 - accuracy: 0.7076 - val_loss: 0.2056 - val_accuracy: 0.7057\n",
873 | "Epoch 35/100\n",
874 | "10695/10695 [==============================] - 8s 718us/step - loss: 0.2059 - accuracy: 0.7065 - val_loss: 0.2055 - val_accuracy: 0.7057\n",
875 | "Epoch 36/100\n",
876 | "10695/10695 [==============================] - 8s 723us/step - loss: 0.2079 - accuracy: 0.7054 - val_loss: 0.2049 - val_accuracy: 0.7053\n",
877 | "Epoch 37/100\n",
878 | "10695/10695 [==============================] - 8s 718us/step - loss: 0.2076 - accuracy: 0.7069 - val_loss: 0.2055 - val_accuracy: 0.7049\n",
879 | "Epoch 38/100\n",
880 | "10695/10695 [==============================] - 8s 720us/step - loss: 0.2075 - accuracy: 0.7079 - val_loss: 0.2049 - val_accuracy: 0.7053\n",
881 | "Epoch 39/100\n",
882 | "10695/10695 [==============================] - 8s 723us/step - loss: 0.2072 - accuracy: 0.7095 - val_loss: 0.2054 - val_accuracy: 0.7061\n",
883 | "Epoch 40/100\n",
884 | "10695/10695 [==============================] - 8s 724us/step - loss: 0.2072 - accuracy: 0.7069 - val_loss: 0.2052 - val_accuracy: 0.7053\n",
885 | "Epoch 41/100\n",
886 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2071 - accuracy: 0.7072 - val_loss: 0.2052 - val_accuracy: 0.7061\n",
887 | "Epoch 42/100\n",
888 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2068 - accuracy: 0.7076 - val_loss: 0.2054 - val_accuracy: 0.7061\n",
889 | "Epoch 43/100\n",
890 | "10695/10695 [==============================] - 8s 730us/step - loss: 0.2067 - accuracy: 0.7071 - val_loss: 0.2060 - val_accuracy: 0.7057\n",
891 | "Epoch 44/100\n",
892 | "10695/10695 [==============================] - 8s 727us/step - loss: 0.2054 - accuracy: 0.7071 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
893 | "Epoch 45/100\n",
894 | "10695/10695 [==============================] - 8s 727us/step - loss: 0.2067 - accuracy: 0.7075 - val_loss: 0.2065 - val_accuracy: 0.7057\n",
895 | "Epoch 46/100\n",
896 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2062 - accuracy: 0.7073 - val_loss: 0.2059 - val_accuracy: 0.7057\n",
897 | "Epoch 47/100\n",
898 | "10695/10695 [==============================] - 8s 718us/step - loss: 0.2068 - accuracy: 0.7063 - val_loss: 0.2057 - val_accuracy: 0.7049\n",
899 | "Epoch 48/100\n",
900 | "10695/10695 [==============================] - 8s 744us/step - loss: 0.2069 - accuracy: 0.7071 - val_loss: 0.2047 - val_accuracy: 0.7057\n",
901 | "Epoch 49/100\n",
902 | "10695/10695 [==============================] - 8s 744us/step - loss: 0.2062 - accuracy: 0.7068 - val_loss: 0.2055 - val_accuracy: 0.7057\n",
903 | "Epoch 50/100\n",
904 | "10695/10695 [==============================] - 8s 741us/step - loss: 0.2057 - accuracy: 0.7055 - val_loss: 0.2049 - val_accuracy: 0.7057\n",
905 | "Epoch 51/100\n",
906 | "10695/10695 [==============================] - 8s 721us/step - loss: 0.2047 - accuracy: 0.7074 - val_loss: 0.2055 - val_accuracy: 0.7057\n",
907 | "Epoch 52/100\n",
908 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2051 - accuracy: 0.7076 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
909 | "Epoch 53/100\n",
910 | "10695/10695 [==============================] - 8s 728us/step - loss: 0.2045 - accuracy: 0.7087 - val_loss: 0.2056 - val_accuracy: 0.7057\n",
911 | "Epoch 54/100\n",
912 | "10695/10695 [==============================] - 8s 720us/step - loss: 0.2059 - accuracy: 0.7050 - val_loss: 0.2050 - val_accuracy: 0.7057\n",
913 | "Epoch 55/100\n",
914 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2047 - accuracy: 0.7073 - val_loss: 0.2058 - val_accuracy: 0.7057\n",
915 | "Epoch 56/100\n",
916 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2046 - accuracy: 0.7080 - val_loss: 0.2049 - val_accuracy: 0.7057\n",
917 | "Epoch 57/100\n",
918 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2051 - accuracy: 0.7071 - val_loss: 0.2051 - val_accuracy: 0.7053\n",
919 | "Epoch 58/100\n",
920 | "10695/10695 [==============================] - 8s 722us/step - loss: 0.2043 - accuracy: 0.7076 - val_loss: 0.2051 - val_accuracy: 0.7057\n",
921 | "Epoch 59/100\n",
922 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2044 - accuracy: 0.7085 - val_loss: 0.2052 - val_accuracy: 0.7057\n",
923 | "Epoch 60/100\n",
924 | "10695/10695 [==============================] - 8s 725us/step - loss: 0.2037 - accuracy: 0.7074 - val_loss: 0.2053 - val_accuracy: 0.7057\n",
925 | "Epoch 61/100\n",
926 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2037 - accuracy: 0.7073 - val_loss: 0.2048 - val_accuracy: 0.7057\n",
927 | "Epoch 62/100\n",
928 | "10695/10695 [==============================] - 8s 728us/step - loss: 0.2038 - accuracy: 0.7083 - val_loss: 0.2045 - val_accuracy: 0.7061\n",
929 | "Epoch 63/100\n",
930 | "10695/10695 [==============================] - 8s 729us/step - loss: 0.2039 - accuracy: 0.7072 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
931 | "Epoch 64/100\n",
932 | "10695/10695 [==============================] - 8s 724us/step - loss: 0.2038 - accuracy: 0.7060 - val_loss: 0.2052 - val_accuracy: 0.7057\n",
933 | "Epoch 65/100\n",
934 | "10695/10695 [==============================] - 8s 731us/step - loss: 0.2035 - accuracy: 0.7080 - val_loss: 0.2049 - val_accuracy: 0.7061\n",
935 | "Epoch 66/100\n",
936 | "10695/10695 [==============================] - 8s 726us/step - loss: 0.2039 - accuracy: 0.7089 - val_loss: 0.2059 - val_accuracy: 0.7053\n",
937 | "Epoch 67/100\n",
938 | "10695/10695 [==============================] - 8s 737us/step - loss: 0.2039 - accuracy: 0.7081 - val_loss: 0.2054 - val_accuracy: 0.7057\n",
939 | "Epoch 68/100\n",
940 | "10695/10695 [==============================] - 8s 734us/step - loss: 0.2041 - accuracy: 0.7072 - val_loss: 0.2051 - val_accuracy: 0.7061\n",
941 | "Epoch 69/100\n",
942 | "10695/10695 [==============================] - 8s 732us/step - loss: 0.2034 - accuracy: 0.7084 - val_loss: 0.2051 - val_accuracy: 0.7053\n",
943 | "Epoch 70/100\n",
944 | "10695/10695 [==============================] - 8s 735us/step - loss: 0.2029 - accuracy: 0.7069 - val_loss: 0.2045 - val_accuracy: 0.7068\n",
945 | "Epoch 71/100\n",
946 | "10695/10695 [==============================] - 8s 727us/step - loss: 0.2033 - accuracy: 0.7081 - val_loss: 0.2058 - val_accuracy: 0.7053\n",
947 | "Epoch 72/100\n",
948 | "10695/10695 [==============================] - 8s 733us/step - loss: 0.2031 - accuracy: 0.7088 - val_loss: 0.2047 - val_accuracy: 0.7061\n",
949 | "Epoch 73/100\n",
950 | "10695/10695 [==============================] - 8s 738us/step - loss: 0.2036 - accuracy: 0.7079 - val_loss: 0.2050 - val_accuracy: 0.7057\n",
951 | "Epoch 74/100\n",
952 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2032 - accuracy: 0.7072 - val_loss: 0.2051 - val_accuracy: 0.7057\n",
953 | "Epoch 75/100\n",
954 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2028 - accuracy: 0.7085 - val_loss: 0.2045 - val_accuracy: 0.7064\n",
955 | "Epoch 76/100\n",
956 | "10695/10695 [==============================] - 8s 737us/step - loss: 0.2033 - accuracy: 0.7070 - val_loss: 0.2056 - val_accuracy: 0.7053\n",
957 | "Epoch 77/100\n",
958 | "10695/10695 [==============================] - 8s 735us/step - loss: 0.2028 - accuracy: 0.7090 - val_loss: 0.2053 - val_accuracy: 0.7053\n",
959 | "Epoch 78/100\n",
960 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2018 - accuracy: 0.7084 - val_loss: 0.2048 - val_accuracy: 0.7061\n",
961 | "Epoch 79/100\n",
962 | "10695/10695 [==============================] - 8s 738us/step - loss: 0.2032 - accuracy: 0.7083 - val_loss: 0.2049 - val_accuracy: 0.7053\n",
963 | "Epoch 80/100\n",
964 | "10695/10695 [==============================] - 8s 733us/step - loss: 0.2024 - accuracy: 0.7095 - val_loss: 0.2049 - val_accuracy: 0.7053\n",
965 | "Epoch 81/100\n",
966 | "10695/10695 [==============================] - 8s 741us/step - loss: 0.2022 - accuracy: 0.7084 - val_loss: 0.2059 - val_accuracy: 0.7046\n",
967 | "Epoch 82/100\n",
968 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2023 - accuracy: 0.7092 - val_loss: 0.2048 - val_accuracy: 0.7061\n",
969 | "Epoch 83/100\n",
970 | "10695/10695 [==============================] - 8s 738us/step - loss: 0.2011 - accuracy: 0.7110 - val_loss: 0.2060 - val_accuracy: 0.7046\n",
971 | "Epoch 84/100\n",
972 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2026 - accuracy: 0.7091 - val_loss: 0.2053 - val_accuracy: 0.7053\n",
973 | "Epoch 85/100\n",
974 | "10695/10695 [==============================] - 8s 738us/step - loss: 0.2027 - accuracy: 0.7081 - val_loss: 0.2055 - val_accuracy: 0.7049\n",
975 | "Epoch 86/100\n",
976 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2020 - accuracy: 0.7072 - val_loss: 0.2054 - val_accuracy: 0.7049\n",
977 | "Epoch 87/100\n",
978 | "10695/10695 [==============================] - 8s 738us/step - loss: 0.2019 - accuracy: 0.7091 - val_loss: 0.2056 - val_accuracy: 0.7053\n",
979 | "Epoch 88/100\n",
980 | "10695/10695 [==============================] - 8s 735us/step - loss: 0.2014 - accuracy: 0.7096 - val_loss: 0.2065 - val_accuracy: 0.7049\n",
981 | "Epoch 89/100\n",
982 | "10695/10695 [==============================] - 8s 744us/step - loss: 0.2022 - accuracy: 0.7087 - val_loss: 0.2057 - val_accuracy: 0.7046\n",
983 | "Epoch 90/100\n",
984 | "10695/10695 [==============================] - 8s 747us/step - loss: 0.2012 - accuracy: 0.7107 - val_loss: 0.2059 - val_accuracy: 0.7031\n",
985 | "Epoch 91/100\n",
986 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2015 - accuracy: 0.7109 - val_loss: 0.2063 - val_accuracy: 0.7034\n",
987 | "Epoch 92/100\n",
988 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2014 - accuracy: 0.7081 - val_loss: 0.2056 - val_accuracy: 0.7034\n",
989 | "Epoch 93/100\n",
990 | "10695/10695 [==============================] - 8s 739us/step - loss: 0.2020 - accuracy: 0.7096 - val_loss: 0.2050 - val_accuracy: 0.7046\n",
991 | "Epoch 94/100\n",
992 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2010 - accuracy: 0.7113 - val_loss: 0.2057 - val_accuracy: 0.7061\n",
993 | "Epoch 95/100\n",
994 | "10695/10695 [==============================] - 8s 734us/step - loss: 0.2021 - accuracy: 0.7101 - val_loss: 0.2055 - val_accuracy: 0.7061\n",
995 | "Epoch 96/100\n",
996 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2012 - accuracy: 0.7099 - val_loss: 0.2047 - val_accuracy: 0.7053\n",
997 | "Epoch 97/100\n",
998 | "10695/10695 [==============================] - 8s 734us/step - loss: 0.2015 - accuracy: 0.7083 - val_loss: 0.2064 - val_accuracy: 0.7053\n",
999 | "Epoch 98/100\n",
1000 | "10695/10695 [==============================] - 8s 736us/step - loss: 0.2015 - accuracy: 0.7083 - val_loss: 0.2057 - val_accuracy: 0.7049\n",
1001 | "Epoch 99/100\n",
1002 | "10695/10695 [==============================] - 8s 734us/step - loss: 0.2008 - accuracy: 0.7102 - val_loss: 0.2054 - val_accuracy: 0.7027\n",
1003 | "Epoch 100/100\n",
1004 | "10695/10695 [==============================] - 8s 735us/step - loss: 0.2011 - accuracy: 0.7096 - val_loss: 0.2052 - val_accuracy: 0.7031\n"
1005 | ],
1006 | "name": "stdout"
1007 | },
1008 | {
1009 | "output_type": "execute_result",
1010 | "data": {
1011 | "text/plain": [
1012 | ""
1013 | ]
1014 | },
1015 | "metadata": {
1016 | "tags": []
1017 | },
1018 | "execution_count": 36
1019 | }
1020 | ]
1021 | },
1022 | {
1023 | "cell_type": "code",
1024 | "metadata": {
1025 | "id": "F51Vrjc59Zpf",
1026 | "colab_type": "code",
1027 | "outputId": "f5fb8aef-0c74-47eb-d118-8438e84eac7e",
1028 | "colab": {
1029 | "base_uri": "https://localhost:8080/",
1030 | "height": 67
1031 | }
1032 | },
1033 | "source": [
1034 | "from sklearn.metrics import accuracy_score\n",
1035 | "pred = model.predict(x_test)  # per-class probabilities for the held-out test set\n",
1036 | "predict_classes = np.argmax(pred,axis=1)  # highest-probability class per sample\n",
1037 | "expected_classes = np.argmax(y_test,axis=1)  # undo the one-hot encoding of the labels\n",
1038 | "print(expected_classes.shape)\n",
1039 | "print(predict_classes.shape)\n",
1040 | "correct = accuracy_score(expected_classes,predict_classes)\n",
1041 | "print(f\"Test Accuracy: {correct}\")"
1042 | ],
1043 | "execution_count": 0,
1044 | "outputs": [
1045 | {
1046 | "output_type": "stream",
1047 | "text": [
1048 | "(2674,)\n",
1049 | "(2674,)\n",
1050 | "Training Accuracy: 0.7030665669409125\n"
1051 | ],
1052 | "name": "stdout"
1053 | }
1054 | ]
1055 | },
1056 | {
1057 | "cell_type": "code",
1058 | "metadata": {
1059 | "id": "dbpfIXDhSWAg",
1060 | "colab_type": "code",
1061 | "colab": {}
1062 | },
1063 | "source": [
1064 | ""
1065 | ],
1066 | "execution_count": 0,
1067 | "outputs": []
1068 | }
1069 | ]
1070 | }
--------------------------------------------------------------------------------
/single_channel_eeg_emotion/singlechannel_lstm_emotion_data.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Copy of working-lstm-emotion-data_acc47.ipynb",
7 | "provenance": [],
8 | "collapsed_sections": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "code",
18 | "metadata": {
19 | "id": "6NpVHDR_NSpv",
20 | "colab_type": "code",
21 | "outputId": "5c4442ee-2321-4ea3-c247-a9ed9cb56dbc",
22 | "colab": {
23 | "base_uri": "https://localhost:8080/",
24 | "height": 121
25 | }
26 | },
27 | "source": [
28 | "from google.colab import drive\n",
29 | "drive.mount('/content/gdrive')\n"
30 | ],
31 | "execution_count": 0,
32 | "outputs": [
33 | {
34 | "output_type": "stream",
35 | "text": [
36 | "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
37 | "\n",
38 | "Enter your authorization code:\n",
39 | "··········\n",
40 | "Mounted at /content/gdrive\n"
41 | ],
42 | "name": "stdout"
43 | }
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "metadata": {
49 | "id": "KFu9OVI5Nac6",
50 | "colab_type": "code",
51 | "colab": {}
52 | },
53 | "source": [
54 | "import pandas as pd\n",
55 | "import tensorflow as tf\n",
56 | "\n",
57 | "\n",
58 | "import numpy as np\n",
59 | "import random\n",
60 | "random.seed(72)"
61 | ],
62 | "execution_count": 0,
63 | "outputs": []
64 | },
65 | {
66 | "cell_type": "code",
67 | "metadata": {
68 | "id": "hVyRp4fENz-E",
69 | "colab_type": "code",
70 | "outputId": "8a45749d-020e-42b7-c8aa-cfd5ad37a8af",
71 | "colab": {
72 | "base_uri": "https://localhost:8080/",
73 | "height": 402
74 | }
75 | },
76 | "source": [
77 | "df=pd.read_csv(\"/content/gdrive/My Drive/data/emotion1channel.csv\")\n",
78 | "df"
79 | ],
80 | "execution_count": 0,
81 | "outputs": [
82 | {
83 | "output_type": "execute_result",
84 | "data": {
85 | "text/html": [
86 | "\n",
87 | "\n",
100 | "
\n",
101 | " \n",
102 | " \n",
103 | " \n",
104 | " attention \n",
105 | " meditation \n",
106 | " delta \n",
107 | " theta \n",
108 | " lowAplha \n",
109 | " highAlpha \n",
110 | " lowBeta \n",
111 | " highBeta \n",
112 | " lowGamma \n",
113 | " highGamma \n",
114 | " class \n",
115 | " \n",
116 | " \n",
117 | " \n",
118 | " \n",
119 | " 0 \n",
120 | " 88 \n",
121 | " 17 \n",
122 | " 1290697 \n",
123 | " 18187 \n",
124 | " 6345 \n",
125 | " 7462 \n",
126 | " 7266 \n",
127 | " 4278 \n",
128 | " 2589 \n",
129 | " 1228 \n",
130 | " 7 \n",
131 | " \n",
132 | " \n",
133 | " 1 \n",
134 | " 88 \n",
135 | " 17 \n",
136 | " 105432 \n",
137 | " 21344 \n",
138 | " 8323 \n",
139 | " 4496 \n",
140 | " 4784 \n",
141 | " 12071 \n",
142 | " 2024 \n",
143 | " 1123 \n",
144 | " 7 \n",
145 | " \n",
146 | " \n",
147 | " 2 \n",
148 | " 83 \n",
149 | " 29 \n",
150 | " 732143 \n",
151 | " 37527 \n",
152 | " 48422 \n",
153 | " 10286 \n",
154 | " 9499 \n",
155 | " 6050 \n",
156 | " 2362 \n",
157 | " 4157 \n",
158 | " 7 \n",
159 | " \n",
160 | " \n",
161 | " 3 \n",
162 | " 80 \n",
163 | " 26 \n",
164 | " 21265 \n",
165 | " 24517 \n",
166 | " 7051 \n",
167 | " 1790 \n",
168 | " 9106 \n",
169 | " 5771 \n",
170 | " 1977 \n",
171 | " 3265 \n",
172 | " 7 \n",
173 | " \n",
174 | " \n",
175 | " 4 \n",
176 | " 69 \n",
177 | " 20 \n",
178 | " 349390 \n",
179 | " 145647 \n",
180 | " 10068 \n",
181 | " 21707 \n",
182 | " 11878 \n",
183 | " 19883 \n",
184 | " 9971 \n",
185 | " 6592 \n",
186 | " 7 \n",
187 | " \n",
188 | " \n",
189 | " ... \n",
190 | " ... \n",
191 | " ... \n",
192 | " ... \n",
193 | " ... \n",
194 | " ... \n",
195 | " ... \n",
196 | " ... \n",
197 | " ... \n",
198 | " ... \n",
199 | " ... \n",
200 | " ... \n",
201 | " \n",
202 | " \n",
203 | " 13364 \n",
204 | " 66 \n",
205 | " 61 \n",
206 | " 36288 \n",
207 | " 10942 \n",
208 | " 15819 \n",
209 | " 4781 \n",
210 | " 10488 \n",
211 | " 11690 \n",
212 | " 7245 \n",
213 | " 2287 \n",
214 | " 1 \n",
215 | " \n",
216 | " \n",
217 | " 13365 \n",
218 | " 63 \n",
219 | " 81 \n",
220 | " 434483 \n",
221 | " 16548 \n",
222 | " 25760 \n",
223 | " 10239 \n",
224 | " 7332 \n",
225 | " 3822 \n",
226 | " 9214 \n",
227 | " 5527 \n",
228 | " 1 \n",
229 | " \n",
230 | " \n",
231 | " 13366 \n",
232 | " 61 \n",
233 | " 91 \n",
234 | " 11198 \n",
235 | " 21200 \n",
236 | " 18905 \n",
237 | " 20228 \n",
238 | " 4850 \n",
239 | " 12856 \n",
240 | " 5412 \n",
241 | " 7044 \n",
242 | " 1 \n",
243 | " \n",
244 | " \n",
245 | " 13367 \n",
246 | " 56 \n",
247 | " 88 \n",
248 | " 537338 \n",
249 | " 31723 \n",
250 | " 1915 \n",
251 | " 13033 \n",
252 | " 10876 \n",
253 | " 8500 \n",
254 | " 7453 \n",
255 | " 3461 \n",
256 | " 1 \n",
257 | " \n",
258 | " \n",
259 | " 13368 \n",
260 | " 51 \n",
261 | " 90 \n",
262 | " 534966 \n",
263 | " 54906 \n",
264 | " 30588 \n",
265 | " 22906 \n",
266 | " 14624 \n",
267 | " 17227 \n",
268 | " 14293 \n",
269 | " 3204 \n",
270 | " 1 \n",
271 | " \n",
272 | " \n",
273 | "
\n",
274 | "
13369 rows × 11 columns
\n",
275 | "
"
276 | ],
277 | "text/plain": [
278 | " attention meditation delta ... lowGamma highGamma class\n",
279 | "0 88 17 1290697 ... 2589 1228 7\n",
280 | "1 88 17 105432 ... 2024 1123 7\n",
281 | "2 83 29 732143 ... 2362 4157 7\n",
282 | "3 80 26 21265 ... 1977 3265 7\n",
283 | "4 69 20 349390 ... 9971 6592 7\n",
284 | "... ... ... ... ... ... ... ...\n",
285 | "13364 66 61 36288 ... 7245 2287 1\n",
286 | "13365 63 81 434483 ... 9214 5527 1\n",
287 | "13366 61 91 11198 ... 5412 7044 1\n",
288 | "13367 56 88 537338 ... 7453 3461 1\n",
289 | "13368 51 90 534966 ... 14293 3204 1\n",
290 | "\n",
291 | "[13369 rows x 11 columns]"
292 | ]
293 | },
294 | "metadata": {
295 | "tags": []
296 | },
297 | "execution_count": 3
298 | }
299 | ]
300 | },
301 | {
302 | "cell_type": "code",
303 | "metadata": {
304 | "id": "fAuGGx6UN-CL",
305 | "colab_type": "code",
306 | "outputId": "7e8f4b6b-9059-4c23-9f3f-003d768219e8",
307 | "colab": {
308 | "base_uri": "https://localhost:8080/",
309 | "height": 218
310 | }
311 | },
312 | "source": [
313 | "df.isnull().sum()"
314 | ],
315 | "execution_count": 0,
316 | "outputs": [
317 | {
318 | "output_type": "execute_result",
319 | "data": {
320 | "text/plain": [
321 | "attention 0\n",
322 | "meditation 0\n",
323 | "delta 0\n",
324 | "theta 0\n",
325 | "lowAplha 0\n",
326 | "highAlpha 0\n",
327 | "lowBeta 0\n",
328 | "highBeta 0\n",
329 | "lowGamma 0\n",
330 | "highGamma 0\n",
331 | "class 0\n",
332 | "dtype: int64"
333 | ]
334 | },
335 | "metadata": {
336 | "tags": []
337 | },
338 | "execution_count": 4
339 | }
340 | ]
341 | },
342 | {
343 | "cell_type": "code",
344 | "metadata": {
345 | "id": "CeukCO_2iDJa",
346 | "colab_type": "code",
347 | "outputId": "f5d9cec8-138f-4193-dfc9-54be06e9285c",
348 | "colab": {
349 | "base_uri": "https://localhost:8080/",
350 | "height": 34
351 | }
352 | },
353 | "source": [
354 | "#df=x=df[df['class']<5 ]\n",
355 | "df[\"class\"].unique()\n",
356 | "\n"
357 | ],
358 | "execution_count": 0,
359 | "outputs": [
360 | {
361 | "output_type": "execute_result",
362 | "data": {
363 | "text/plain": [
364 | "array([7, 5, 3, 4, 0, 2, 1, 6])"
365 | ]
366 | },
367 | "metadata": {
368 | "tags": []
369 | },
370 | "execution_count": 5
371 | }
372 | ]
373 | },
374 | {
375 | "cell_type": "code",
376 | "metadata": {
377 | "id": "YGjboqH2ulcT",
378 | "colab_type": "code",
379 | "outputId": "225c7459-d5e9-46f0-f492-2cf4e607afe1",
380 | "colab": {
381 | "base_uri": "https://localhost:8080/",
382 | "height": 195
383 | }
384 | },
385 | "source": [
386 | "df.loc[df[\"class\"] == 1, \"class\"] = 0\n",
387 | "df.loc[df[\"class\"] == 2, \"class\"] = 1\n",
388 | "df.loc[df[\"class\"] == 3, \"class\"] = 1\n",
389 | "df.loc[df[\"class\"] == 4, \"class\"] = 2\n",
390 | "df.loc[df[\"class\"] == 5, \"class\"] = 2\n",
391 | "df.loc[df[\"class\"] == 6, \"class\"] = 3\n",
392 | "df.loc[df[\"class\"] == 7, \"class\"] = 3\n",
393 | "df.head()"
394 | ],
395 | "execution_count": 0,
396 | "outputs": [
397 | {
398 | "output_type": "execute_result",
399 | "data": {
400 | "text/html": [
401 | "\n",
402 | "\n",
415 | "
\n",
416 | " \n",
417 | " \n",
418 | " \n",
419 | " attention \n",
420 | " meditation \n",
421 | " delta \n",
422 | " theta \n",
423 | " lowAplha \n",
424 | " highAlpha \n",
425 | " lowBeta \n",
426 | " highBeta \n",
427 | " lowGamma \n",
428 | " highGamma \n",
429 | " class \n",
430 | " \n",
431 | " \n",
432 | " \n",
433 | " \n",
434 | " 0 \n",
435 | " 88 \n",
436 | " 17 \n",
437 | " 1290697 \n",
438 | " 18187 \n",
439 | " 6345 \n",
440 | " 7462 \n",
441 | " 7266 \n",
442 | " 4278 \n",
443 | " 2589 \n",
444 | " 1228 \n",
445 | " 3 \n",
446 | " \n",
447 | " \n",
448 | " 1 \n",
449 | " 88 \n",
450 | " 17 \n",
451 | " 105432 \n",
452 | " 21344 \n",
453 | " 8323 \n",
454 | " 4496 \n",
455 | " 4784 \n",
456 | " 12071 \n",
457 | " 2024 \n",
458 | " 1123 \n",
459 | " 3 \n",
460 | " \n",
461 | " \n",
462 | " 2 \n",
463 | " 83 \n",
464 | " 29 \n",
465 | " 732143 \n",
466 | " 37527 \n",
467 | " 48422 \n",
468 | " 10286 \n",
469 | " 9499 \n",
470 | " 6050 \n",
471 | " 2362 \n",
472 | " 4157 \n",
473 | " 3 \n",
474 | " \n",
475 | " \n",
476 | " 3 \n",
477 | " 80 \n",
478 | " 26 \n",
479 | " 21265 \n",
480 | " 24517 \n",
481 | " 7051 \n",
482 | " 1790 \n",
483 | " 9106 \n",
484 | " 5771 \n",
485 | " 1977 \n",
486 | " 3265 \n",
487 | " 3 \n",
488 | " \n",
489 | " \n",
490 | " 4 \n",
491 | " 69 \n",
492 | " 20 \n",
493 | " 349390 \n",
494 | " 145647 \n",
495 | " 10068 \n",
496 | " 21707 \n",
497 | " 11878 \n",
498 | " 19883 \n",
499 | " 9971 \n",
500 | " 6592 \n",
501 | " 3 \n",
502 | " \n",
503 | " \n",
504 | "
\n",
505 | "
"
506 | ],
507 | "text/plain": [
508 | " attention meditation delta theta ... highBeta lowGamma highGamma class\n",
509 | "0 88 17 1290697 18187 ... 4278 2589 1228 3\n",
510 | "1 88 17 105432 21344 ... 12071 2024 1123 3\n",
511 | "2 83 29 732143 37527 ... 6050 2362 4157 3\n",
512 | "3 80 26 21265 24517 ... 5771 1977 3265 3\n",
513 | "4 69 20 349390 145647 ... 19883 9971 6592 3\n",
514 | "\n",
515 | "[5 rows x 11 columns]"
516 | ]
517 | },
518 | "metadata": {
519 | "tags": []
520 | },
521 | "execution_count": 6
522 | }
523 | ]
524 | },
525 | {
526 | "cell_type": "code",
527 | "metadata": {
528 | "id": "U_XO16Wnigct",
529 | "colab_type": "code",
530 | "outputId": "880e42df-599f-4b0d-ab12-e566145a0cc6",
531 | "colab": {
532 | "base_uri": "https://localhost:8080/",
533 | "height": 402
534 | }
535 | },
536 | "source": [
537 | "#x=df.drop([\"class\"] ,axis=1)\n",
538 | "x=df.drop([\"class\",\"attention\",\"meditation\"] ,axis=1)\n",
539 | "#x=df\n",
540 | "x"
541 | ],
542 | "execution_count": 0,
543 | "outputs": [
544 | {
545 | "output_type": "execute_result",
546 | "data": {
547 | "text/html": [
548 | "\n",
549 | "\n",
562 | "
\n",
563 | " \n",
564 | " \n",
565 | " \n",
566 | " delta \n",
567 | " theta \n",
568 | " lowAplha \n",
569 | " highAlpha \n",
570 | " lowBeta \n",
571 | " highBeta \n",
572 | " lowGamma \n",
573 | " highGamma \n",
574 | " \n",
575 | " \n",
576 | " \n",
577 | " \n",
578 | " 0 \n",
579 | " 1290697 \n",
580 | " 18187 \n",
581 | " 6345 \n",
582 | " 7462 \n",
583 | " 7266 \n",
584 | " 4278 \n",
585 | " 2589 \n",
586 | " 1228 \n",
587 | " \n",
588 | " \n",
589 | " 1 \n",
590 | " 105432 \n",
591 | " 21344 \n",
592 | " 8323 \n",
593 | " 4496 \n",
594 | " 4784 \n",
595 | " 12071 \n",
596 | " 2024 \n",
597 | " 1123 \n",
598 | " \n",
599 | " \n",
600 | " 2 \n",
601 | " 732143 \n",
602 | " 37527 \n",
603 | " 48422 \n",
604 | " 10286 \n",
605 | " 9499 \n",
606 | " 6050 \n",
607 | " 2362 \n",
608 | " 4157 \n",
609 | " \n",
610 | " \n",
611 | " 3 \n",
612 | " 21265 \n",
613 | " 24517 \n",
614 | " 7051 \n",
615 | " 1790 \n",
616 | " 9106 \n",
617 | " 5771 \n",
618 | " 1977 \n",
619 | " 3265 \n",
620 | " \n",
621 | " \n",
622 | " 4 \n",
623 | " 349390 \n",
624 | " 145647 \n",
625 | " 10068 \n",
626 | " 21707 \n",
627 | " 11878 \n",
628 | " 19883 \n",
629 | " 9971 \n",
630 | " 6592 \n",
631 | " \n",
632 | " \n",
633 | " ... \n",
634 | " ... \n",
635 | " ... \n",
636 | " ... \n",
637 | " ... \n",
638 | " ... \n",
639 | " ... \n",
640 | " ... \n",
641 | " ... \n",
642 | " \n",
643 | " \n",
644 | " 13364 \n",
645 | " 36288 \n",
646 | " 10942 \n",
647 | " 15819 \n",
648 | " 4781 \n",
649 | " 10488 \n",
650 | " 11690 \n",
651 | " 7245 \n",
652 | " 2287 \n",
653 | " \n",
654 | " \n",
655 | " 13365 \n",
656 | " 434483 \n",
657 | " 16548 \n",
658 | " 25760 \n",
659 | " 10239 \n",
660 | " 7332 \n",
661 | " 3822 \n",
662 | " 9214 \n",
663 | " 5527 \n",
664 | " \n",
665 | " \n",
666 | " 13366 \n",
667 | " 11198 \n",
668 | " 21200 \n",
669 | " 18905 \n",
670 | " 20228 \n",
671 | " 4850 \n",
672 | " 12856 \n",
673 | " 5412 \n",
674 | " 7044 \n",
675 | " \n",
676 | " \n",
677 | " 13367 \n",
678 | " 537338 \n",
679 | " 31723 \n",
680 | " 1915 \n",
681 | " 13033 \n",
682 | " 10876 \n",
683 | " 8500 \n",
684 | " 7453 \n",
685 | " 3461 \n",
686 | " \n",
687 | " \n",
688 | " 13368 \n",
689 | " 534966 \n",
690 | " 54906 \n",
691 | " 30588 \n",
692 | " 22906 \n",
693 | " 14624 \n",
694 | " 17227 \n",
695 | " 14293 \n",
696 | " 3204 \n",
697 | " \n",
698 | " \n",
699 | "
\n",
700 | "
12610 rows × 8 columns
\n",
701 | "
"
702 | ],
703 | "text/plain": [
704 | " delta theta lowAplha ... highBeta lowGamma highGamma\n",
705 | "0 1290697 18187 6345 ... 4278 2589 1228\n",
706 | "1 105432 21344 8323 ... 12071 2024 1123\n",
707 | "2 732143 37527 48422 ... 6050 2362 4157\n",
708 | "3 21265 24517 7051 ... 5771 1977 3265\n",
709 | "4 349390 145647 10068 ... 19883 9971 6592\n",
710 | "... ... ... ... ... ... ... ...\n",
711 | "13364 36288 10942 15819 ... 11690 7245 2287\n",
712 | "13365 434483 16548 25760 ... 3822 9214 5527\n",
713 | "13366 11198 21200 18905 ... 12856 5412 7044\n",
714 | "13367 537338 31723 1915 ... 8500 7453 3461\n",
715 | "13368 534966 54906 30588 ... 17227 14293 3204\n",
716 | "\n",
717 | "[12610 rows x 8 columns]"
718 | ]
719 | },
720 | "metadata": {
721 | "tags": []
722 | },
723 | "execution_count": 8
724 | }
725 | ]
726 | },
727 | {
728 | "cell_type": "code",
729 | "metadata": {
730 | "id": "gh-pmcOAjqvK",
731 | "colab_type": "code",
732 | "outputId": "35fd7846-7770-4a8e-d044-9a02fe861a1c",
733 | "colab": {
734 | "base_uri": "https://localhost:8080/",
735 | "height": 151
736 | }
737 | },
738 | "source": [
739 | "y = df.loc[:,'class'].values\n",
740 | "print(y)\n",
741 | "print(x.values)\n",
742 | "x=x.values\n"
743 | ],
744 | "execution_count": 0,
745 | "outputs": [
746 | {
747 | "output_type": "stream",
748 | "text": [
749 | "[3 3 3 ... 0 0 0]\n",
750 | "[[1290697 18187 6345 ... 4278 2589 1228]\n",
751 | " [ 105432 21344 8323 ... 12071 2024 1123]\n",
752 | " [ 732143 37527 48422 ... 6050 2362 4157]\n",
753 | " ...\n",
754 | " [ 11198 21200 18905 ... 12856 5412 7044]\n",
755 | " [ 537338 31723 1915 ... 8500 7453 3461]\n",
756 | " [ 534966 54906 30588 ... 17227 14293 3204]]\n"
757 | ],
758 | "name": "stdout"
759 | }
760 | ]
761 | },
762 | {
763 | "cell_type": "code",
764 | "metadata": {
765 | "id": "jE5RURdCs_e-",
766 | "colab_type": "code",
767 | "outputId": "5be67bae-905b-4278-8da0-9ce4689461bb",
768 | "colab": {
769 | "base_uri": "https://localhost:8080/",
770 | "height": 151
771 | }
772 | },
773 | "source": [
774 | "from sklearn.preprocessing import StandardScaler\n",
775 | "# FIXME(data leakage): the scaler is fit on the FULL dataset here, before the\n",
776 | "# train/test split in a later cell; test-set statistics leak into training.\n",
777 | "# Fit on x_train only and reuse that scaler to transform x_test.\n",
778 | "scaler = StandardScaler()\n",
779 | "x = scaler.fit_transform(x)  # standardize each feature to zero mean / unit variance\n",
780 | "from keras.utils import to_categorical\n",
781 | "y = to_categorical(y)  # one-hot encode the 4 merged class labels\n",
782 | "y"
781 | ],
782 | "execution_count": 0,
783 | "outputs": [
784 | {
785 | "output_type": "stream",
786 | "text": [
787 | "Using TensorFlow backend.\n"
788 | ],
789 | "name": "stderr"
790 | },
791 | {
792 | "output_type": "execute_result",
793 | "data": {
794 | "text/plain": [
795 | "array([[0., 0., 0., 1.],\n",
796 | " [0., 0., 0., 1.],\n",
797 | " [0., 0., 0., 1.],\n",
798 | " ...,\n",
799 | " [1., 0., 0., 0.],\n",
800 | " [1., 0., 0., 0.],\n",
801 | " [1., 0., 0., 0.]], dtype=float32)"
802 | ]
803 | },
804 | "metadata": {
805 | "tags": []
806 | },
807 | "execution_count": 10
808 | }
809 | ]
810 | },
811 | {
812 | "cell_type": "code",
813 | "metadata": {
814 | "id": "sICGfmO7kDVN",
815 | "colab_type": "code",
816 | "colab": {}
817 | },
818 | "source": [
819 | "from sklearn.model_selection import train_test_split\n",
820 | "x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state = 4)"
821 | ],
822 | "execution_count": 0,
823 | "outputs": []
824 | },
825 | {
826 | "cell_type": "code",
827 | "metadata": {
828 | "id": "B9ycOxB3o1FJ",
829 | "colab_type": "code",
830 | "colab": {}
831 | },
832 | "source": [
833 | "x_train = np.reshape(x_train, (x_train.shape[0],1,x.shape[1]))\n",
834 | "x_test = np.reshape(x_test, (x_test.shape[0],1,x.shape[1]))\n"
835 | ],
836 | "execution_count": 0,
837 | "outputs": []
838 | },
839 | {
840 | "cell_type": "code",
841 | "metadata": {
842 | "id": "ddNd6dwtsGPP",
843 | "colab_type": "code",
844 | "outputId": "76980845-46d3-4944-df46-2d6e0b19b3db",
845 | "colab": {
846 | "base_uri": "https://localhost:8080/",
847 | "height": 319
848 | }
849 | },
850 | "source": [
851 | "from tensorflow.keras import Sequential\n",
852 | "from tensorflow.keras.layers import Dense, Dropout, LSTM\n",
853 | "tf.keras.backend.clear_session()  # fresh graph/layer names on notebook re-runs\n",
854 | "\n",
855 | "# Stacked LSTM over a single timestep of 8 EEG band-power features (shape (1, 8)).\n",
856 | "model = Sequential()\n",
857 | "model.add(LSTM(64, input_shape=(1,8),activation=\"relu\",return_sequences=True))\n",
858 | "model.add(Dropout(0.2))\n",
859 | "model.add(LSTM(32,activation=\"sigmoid\"))\n",
860 | "model.add(Dropout(0.5))\n",
861 | "# softmax, not sigmoid: the 4 classes are mutually exclusive and\n",
862 | "# categorical_crossentropy expects the outputs to form a probability distribution.\n",
863 | "model.add(Dense(4, activation='softmax'))\n",
864 | "model.compile(loss = 'categorical_crossentropy', optimizer = \"adam\", metrics = ['accuracy'])\n",
865 | "model.summary()"
871 | ],
872 | "execution_count": 0,
873 | "outputs": [
874 | {
875 | "output_type": "stream",
876 | "text": [
877 | "Model: \"sequential\"\n",
878 | "_________________________________________________________________\n",
879 | "Layer (type) Output Shape Param # \n",
880 | "=================================================================\n",
881 | "lstm (LSTM) (None, 1, 64) 18688 \n",
882 | "_________________________________________________________________\n",
883 | "dropout (Dropout) (None, 1, 64) 0 \n",
884 | "_________________________________________________________________\n",
885 | "lstm_1 (LSTM) (None, 32) 12416 \n",
886 | "_________________________________________________________________\n",
887 | "dropout_1 (Dropout) (None, 32) 0 \n",
888 | "_________________________________________________________________\n",
889 | "dense (Dense) (None, 4) 132 \n",
890 | "=================================================================\n",
891 | "Total params: 31,236\n",
892 | "Trainable params: 31,236\n",
893 | "Non-trainable params: 0\n",
894 | "_________________________________________________________________\n"
895 | ],
896 | "name": "stdout"
897 | }
898 | ]
899 | },
900 | {
901 | "cell_type": "code",
902 | "metadata": {
903 | "id": "dCWqeiQFnr6R",
904 | "colab_type": "code",
905 | "outputId": "fb12bc12-49a0-48c1-f333-affd847cf625",
906 | "colab": {
907 | "base_uri": "https://localhost:8080/",
908 | "height": 370
909 | }
910 | },
911 | "source": [
912 | "#history = model.fit(x_train, y_train, validation_split=0.33, epochs=50, batch_size=10, verbose=0)\n",
913 | "\n",
914 | "history = model.fit(x_train, y_train, epochs = 10, validation_data= (x_test, y_test))\n",
915 | "score, acc = model.evaluate(x_test, y_test)\n"
916 | ],
917 | "execution_count": 0,
918 | "outputs": [
919 | {
920 | "output_type": "stream",
921 | "text": [
922 | "Epoch 1/10\n",
923 | "316/316 [==============================] - 1s 4ms/step - loss: 1.3410 - accuracy: 0.3936 - val_loss: 1.2226 - val_accuracy: 0.5063\n",
924 | "Epoch 2/10\n",
925 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2449 - accuracy: 0.5006 - val_loss: 1.2185 - val_accuracy: 0.5063\n",
926 | "Epoch 3/10\n",
927 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2356 - accuracy: 0.5020 - val_loss: 1.2171 - val_accuracy: 0.5063\n",
928 | "Epoch 4/10\n",
929 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2314 - accuracy: 0.5018 - val_loss: 1.2158 - val_accuracy: 0.5063\n",
930 | "Epoch 5/10\n",
931 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2304 - accuracy: 0.5018 - val_loss: 1.2157 - val_accuracy: 0.5063\n",
932 | "Epoch 6/10\n",
933 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2292 - accuracy: 0.5020 - val_loss: 1.2150 - val_accuracy: 0.5063\n",
934 | "Epoch 7/10\n",
935 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2269 - accuracy: 0.5017 - val_loss: 1.2148 - val_accuracy: 0.5063\n",
936 | "Epoch 8/10\n",
937 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2233 - accuracy: 0.5015 - val_loss: 1.2150 - val_accuracy: 0.5063\n",
938 | "Epoch 9/10\n",
939 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2247 - accuracy: 0.5018 - val_loss: 1.2145 - val_accuracy: 0.5063\n",
940 | "Epoch 10/10\n",
941 | "316/316 [==============================] - 1s 3ms/step - loss: 1.2228 - accuracy: 0.5018 - val_loss: 1.2140 - val_accuracy: 0.5063\n",
942 | "79/79 [==============================] - 0s 1ms/step - loss: 1.2140 - accuracy: 0.5063\n"
943 | ],
944 | "name": "stdout"
945 | }
946 | ]
947 | },
948 | {
949 | "cell_type": "code",
950 | "metadata": {
951 | "id": "AUaeKFadBMTh",
952 | "colab_type": "code",
953 | "outputId": "fad02389-cf0f-4549-d900-1425f7cecb9e",
954 | "colab": {
955 | "base_uri": "https://localhost:8080/",
956 | "height": 50
957 | }
958 | },
959 | "source": [
960 | "print('Test score:', score)\n",
961 | "print('Test accuracy:', acc)\n"
962 | ],
963 | "execution_count": 0,
964 | "outputs": [
965 | {
966 | "output_type": "stream",
967 | "text": [
968 | "Test score: 1.2140363454818726\n",
969 | "Test accuracy: 0.506344199180603\n"
970 | ],
971 | "name": "stdout"
972 | }
973 | ]
974 | },
975 | {
976 | "cell_type": "code",
977 | "metadata": {
978 | "id": "daXPd6uMrkhy",
979 | "colab_type": "code",
980 | "outputId": "0500dd72-8095-4277-c10b-62ccb0a21ba5",
981 | "colab": {
982 | "base_uri": "https://localhost:8080/",
983 | "height": 67
984 | }
985 | },
986 | "source": [
987 | "\n",
988 | "from sklearn.metrics import accuracy_score\n",
989 | "pred = model.predict(x_test)  # per-class probabilities for the held-out test set\n",
990 | "predict_classes = np.argmax(pred,axis=1)  # highest-probability class per sample\n",
991 | "expected_classes = np.argmax(y_test,axis=1)  # undo the one-hot encoding of the labels\n",
992 | "print(expected_classes.shape)\n",
993 | "print(predict_classes.shape)\n",
994 | "correct = accuracy_score(expected_classes,predict_classes)\n",
995 | "print(f\"Test Accuracy: {correct}\")"
996 | ],
997 | "execution_count": 0,
998 | "outputs": [
999 | {
1000 | "output_type": "stream",
1001 | "text": [
1002 | "(2522,)\n",
1003 | "(2522,)\n",
1004 | "Training Accuracy: 0.5063441712926249\n"
1005 | ],
1006 | "name": "stdout"
1007 | }
1008 | ]
1009 | },
1010 | {
1011 | "cell_type": "code",
1012 | "metadata": {
1013 | "id": "7075Dhqj64hc",
1014 | "colab_type": "code",
1015 | "outputId": "f80b3333-112c-4c17-936e-4ea826d93011",
1016 | "colab": {
1017 | "base_uri": "https://localhost:8080/",
1018 | "height": 50
1019 | }
1020 | },
1021 | "source": [
1022 | "print(predict_classes.shape)\n",
1023 | "print(expected_classes.shape)\n"
1024 | ],
1025 | "execution_count": 0,
1026 | "outputs": [
1027 | {
1028 | "output_type": "stream",
1029 | "text": [
1030 | "(2522,)\n",
1031 | "(2522,)\n"
1032 | ],
1033 | "name": "stdout"
1034 | }
1035 | ]
1036 | }
1037 | ]
1038 | }
--------------------------------------------------------------------------------