├── CSV.csv ├── LICENSE ├── Multivatiate-GRU ├── .ipynb_checkpoints │ ├── Multivariate-3-GRU-Copy1-checkpoint.ipynb │ ├── Multivariate-3-GRU-Copy2-checkpoint.ipynb │ ├── Multivariate-3-GRU-checkpoint.ipynb │ └── Tuning-checkpoint.ipynb ├── MV3-GRU-Split1.h5 ├── MV3-GRU-Split2.h5 ├── MV3-GRU-Split3.h5 ├── MV3-GRU-Split4.h5 ├── MV3-GRU-Split5.h5 ├── MV3-GRU_40_[40,35]_1e-4_64.h5 ├── MV3-GRU_40_[40,35]_1e-4_64_train_loss.csv ├── MV3-GRU_40_[40,35]_1e-4_64_val_loss.csv ├── MV3-GRU_CrossValidation.csv ├── MV3-GRU_CrossValidation_TrainLoss.csv ├── MV3-GRU_CrossValidation_ValLoss.csv └── Multivariate-3-GRU.ipynb ├── Multivatiate-LSTM ├── .ipynb_checkpoints │ ├── Multivariate-3-LSTM-Copy1-checkpoint.ipynb │ ├── Multivariate-3-LSTM-checkpoint.ipynb │ └── Tuning-checkpoint.ipynb ├── MV3-LSTM-Split1.h5 ├── MV3-LSTM-Split2.h5 ├── MV3-LSTM-Split3.h5 ├── MV3-LSTM-Split4.h5 ├── MV3-LSTM-Split5.h5 ├── MV3-LSTM_50_[40,35]_1e-3_64.h5 ├── MV3-LSTM_50_[40,35]_1e-3_64_train_loss.csv ├── MV3-LSTM_50_[40,35]_1e-3_64_val_loss.csv ├── MV3-LSTM_CrossValidation.csv ├── MV3-LSTM_CrossValidation_TrainLoss.csv ├── MV3-LSTM_CrossValidation_ValLoss.csv └── Multivariate-3-LSTM.ipynb ├── Multivatiate-RNN ├── .ipynb_checkpoints │ ├── Multivariate-3-RNN-Copy1-checkpoint.ipynb │ ├── Multivariate-3-RNN-checkpoint.ipynb │ └── Tuning-checkpoint.ipynb ├── MV3-RNN-Split1.h5 ├── MV3-RNN-Split2.h5 ├── MV3-RNN-Split3.h5 ├── MV3-RNN-Split4.h5 ├── MV3-RNN-Split5.h5 ├── MV3-RNN_CrossValidation.csv ├── MV3-RNN_CrossValidation_TrainLoss.csv ├── MV3-RNN_CrossValidation_ValLoss.csv ├── Multivariate-3-RNN.ipynb ├── model_simple_30_[50,45]_1e-3_32.h5 ├── model_simple_30_[50,45]_1e-3_32_train_loss.csv └── model_simple_30_[50,45]_1e-3_32_val_loss.csv ├── README.md └── Univariate -LSTM ├── .ipynb_checkpoints ├── Tuning-checkpoint.ipynb └── Univariate_LSTM-checkpoint.ipynb ├── UV-LSTM-Split1.h5 ├── UV-LSTM-Split2.h5 ├── UV-LSTM-Split3.h5 ├── UV-LSTM-Split4.h5 ├── UV-LSTM-Split5.h5 ├── UV-LSTM_40_[40,35]_1e-3_64.h5 ├── 
UV-LSTM_40_[40,35]_1e-3_64_train_loss.csv ├── UV-LSTM_40_[40,35]_1e-3_64_val_loss.csv ├── UV-LSTM_CrossValidation.csv ├── UV-LSTM_CrossValidation_TrainLoss.csv ├── UV-LSTM_CrossValidation_ValLoss.csv └── Univariate_LSTM.ipynb /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Navodit Jain 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Multivatiate-GRU/.ipynb_checkpoints/Multivariate-3-GRU-Copy2-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "colab": {}, 8 | "colab_type": "code", 9 | "id": "S19jleua1_GE" 10 | }, 11 | "outputs": [], 12 | "source": [ 13 | "import sys\n", 14 | "print(sys.version)" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": { 21 | "colab": {}, 22 | "colab_type": "code", 23 | "id": "XGXh32Yl3E5s" 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "# Importing dependencies\n", 28 | "\n", 29 | "import numpy as np\n", 30 | "np.random.seed(1)\n", 31 | "from tensorflow import set_random_seed\n", 32 | "set_random_seed(2)\n", 33 | "import pandas as pd\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "from keras.models import Sequential, load_model\n", 36 | "from keras.layers.core import Dense\n", 37 | "from keras.layers.recurrent import GRU\n", 38 | "from keras import optimizers\n", 39 | "from keras.callbacks import EarlyStopping\n", 40 | "from sklearn.preprocessing import MinMaxScaler\n", 41 | "from sklearn.metrics import mean_squared_error, r2_score\n", 42 | "from math import sqrt\n", 43 | "import datetime as dt\n", 44 | "plt.style.use('ggplot')" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "### Data Preprocessing" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": { 58 | "colab": {}, 59 | "colab_type": "code", 60 | "id": "FNioyc6mZUAJ" 61 | }, 62 | "outputs": [], 63 | "source": [ 64 | "# Setting up an early stop\n", 65 | "earlystop = EarlyStopping(monitor='val_loss', min_delta=0.0001, patience=80, verbose=1, mode='min')\n", 66 | "callbacks_list = [earlystop]" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 
71 | "execution_count": null, 72 | "metadata": { 73 | "colab": {}, 74 | "colab_type": "code", 75 | "id": "r5O0a39R9Z_Z" 76 | }, 77 | "outputs": [], 78 | "source": [ 79 | "# Loading the dataset\n", 80 | "url = 'https://raw.githubusercontent.com/ninja3697/dataset/master/CSV.csv'\n", 81 | "#url = '../../CSV.csv'\n", 82 | "df = pd.read_csv(url,parse_dates = True,index_col=0)\n", 83 | "df.tail()" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": { 90 | "colab": {}, 91 | "colab_type": "code", 92 | "id": "2H5Lkte_NgWT" 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "# Correlation matrix\n", 97 | "df.corr()['Close']" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "print(df.describe().Volume) \n", 107 | "df.drop(df[df['Volume']==0].index, inplace = True) #Dropping rows with volume value 0\n", 108 | "df['Volume'].hist(bins = 20)" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": null, 114 | "metadata": { 115 | "colab": {}, 116 | "colab_type": "code", 117 | "id": "zRVEzZ1FXj_p" 118 | }, 119 | "outputs": [], 120 | "source": [ 121 | "#Build and train the model\n", 122 | "def fit_model(train,val,timesteps,hl,lr,batch,epochs):\n", 123 | " X_train = []\n", 124 | " Y_train = []\n", 125 | " X_val = []\n", 126 | " Y_val = []\n", 127 | " \n", 128 | " # Loop for training data\n", 129 | " for i in range(timesteps,train.shape[0]):\n", 130 | " X_train.append(train[i-timesteps:i])\n", 131 | " Y_train.append(train[i][0])\n", 132 | " X_train,Y_train = np.array(X_train),np.array(Y_train)\n", 133 | " \n", 134 | " # Loop for val data\n", 135 | " for i in range(timesteps,val.shape[0]):\n", 136 | " X_val.append(val[i-timesteps:i])\n", 137 | " Y_val.append(val[i][0])\n", 138 | " X_val,Y_val = np.array(X_val),np.array(Y_val)\n", 139 | " \n", 140 | " # Adding Layers to the model\n", 141 | " model = Sequential()\n", 142 | " 
model.add(GRU(X_train.shape[2],input_shape = (X_train.shape[1],X_train.shape[2]),return_sequences = True,\n", 143 | " activation = 'relu'))\n", 144 | " for i in range(len(hl)-1): \n", 145 | " model.add(GRU(hl[i],activation = 'relu',return_sequences = True))\n", 146 | " model.add(GRU(hl[-1],activation = 'relu'))\n", 147 | " model.add(Dense(1))\n", 148 | " model.compile(optimizer = optimizers.Adam(lr = lr), loss = 'mean_squared_error')\n", 149 | " #print(model.summary())\n", 150 | " \n", 151 | " # Training the data\n", 152 | " history = model.fit(X_train,Y_train,epochs = epochs,batch_size = batch,validation_data = (X_val, Y_val),verbose = 0,\n", 153 | " shuffle = False)#, callbacks=callbacks_list)\n", 154 | " model.reset_states()\n", 155 | " return model, history.history['loss'], history.history['val_loss']\n", 156 | " " 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": null, 162 | "metadata": { 163 | "colab": {}, 164 | "colab_type": "code", 165 | "id": "LpwHmJeQJqyI" 166 | }, 167 | "outputs": [], 168 | "source": [ 169 | "# Evaluating the model\n", 170 | "def evaluate_model(model,test,timesteps):\n", 171 | " X_test = []\n", 172 | " Y_test = []\n", 173 | "\n", 174 | " # Loop for testing data\n", 175 | " for i in range(timesteps,test.shape[0]):\n", 176 | " X_test.append(test[i-timesteps:i])\n", 177 | " Y_test.append(test[i][0])\n", 178 | " X_test,Y_test = np.array(X_test),np.array(Y_test)\n", 179 | " \n", 180 | " # Prediction Time !!!!\n", 181 | " Y_hat = model.predict(X_test)\n", 182 | " rmse = sqrt(mean_squared_error(Y_test,Y_hat))\n", 183 | " r2 = r2_score(Y_test,Y_hat)\n", 184 | " return rmse, r2, Y_test, Y_hat\n", 185 | " " 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": null, 191 | "metadata": { 192 | "colab": {}, 193 | "colab_type": "code", 194 | "id": "pI0q18ajCLx6" 195 | }, 196 | "outputs": [], 197 | "source": [ 198 | "# Plotting the predictions\n", 199 | "def plot_data(Y_test,Y_hat):\n", 200 | " 
plt.plot(Y_test,c = 'r')\n", 201 | " plt.plot(Y_hat,c = 'y')\n", 202 | " plt.xlabel('Day')\n", 203 | " plt.ylabel('Price')\n", 204 | " plt.title(\"Stock Price Prediction using Multivariate-GRU\")\n", 205 | " plt.legend(['Actual','Predicted'],loc = 'upper right')\n", 206 | " plt.show()" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": null, 212 | "metadata": { 213 | "colab": {}, 214 | "colab_type": "code", 215 | "id": "4NeqKRBZZr0Q" 216 | }, 217 | "outputs": [], 218 | "source": [ 219 | "# Plotting the training errors\n", 220 | "def plot_error(train_loss,val_loss):\n", 221 | " plt.plot(train_loss,c = 'r')\n", 222 | " plt.plot(val_loss,c = 'b')\n", 223 | " plt.ylabel('Loss')\n", 224 | " plt.xlabel('Epochs')\n", 225 | " plt.title('Loss Plot')\n", 226 | " plt.legend(['train','val'],loc = 'lower right')\n", 227 | " plt.show()" 228 | ] 229 | }, 230 | { 231 | "cell_type": "markdown", 232 | "metadata": {}, 233 | "source": [ 234 | "### Model Building" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": null, 240 | "metadata": { 241 | "colab": {}, 242 | "colab_type": "code", 243 | "id": "gAvOMIyjIQO-" 244 | }, 245 | "outputs": [], 246 | "source": [ 247 | "# Extracting the series\n", 248 | "series = df[['Close','High','Volume']] # Picking the features\n", 249 | "print(series.shape)\n", 250 | "print(series.tail())" 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": null, 256 | "metadata": { 257 | "colab": {}, 258 | "colab_type": "code", 259 | "id": "Tjso-RjNDmbs" 260 | }, 261 | "outputs": [], 262 | "source": [ 263 | "# Train Val Test Split\n", 264 | "train_start = dt.date(1997,1,1)\n", 265 | "train_end = dt.date(2006,12,31)\n", 266 | "train_data = series.loc[train_start:train_end]\n", 267 | "\n", 268 | "val_start = dt.date(2007,1,1)\n", 269 | "val_end = dt.date(2008,12,31)\n", 270 | "val_data = series.loc[val_start:val_end]\n", 271 | "\n", 272 | "test_start = dt.date(2009,1,1)\n", 273 | "test_end = 
dt.date(2010,12,31)\n", 274 | "test_data = series.loc[test_start:test_end]\n", 275 | "\n", 276 | "print(train_data.shape,val_data.shape,test_data.shape)" 277 | ] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "execution_count": null, 282 | "metadata": { 283 | "colab": {}, 284 | "colab_type": "code", 285 | "id": "NWXR5oL2ZnY4" 286 | }, 287 | "outputs": [], 288 | "source": [ 289 | "# Normalisation\n", 290 | "sc = MinMaxScaler()\n", 291 | "train = sc.fit_transform(train_data)\n", 292 | "val = sc.transform(val_data)\n", 293 | "test = sc.transform(test_data)\n", 294 | "print(train.shape,val.shape,test.shape)" 295 | ] 296 | }, 297 | { 298 | "cell_type": "code", 299 | "execution_count": null, 300 | "metadata": {}, 301 | "outputs": [], 302 | "source": [ 303 | "timesteps = 40\n", 304 | "hl = [40,35]\n", 305 | "lr = 1e-4\n", 306 | "batch_size = 64\n", 307 | "num_epochs = 500" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": null, 313 | "metadata": {}, 314 | "outputs": [], 315 | "source": [ 316 | "model,train_error,val_error = fit_model(\n", 317 | " train,val,timesteps,hl,lr,batch_size,num_epochs)" 318 | ] 319 | }, 320 | { 321 | "cell_type": "code", 322 | "execution_count": null, 323 | "metadata": {}, 324 | "outputs": [], 325 | "source": [ 326 | "plot_error(train_error,val_error)" 327 | ] 328 | }, 329 | { 330 | "cell_type": "code", 331 | "execution_count": null, 332 | "metadata": {}, 333 | "outputs": [], 334 | "source": [ 335 | "rmse, r2_value, true,predicted = evaluate_model(model,test,40)\n", 336 | "print(\"RMSE=\",rmse)\n", 337 | "print(\"R2-Score=\",r2_value)\n", 338 | "plot_data(true,predicted)" 339 | ] 340 | }, 341 | { 342 | "cell_type": "code", 343 | "execution_count": null, 344 | "metadata": {}, 345 | "outputs": [], 346 | "source": [ 347 | "pd.Series(train_error).to_csv('MV3-GRU_40_[40,35]_1e-4_64_train_loss.csv')\n", 348 | "pd.Series(val_error).to_csv('MV3-GRU_40_[40,35]_1e-4_64_val_loss.csv')" 349 | ] 350 | }, 351 | { 352 | 
"cell_type": "code", 353 | "execution_count": null, 354 | "metadata": {}, 355 | "outputs": [], 356 | "source": [ 357 | "# Save a model\n", 358 | "model.save('MV3-GRU_40_[40,35]_1e-4_64.h5')\n", 359 | "#del model #Deletes the model\n", 360 | "# Load a model\n", 361 | "#model = load_model('model1.h5')" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "metadata": {}, 367 | "source": [ 368 | "### Model 2\n", 369 | "Converting volume to log scale" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "metadata": { 376 | "colab": {}, 377 | "colab_type": "code", 378 | "id": "gAvOMIyjIQO-" 379 | }, 380 | "outputs": [], 381 | "source": [ 382 | "# Converting Volume to log scale\n", 383 | "df['Volume_log'] = np.log(df['Volume'])\n", 384 | "print(df['Volume_log'].describe())\n", 385 | "df['Volume_log'].hist(bins=20)" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": null, 391 | "metadata": {}, 392 | "outputs": [], 393 | "source": [ 394 | "# Extracting the series\n", 395 | "series = df[['Close','High','Volume_log']] # Picking the multivariate series \n", 396 | "print(series.shape)\n", 397 | "print(series.tail())" 398 | ] 399 | }, 400 | { 401 | "cell_type": "code", 402 | "execution_count": null, 403 | "metadata": { 404 | "colab": {}, 405 | "colab_type": "code", 406 | "id": "Tjso-RjNDmbs" 407 | }, 408 | "outputs": [], 409 | "source": [ 410 | "# Train Val Test Split\n", 411 | "train_start = dt.date(1997,1,1)\n", 412 | "train_end = dt.date(2006,12,31)\n", 413 | "train_data = series.loc[train_start:train_end]\n", 414 | "\n", 415 | "val_start = dt.date(2007,1,1)\n", 416 | "val_end = dt.date(2008,12,31)\n", 417 | "val_data = series.loc[val_start:val_end]\n", 418 | "\n", 419 | "test_start = dt.date(2009,1,1)\n", 420 | "test_end = dt.date(2010,12,31)\n", 421 | "test_data = series.loc[test_start:test_end]\n", 422 | "\n", 423 | "print(train_data.shape,val_data.shape,test_data.shape)" 424 | ] 425 | }, 426 | { 427 | 
"cell_type": "code", 428 | "execution_count": null, 429 | "metadata": { 430 | "colab": {}, 431 | "colab_type": "code", 432 | "id": "NWXR5oL2ZnY4" 433 | }, 434 | "outputs": [], 435 | "source": [ 436 | "# Normalisation\n", 437 | "sc = MinMaxScaler()\n", 438 | "train = sc.fit_transform(train_data)\n", 439 | "val = sc.transform(val_data)\n", 440 | "test = sc.transform(test_data)\n", 441 | "print(train.shape,val.shape,test.shape)" 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": null, 447 | "metadata": {}, 448 | "outputs": [], 449 | "source": [ 450 | "results = list()\n", 451 | "for t in timesteps:\n", 452 | " for l in hl:\n", 453 | " for rate in lr:\n", 454 | " for batch in batch_size:\n", 455 | " for epochs in num_epochs:\n", 456 | " model,train_loss,val_loss = fit_model(train,val,t,l,rate,batch,epochs)\n", 457 | " results.append([t,l,rate,batch,train_loss[-1],val_loss[-1]])\n", 458 | "pd.DataFrame(results,columns=['Timestep','Hidden_Layers','Learning_Rate','Batch_Size','Train_Loss','Val_Loss']).to_csv('Multivariate-3-GRU_model2.csv')\n" 459 | ] 460 | }, 461 | { 462 | "cell_type": "code", 463 | "execution_count": null, 464 | "metadata": {}, 465 | "outputs": [], 466 | "source": [ 467 | "'''\n", 468 | "timesteps = 42\n", 469 | "hl = [35,30]\n", 470 | "lr = 8e-4\n", 471 | "batch_size = 32\n", 472 | "num_epochs = 500\n", 473 | "'''" 474 | ] 475 | }, 476 | { 477 | "cell_type": "code", 478 | "execution_count": null, 479 | "metadata": {}, 480 | "outputs": [], 481 | "source": [ 482 | "'''model,train_error,val_error = fit_model(train,val,timesteps,hl,lr,batch_size,num_epochs)\n", 483 | "plot_error(train_error,val_error)\n", 484 | "'''" 485 | ] 486 | }, 487 | { 488 | "cell_type": "code", 489 | "execution_count": null, 490 | "metadata": {}, 491 | "outputs": [], 492 | "source": [ 493 | "'''\n", 494 | "rmse, r2_value, true,predicted = evaluate_model(model,test,42)\n", 495 | "print(rmse)\n", 496 | "plot_data(true,predicted)\n", 497 | "'''" 498 | ] 499 | 
}, 500 | { 501 | "cell_type": "code", 502 | "execution_count": null, 503 | "metadata": {}, 504 | "outputs": [], 505 | "source": [ 506 | "# Save a model\n", 507 | "#model.save('model1.h5')\n", 508 | "del model #Deletes the model\n", 509 | "# Load a model\n", 510 | "#model = load_model('model1.h5')" 511 | ] 512 | } 513 | ], 514 | "metadata": { 515 | "accelerator": "TPU", 516 | "colab": { 517 | "collapsed_sections": [], 518 | "name": "Multivariate-3-GRU.ipynb", 519 | "provenance": [ 520 | { 521 | "file_id": "1ANiwUb7Hem1MxFKaX-BUbbEjAGAmJ2l8", 522 | "timestamp": 1551229090275 523 | }, 524 | { 525 | "file_id": "1TbS1iN6r9LWkUIIg2eyr5EB0pIzV5yAK", 526 | "timestamp": 1551228758851 527 | }, 528 | { 529 | "file_id": "1iQNCZLjJhQ56R4aZ1sLrnNQ4PS2KfN2m", 530 | "timestamp": 1551146300148 531 | }, 532 | { 533 | "file_id": "1OKnKPBI38XYPGQW0xUAj0nnJbfO6SuXE", 534 | "timestamp": 1550670800726 535 | } 536 | ], 537 | "version": "0.3.2" 538 | }, 539 | "kernelspec": { 540 | "display_name": "Python [default]", 541 | "language": "python", 542 | "name": "python3" 543 | }, 544 | "language_info": { 545 | "codemirror_mode": { 546 | "name": "ipython", 547 | "version": 3 548 | }, 549 | "file_extension": ".py", 550 | "mimetype": "text/x-python", 551 | "name": "python", 552 | "nbconvert_exporter": "python", 553 | "pygments_lexer": "ipython3", 554 | "version": "3.6.8" 555 | } 556 | }, 557 | "nbformat": 4, 558 | "nbformat_minor": 1 559 | } 560 | -------------------------------------------------------------------------------- /Multivatiate-GRU/.ipynb_checkpoints/Tuning-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd\n", 11 | "import matplotlib.pyplot as plt\n", 12 | "plt.style.use('ggplot')" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 
2, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "lr = [1e-2,5e-3,1e-3,5e-4,1e-4,5e-5]" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "fiveeminus5_train = pd.DataFrame()\n", 31 | "fiveeminus5_val = pd.DataFrame()\n", 32 | "for i in range(35):\n", 33 | " fiveeminus5_train[str(i)] = pd.read_csv('Results/exp_{}_train_loss.csv'.format(i), index_col = 0)[str(5e-5)]\n", 34 | " fiveeminus5_val[str(i)] = pd.read_csv('Results/exp_{}_val_loss.csv'.format(i), index_col = 0)[str(5e-5)]\n", 35 | "fiveeminus5_train.to_csv('fiveeminus5_train.csv')\n", 36 | "fiveeminus5_val.to_csv('fiveeminus5_val.csv')" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "fiveeminus5_val.shape" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "plt.figure(figsize = [15,10])\n", 55 | "lst = list(np.arange(0,35,1))\n", 56 | "for i in lst:\n", 57 | " plt.plot(pd.read_csv('Results/exp_{}_train_loss.csv'.format(i), index_col = 0)[str(1e-4)],c = 'r')\n", 58 | " plt.plot(pd.read_csv('Results/exp_{}_val_loss.csv'.format(i), index_col = 0)[str(1e-4)],c = 'y')\n", 59 | "plt.ylim(0,0.6) " 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 3, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "df_train = pd.DataFrame()\n", 69 | "df_val = pd.DataFrame()\n", 70 | "for i in range(35):\n", 71 | " df_train[str(i)] = pd.read_csv('Results/exp_{}_train_loss.csv'.format(i), index_col = 0).T.iloc[:,-1]\n", 72 | " df_val[str(i)] = pd.read_csv('Results/exp_{}_val_loss.csv'.format(i), index_col = 0).T.iloc[:,-1]\n", 73 | "df_train, df_val = df_train.T, df_val.T" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 4, 79 | "metadata": {}, 80 | "outputs": [ 81 | { 82 | "data": { 83 | 
"text/plain": [ 84 | "Index(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12',\n", 85 | " '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',\n", 86 | " '25', '26', '27', '28', '29', '30', '31', '32', '33', '34'],\n", 87 | " dtype='object')" 88 | ] 89 | }, 90 | "execution_count": 4, 91 | "metadata": {}, 92 | "output_type": "execute_result" 93 | } 94 | ], 95 | "source": [ 96 | "for i in range(len(df_train.columns)):\n", 97 | " df_train = df_train[df_train[df_train.columns[i]] < 1]\n", 98 | "df_train.index" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 5, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "data": { 108 | "text/plain": [ 109 | "Index(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12',\n", 110 | " '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',\n", 111 | " '25', '26', '27', '28', '29', '30', '31', '32', '33', '34'],\n", 112 | " dtype='object')" 113 | ] 114 | }, 115 | "execution_count": 5, 116 | "metadata": {}, 117 | "output_type": "execute_result" 118 | } 119 | ], 120 | "source": [ 121 | "for i in range(len(df_val.columns)):\n", 122 | " df_val = df_val[df_val[df_val.columns[i]] < 1]\n", 123 | "df_val.index" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": 6, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "df_train = df_train[df_train.columns[::-1]]\n", 133 | "df_val = df_val[df_val.columns[::-1]]" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 8, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stdout", 143 | "output_type": "stream", 144 | "text": [ 145 | "(35, 6)\n" 146 | ] 147 | }, 148 | { 149 | "data": { 150 | "text/plain": [ 151 | "" 152 | ] 153 | }, 154 | "execution_count": 8, 155 | "metadata": {}, 156 | "output_type": "execute_result" 157 | }, 158 | { 159 | "data": { 160 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAmQAAAHVCAYAAABfb+fxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzs3XuczHX///HnzM6eZmdm1x5UDlc6p3R1dfl2TiRJlEr5iFBXykWRy5lKKcmZSAglKvG5UnLpIIoo+uXqXHR1VKHY82F29jCH3x+rLrloF7v7mcPjfru5ZXdnZl/2vc0+9/V5z/tlC4VCAgAAgHXsVhcAAAAQ6whkAAAAFiOQAQAAWIxABgAAYDECGQAAgMUIZAAAABYjkAEAAFiMQAYAAGAxAhkAAIDFHFYXcBCMDgAAAJHEdrQPEI6BTLt377a6BByhzMxM5eTkWF0GjhDrF7lYu8jG+kWuRo0a1crjcMkSAADAYgQyAAAAixHIAAAALEYgAwAAsBiBDAAAwGIEMgAAAIsRyAAAACxGIAMAALAYgQwAAMBiBDIAAACLEcgAAAAsRiADAACwGIEMAADAYgQyAAAAixHIAAAALEYgAwAAsBiBDAAA4AiEQrX3WAQyAACAI/D660m19lgEMgAAgMPk9do0ZkxqrT0egQwAAOAwTZvm1s8/x9Xa4xHIAAAADsO2bQ4tXJiiHj28tfaYBDIAAIAaCgal0aPTlJoa1OjRRbX2uI5aeyQAAIAot3y5U//+d4KmT89XenrtvcySDhkAAEAN5OXZ9fDDHp1/frkMw1erj00gAwAAqIGHH/aopMSmCRMKZbPV7mMTyAAAAKrx/vsJWr7cqb59S3Taaf5af3wCGQAAwB+orJRGj05V48Z+DR5cUiefg039AAAAf2DhwhR9+WW8Fi3KldNZi/OS9kOHDAAA4BB27YrTtGlutW/vU/v25XX2eQhkAAAAhzBmjEeSNG5c7Z05djAEMgAAgIN4441ErVmTrMGDS9SkSaBOPxeBDAAA4AClpVXDw089tVJ33FE3G/n3x6Z+AACAA8yc6dLOnQ6tWJGjhIS6/3x0yAAAAPbz1VcOzZvnkmGU6oILKurlcxLIAAAA9gmFqs4cc7lCuu++ut3Ivz8uWQIAAOzzz38m6733EjV5coEyMoL19nnpkAEAAEjKz7dp3DiPWrasUPfupfX6uQlkAAAAkiZM8Kiw0K4JEwpkr+eERCADAAAx74MP4vXccynq08erM8+s/eHh1anRHjLDMDpImikpTtJC0zQnHvDxIZJul+SXlC3pNtM0f9j3sVsk3bfvpg+bprm4lmoHAAA4an6/NGpUmo49NqChQ4stqaHaDplhGHGSHpd0laQzJHU3DOOMA272kaT/M03zz5JekDR5333TJT0g6XxJ50l6wDCMBrVXPgAAwNF56qkUbdsWr4ceKpTLVTfDw6tTkw7ZeZK+MU3zO0kyDGOZpGslbfv1BqZprt/v9u9J6rnv71dKWmuaZt6++66V1EHS80dfOgAAwNHZvduuqVPdatu2TB07lllWR00CWWNJP+339k5VdbwOpY+k1/7gvo0PvINhGH0l9ZUk0zSVmZlZg7IQjhwOB+sXwVi/yMXaRTbWzzoDBzoUCNj0+ON2ZWVZtwY1CWS2g7zvoP08wzB6Svo/Sa0P576mac6XNP/Xj+fk5NSgLISjzMxMsX6Ri/WLXKxdZGP9rLF+faJefDFDI0YUyeMp0ZEsQaNGjWqllpq8ynKnpKb7vd1E0u4Db2QYRjtJ90rqbJpm+eHcFwAAoD75fNK996bqpJMq1a9f3Q8Pr05NOmRbJZ1iGMYJknZJuklSj/1vYBjGOZKekNTBNM29+31ojaRH9tvI317S6KOuGgAA4CjMnu3WDz84tHx5jhITra6mBh0y0zT9kgaoKlxtr3qX+YVhGA8ZhtF5382mSHJJ+qdhGB8bhrFq333zJI1TVajbKum
hXzf4AwAAWOGbb+L0+OMudelSqksuqZ/h4dWxhULWvLzzD4R27+aqZqRiH0RkY/0iF2sX2Vi/+hMKSd26Zeizz+K1ceNeZWUd3bzKfXvIDrZn/rBwUj8AAIgZK1cm6913EzVqVNFRh7HaRCADAAAxobDQpgcf9Ogvf6lQz571Ozy8OjUanQQAABDpJk/2KDfXrmeeyVNcnNXV/B4dMgAAEPU+/jheixc7deutXp11VqXV5fwPAhkAAIhqgYA0enSqsrKCGj7cmuHh1eGSJQAAiGpLljj16acJmjMnTx5P2J0uIYkOGQAAiGJ79tg1aZJHl15aps6drRseXh0CGQAAiFoPPeRRRYVN48cXynbUp4XVHQIZAACIShs3JmrlSqfuuqtEJ54YsLqcP0QgAwAAUaesTLrnnlQ1a+bXXXeF50b+/bGpHwAARJ25c136/nuHli7NVVKS1dVUjw4ZAACIKt9/H6fHHnOrc2efWrcut7qcGiGQAQCAqBEKSffdl6r4+JAeeKDQ6nJqjEAGAACixurVSdqwIUkjRhTr2GPDZ3h4dQhkAAAgKhQX2/TAA6lq0aJCt9zitbqcw8KmfgAAEBWmTHFr7167nnwyT44ISzh0yAAAQMT7/HOHFi1KUa9epTrnnPAbHl4dAhkAAIhowaA0alSa0tODGjWqyOpyjkiENfQAAAB+77nnnProowTNmpWv1NTwHB5eHTpkAAAgYmVn2zVhgkcXXVSuLl18VpdzxAhkAAAgYo0b51FpqU0TJoT38PDqEMgAAEBE2rw5QStWONW/f4lOPtlvdTlHhUAGAAAiTkVF1fDwP/3Jr7vvDv/h4dVhUz8AAIg4Tzzh0tdfx2vJklwlJ1tdzdGjQwYAACLKjz/G6dFHXerY0afLL4+M4eHVIZABAICI8evwcLtdGjs2coaHV4dABgAAIsbrryfpzTeTNGxYsRo3jpzh4dUhkAEAgIjg9do0ZkyqmjevVJ8+kTU8vDps6gcAABFh+nS3fv45TnPnRt7w8OrQIQMAAGFv+3aHFixIUY8eXp17buQND68OgQwAAIS1X4eHp6YGNXp0ZA4Pr06UNfwAAEC0Wb7cqX//O0HTp+crPT0yh4dXhw4ZAAAIW3l5dj38sEfnn1+url0jd3h4dQhkAAAgbI0f71ZJSdXwcHsUp5Yo/qcBAIBItnVrgpYtS1HfviU67bTIHh5eHQIZAAAIO5WV0qhRqWrc2K/Bg0usLqfOsakfAACEnYULU/Tll/F66qk8OZ3RuZF/f3TIAABAWNm1K07TprnVvr1PV15ZZnU59YJABgAAwsr993skSePGReeZYwdDIAMAAGFj7dpEvf56sgYPLlGTJgGry6k3BDIAABAWfD6b7rsvVaeeWqk77oj+jfz7Y1M/AAAIC48+6tLOnQ6tWJGjhASrq6lfdMgAAIDlvvrKoXnzXDKMUl1wQYXV5dQ7AhkAALBUKCTdc0+qXK6Q7rsvdjby749LlgAAwFIvvJCsLVsSNXlygTIyglaXYwk6ZAAAwDL5+TaNG+dRy5YV6t691OpyLEMgAwAAlpkwwaOCArsmTCiI6uHh1YnhfzoAALDSBx/E67nnUnTbbV6deWZ0Dw+vDoEMAADUO79fGjUqTcceG9CwYcVWl2M5NvUDAIB6t2hRirZti9f8+XlyuaJ/eHh16JABAIB69fPPdk2Z4lbbtmXq2DE2hodXh0AGAADq1dixqQoEbHr44ULZbFZXEx4IZAAAoN6sX5+o1auTNXBgsY4/PnaGh1eHQAYAAOqFzyfde2+qTjqpUv37x9bw8OqwqR8AANSL2bPd+uEHh5Yvz1FiotXVhBc6ZAAAoM59+22c5sxxqUuXUl1ySewND68OgQwAANSpquHhaUpKCun++2NzeHh1CGQAAKBOrVyZrHfeSdTIkUXKyorN4eHVIZABAIA6U1ho04MPevSXv1SoV6/YHR5eHTb1AwCAOjN5ske5uXY980ye4uKsriZ80SE
DAAB14pNP4rV4sVO33urVWWdVWl1OWCOQAQCAWhcISKNGpSorK6jhwxkeXh0uWQIAgFr3zDNOffppgubMyZPHw/Dw6tAhAwAAtWrPHrsmTvSoVatyde7M8PCaIJABAIBa9dBDHlVU2PTIIwUMD68hAhkAAKg1GzcmaOVKp+66q0Qnnsjw8JoikAEAgFpRXi7de2+amjXz66672Mh/ONjUDwAAasWcOS59951DS5fmKinJ6moiCx0yAABw1L7/Pk6PPebWNdf41Lp1udXlRBwCGQAAOCqhkHTffamKjw9p7NhCq8uJSAQyAABwVFavTtKGDUkaMaJYxx7L8PAjQSADAABHrLjYprFjU9WiRYVuucVrdTkRi039AADgiE2d6taePXYtXJgnB6niiNEhAwAAR+Tzzx166qkU9exZqnPOYXj40SCQAQCAwxYMSqNGpSk9PahRo4qsLifi0VwEAACH7bnnnProowTNmpWvtDSGhx8tOmQAAOCw5OTYNWGCRxddVK4uXXxWlxMVCGQAAOCwjBvnUWmpTRMmFDI8vJYQyAAAQI1t3pygF15wql+/Ep18st/qcqIGgQwAANRIRYV0zz2patrUr0GDSqwuJ6qwqR8AANTIE0+49PXX8VqyJFfJyWzkr010yAAAQLV+/DFOjz7qUseOPl1+OcPDaxuBDAAA/KFQSBozJlV2uxgeXkcIZAAA4A+tWZOkdeuSNHRosRo3Znh4XajRHjLDMDpImikpTtJC0zQnHvDxSyU9KunPkm4yTfOF/T4WkPTZvjd/NE2zc20UDgAA6p7Xa9N996WqefNK9enD8PC6Um0gMwwjTtLjkq6QtFPSVsMwVpmmuW2/m/0o6VZJww7yED7TNP9SC7UCAIB6Nn26Wz//HKe5c/MUH291NdGrJh2y8yR9Y5rmd5JkGMYySddK+i2Qmaa5Y9/H6GMCABAltm93aMGCFPXo4dW55zI8vC7VJJA1lvTTfm/vlHT+YXyOJMMw/i3JL2miaZorD7yBYRh9JfWVJNM0lZmZeRgPj3DicDhYvwjG+kUu1i6yheP6BYPS/fc71KCBNHVqvDIywqu+aFOTQHawoQiHc/jIn0zT3G0YxomS3jIM4zPTNL/d/wamac6XNP/Xx87JyTmMh0c4yczMFOsXuVi/yMXaRbZwXL9ly5K1eXMDTZ+er1DIpzArL2w0atSoVh6nJq+y3Cmp6X5vN5G0u6afwDTN3fv++52kDZLOOYz6AABAPcvLs+vhhz0677xyde3K8PD6UJMO2VZJpxiGcYKkXZJuktSjJg9uGEYDSaWmaZYbhpEp6WJJk4+0WAAAUPfGj3eruNiuCRMKZeeArHpR7ZfZNE2/pAGS1kjaXvUu8wvDMB4yDKOzJBmGca5hGDsldZX0hGEYX+y7e3NJ/zYM4xNJ61W1h2zb/34WAAAQDrZuTdCyZSnq27dEp5/O8PD6YguFwm4WVWj37hpfEUWYCcd9EKg51i9ysXaRLVzWr7JS6tAhS8XFNm3YkC2nM+wyQtjZt4fsYPvtDwvDxQEAgCTpySdT9OWX8XrqqTzCWD3jyjAAANCuXXGaNs2tK64o05VXllldTswhkAEAAN1/v0fBoDRuHMPDrUAgAwAgxq1dm6jXX0/WkCElato0YHU5MYlABgBADPP5bBozJlWnnlqpO+4osbqcmMWmfgAAYtijj7r0008OrViRo4QEq6uJXXTIAACIUV9/7dATT7jUtWupLrigwupyYhqBDACAGBQKSaNHpyolJaQxY4qsLifmcckSAIAY9MILydqyJVGTJxcoIyNodTkxjw4ZAAAxJj/fpnHjPGrZskLdu5daXQ5EIAMAIOZMnOhRQYFdEyYUMDw8TLAMAADEkA8/jNdzzzl1221enXkmw8PDBYEMAIAY4fdLo0al6Zhjgho2rNjqcrAfNvUDABAjFi1K0RdfxGv+/Dy5XAwPDyd0yAAAiAE//2zXlClutW1bpo4dGR4ebghkAAD
EgLFjUxUI2PTww4Wy2ayuBgcikAEAEOU2bEjU6tXJGjiwWMcfz/DwcEQgAwAgivl80r33puqkkyrVvz/Dw8MVm/oBAIhijz/u1o4dDi1fnqPERKurwaHQIQMAIEp9+22cHn/cpS5dSnXJJQwPD2cEMgAAolAoJN1zT5qSkkK6/36Gh4c7AhkAAFHo5ZeT9c47iRo5skhZWQwPD3cEMgAAokxRkU0PPujR2WdXqFcvhodHAjb1AwAQZSZPdisnx67Fi/MUF2d1NagJOmQAAESRTz6J19NPp+jWW736858rrS4HNUQgAwAgSgQC0qhRqcrKCmr4cIaHRxIuWQIAECWeecapTz9N0Jw5efJ4GB4eSeiQAQAQBfbutWviRI9atSpX584MD480BDIAAKLAQw95VF5u0/jxBQwPj0AEMgAAItymTQl66SWnBgwo0UknMTw8EhHIAACIYOXlVSfyN2vm1113sZE/UrGpHwCACDZnjkvffefQ0qW5SkqyuhocKTpkAABEqB074vTYY25dc41PrVuXW10OjgKBDACACBQKSffdl6r4+JDGji20uhwcJQIZAAAR6JVXkrR+fZKGDy/WsccyPDzSEcgAAIgwxcU2PfBAqlq0qNCtt3qtLge1gE39AABEmKlT3dqzx66FC/Pk4Cd5VKBDBgBABPn8c4eeeipFPXuW6pxzGB4eLQhkAABEiGBQGj06TenpQY0aVWR1OahFNDoBAIgQS5c69eGHCZo5M19paQwPjyZ0yAAAiAA5OXY98ohHF15Yrhtu8FldDmoZgQwAgAgwbpxHpaU2TZxYyPDwKEQgAwAgzG3ZkqAXXnCqX78SnXyy3+pyUAcIZAAAhLGKCmn06FQ1berXoEElVpeDOsKmfgAAwtj8+S59/XW8Fi/OVXIyG/mjFR0yAADC1E8/xWnGDJc6dvSpXTuGh0czAhkAAGHo1+HhdrsYHh4DCGQAAIShNWuStG5dkoYOLVbjxgwPj3YEMgAAwozXa9OYMR41b16pPn0YHh4L2NQPAECYmTHDrd27HZozJ0fx8VZXg/pAhwwAgDCyfbtDCxakqHt3r849t8LqclBPCGQAAISJquHhqXK7g7rnHoaHxxIuWQIAECZMM1lbtyZq+vR8padz5lgsoUMGAEAYyMuz6eGHPTrvvHJ17crw8FhDIAMAIAw88ohHxcV2TZhQKDs/nWMOSw4AgMU2b7bp+edTdMcdXp1+OsPDYxGBDAAAC1VWSgMHxqlxY7+GDCm2uhxYhE39AABY6MknU/T553Y99VSBnE428scqOmQAAFgkL8+u6dPd6tgxqCuvLLO6HFiIQAYAgEVmz3bJ57NpwgT2jcU6AhkAABbYvduup59O0Y03+nT66VZXA6sRyAAAsMDMmW4Fg2IjPyQRyAAAqHc7dsRp2TKnevb0qmnTgNXlIAwQyAAAqGfTprnlcIR0990lVpeCMEEgAwCgHn35pUMvvZSsPn28atgwaHU5CBMEMgAA6tGUKW65XCH17093DP9FIAMAoJ589FG8Xn89Wf36lahBAw6BxX8RyAAAqCeTJ7uVnh7Q7bd7rS4FYYZABgBAPXj33QRt3JikgQNL5HLRHcPvEcgAAKhjoZA0aZJHxx0XUO/edMfwvwhkAADUsXXrEvXBBwkaPLhYSUlWV4NwRCADAKAOBYPS5MkeNWvml2GUWl0OwpTD6gIAAIhm//pXkrZti9fs2fmKj7e6GoQrOmQAANQRv1+aMsWj5s0rde21PqvLQRijQwYAQB355z+d+v57hxYtypWdFgj+AN8eAADUgfJyafp0l845p0JXXFFudTkIc3TIAACoA88+m6Ldux2aPj1HNpvV1SDc0SEDAKCWeb02zZrl0sUXl6tVqwqry0EEoEMGAEAte/LJFOXkxOmpp/KsLgURgg4ZAAC1qKDAprlzXWrf3qeWLSutLgcRgkAGAEAtmjfPpeJim4YPL7a6FEQQAhkAALUkO9uuhQtTdO21Pp1
xht/qchBBCGQAANSSxx5zqaLCpqFD6Y7h8BDIAACoBbt2xemZZ1J0002lOvHEgNXlIMLU6FWWhmF0kDRTUpykhaZpTjzg45dKelTSnyXdZJrmC/t97BZJ9+1782HTNBfXRuEAAISTGTNckqRBg+iO4fBV2yEzDCNO0uOSrpJ0hqTuhmGcccDNfpR0q6SlB9w3XdIDks6XdJ6kBwzDaHD0ZQMAED6++SZOpulU795eNW4ctLocRKCaXLI8T9I3pml+Z5pmhaRlkq7d/wamae4wTfNTSQd+F14paa1pmnmmaeZLWiupQy3UDQBA2Jg2zaPExJAGDiyxuhREqJoEssaSftrv7Z373lcTR3NfAADC3uefO7RqVbLuuMOrzEy6YzgyNdlDdrAJXKEaPn6N7msYRl9JfSXJNE1lZmbW8OERbhwOB+sXwVi/yMXaWWfmTIfS0kK6555EpaUlHtFjsH6oSSDbKanpfm83kbS7ho+/U1KbA+674cAbmaY5X9L8fW+GcnJyavjwCDeZmZli/SIX6xe5WDtrbN0ar1dfzdLo0UXy+0t0pEvA+kWuRo0a1crj1CSQbZV0imEYJ0jaJekmST1q+PhrJD2y30b+9pJGH3aVAACEmVBImjTJo6ysgG67zWt1OYhw1e4hM03TL2mAqsLV9qp3mV8YhvGQYRidJckwjHMNw9gpqaukJwzD+GLfffMkjVNVqNsq6aF97wMAIKJt2pSoLVsSNWhQsZzOmu7kAQ7OFgqF3TdRaPfuml4RRbih7R7ZWL/IxdrVr1BI6tQpUzk5dm3atFeJR7Z17DesX+Tad8nyYHvmDwsn9QMAcJjWrEnSJ58kaOjQ4qMOY4BEIAMA4LAEAtLkyW6ddFKlbrjBZ3U5iBI1Gp0EAACqrFyZrP/8J17z5uXJwU9R1BI6ZAAA1FBlpTRtmlstWlSoU6cyq8tBFCHbAwBQQ8uWOfXDDw4tWZIrOy0N1CK+nQAAqAGfT3r0UbfOPbdcbduWW10OogwdMgAAamDx4hT98kucHn88X7ajPuQA+D06ZAAAVKO42KbZs11q06ZMF1xQYXU5iEIEMgAAqrFwYYry8+M0YkSx1aUgShHIAAD4A3l5Ns2b51LHjj6dfXal1eUgShHIAAD4A3PmuOX12jR8ON0x1B0CGQAAh/DLL3YtWpSiG27w6dRT/VaXgyhGIAMA4BBmzXLL75eGDKE7hrpFIAMA4CB++CFOzz3nVI8epTr++IDV5SDKEcgAADiI6dPdcjikQYPojqHuEcgAADjAf/7j0IoVyfrb37w69tig1eUgBhDIAAA4wNSpbqWkhHTnnXTHUD8IZAAA7OeTT+L16qvJ+vvfS5SeHrK6HMQIAhkAAPuZPNmtBg0CuuMOr9WlIIYQyAAA2GfLlgRt2JCkAQNK5HbTHUP9IZABACApFJImTXLr2GMDuuUWumOoXwQyAAAkrV+fqK1bEzVoULGSk62uBrGGQAYAiHnBYFV37Pjj/brpplKry0EMclhdAAAAVnvllSR9/nmCZs3KV0KC1dUgFtEhAwDENL9fmjLFrdNOq9R11/msLgcxig4ZACCmrViRrG+/jdfChXmKi7O6GsQqOmQAgJhVXl41s/LssyvUoUOZ1eUghhHIAAAxa+lSp3budGjUqGLZbFZXg1hGIAMAxKTSUptmznTrwgvL1apVudXlIMaxhwwAEJMWLUpRdnacFizIozsGy9EhAwDEnMJCm+bMcenyy8t07rmVVpcDEMgAALHniSdcKiiwa8SIIqtLASQRyAAAMSYnx64FC1LUubNPLVr4rS4HkEQgAwDEmNmzXSors2noULpjCB8EMgBAzNi1y64lS1JkGKU6+eSA1eUAvyGQAQBixsyZboVC0uDBJVaXAvwOgQwAEBO++y5Oy5Y51auXV02a0B1DeCGQAQBiwrRpbiUkhDRwIN0xhB8CGQAg6m3b5tDLLyerTx+vsrKCVpcD/A8CGQAg6k2Z4pbbHVL//nT
HEJ4IZACAqPbBB/F6441k9e9forS0kNXlAAdFIAMARLVJkzzKzAyoTx+v1aUAh0QgAwBErU2bEvTuu4kaOLBEKSl0xxC+CGQAgKgUClV1xxo18qtnT7pjCG8EMgBAVFq7NlEffZSgIUNKlJRkdTXAHyOQAQCiTjAoTZ7s0Qkn+NW1a6nV5QDVclhdAAAAtW3VqmRt3x6vOXPy5OAnHSIAHTIAQFSprKw6d6x580pdc02Z1eUANcLvDQCAqGKaTu3Y4dDTT+fKTtsBEYJvVQBA1Cgrk2bMcKtlywq1a1dudTlAjdEhAwBEjWeeSdHPP8dp5sx82WxWVwPUHB0yAEBUKCmx6bHHXGrVqlwXX1xhdTnAYSGQAQCiwsKFKcrNjdPIkUVWlwIcNgIZACDi5efbNG+eSx06+HTOOZVWlwMcNgIZACDizZ3rUkmJTcOHF1tdCnBECGQAgIi2d69dTz6Zouuv9+n00/1WlwMcEQIZACCizZrlkt9v05AhdMcQuQhkAICI9dNPcXr22RTddFOpTjghYHU5wBEjkAEAItb06W7Z7dKgQXTHENkIZACAiPTNNw698EKybrnFq0aNglaXAxyVsAxku3eHZVkAgDAyZYpbyckhDRhQYnUpwFELy+TTrl1DvfxyktVlAADC1GefxWv16mT17etVRgbdMUS+sAxkJ57o1513pmvAgDQVFjKMDADwe5Mnu5WWFlTfvnTHEB3CMpCtXJmjYcOKtGpVsi6/vKHeeSfB6pIAAGHi/fcT9NZbSbrrrhJ5PCGrywFqRVgGModDGjy4RKtW5Sg5OaRu3TL14IMelZVZXRkAwEqhkDRxolsNGwb0t795rS4HqDVhGch+9Ze/VGrNmmz17u3V/PkuXX11lrZtc1hdFgDAIm+/naj/9/8SNWhQsZKT6Y4heoR1IJMkpzOkCRMKtWRJrnJy7OrUKUvz5qUoyB5OAIgpoZA0aZJbTZv61aNHqdXlALUq7APZry6/vFxvvpmttm3LNG5cqgwjQzt3xlldFgCgnrz2WpI+/TRBQ4YUK4GtxYgyERPIJCkjI6iFC/M1fXq+Pv00Xu3aZWnFimSF6FoDQFQLBKpeWXnKKZW64Qaf1eUAtS6R+Av4AAAgAElEQVSiApkk2WxSt24+rV2brdNPr9TddzdQ//4NlJ/P8RgAEK1efDFZX38dr+HDixXHxRFEoYgLZL86/viAVqzI1ciRRXrttSS1a9dQGzfSwwaAaFNRIU2b5taf/1yhjh15uT2iU8QGMkmKi5PuvrtEq1fnyO0Oqnv3TN1/v0c+utkAEDWef96pn35yaMSIYtm4GIIoFdGB7FdnnVWp117L1m23lejJJ13q2DFLn3/O8RgAEOl8PptmznTr/PPL1aZNudXlAL+xlZTINX16rT1eVAQySUpOlsaNK9Jzz+WqsNCuq6/O0uzZLgUCVlcGADhSTz/t1J49cRo5ku4YwkRlpZxPP62GF18sz7RptfawURPIftWmTbnWrdur9u3LNGGCR127Zuinn9gBCgCRpqjIptmz3Wrbtkznn19hdTmIdaGQklatUsM2bZR2773yn3SSsletqrWHj7pAJknp6SE98US+Zs7M17ZtVcdjmCbHYwBAJJk/36WCArtGjCi2uhTEuIR331Vmp05K799foaQk5S5erNwVK1TZsmWtfY6oDGRS1fEYN97o07p12WrRolKDBzdQ374NlJcXtf9kAIgaeXl2zZ+fok6dfDrrrEqry0GMcnzxhdJ79lSmYciena38GTOU/cYbKm/XTrV9DT3q00mTJgGZZq7uvbdIa9cm6fLLs7R+faLVZQEA/sDs2S75fDYNH053DPUv7qeflDZwoLKuvFIJH32kwjFjtHfTJvkMQ3V1EF7UBzKp6mt3550lWr06Ww0aBNWzZ4buvTdVPh87RAEg3OzebdfTT6foxht9OuUUv9XlIIbY8/LkGTtWDS+9VMmvvqqSO+/UnnfflbdfPykpqU4/d0y
dDdGihV+vvpqtiRM9WrDApU2bEjR7doH+/Gfa4QAQLmbOdCsYlIYMoTuG+mHz+ZSyYIFcc+bI5vWq1DBUPGSIgo0b11sNMdEh219SkjR2bJGWLcuR12vXNddk6tFHXfLzSxgAWG7HjjgtW+bUzTeXqmlTzi1CHfP75XzuuaojLCZNUvmFFyp73ToVTptWr2FMisFA9qtWrSr05pt71amTT1OmeNSlS6Z27OB4DACw0rRpbjkcId19N90x1KFQSEmvvaastm2VNmKEAk2aKOfFF5W/aJH8p51mSUkxG8gkKS0tpDlzCjR7dr6+/tqh9u2z9PzzTo7HAAALfPmlQy+9lKw+fbw65pig1eUgSiW8/74yr71W6bffLtlsynvySeW8/LIqzj/f0rpiOpD96vrrq47HOPvsSg0blqY+fRooN5cvDQDUpylT3HK5Qurfv8TqUhCFHP/5j9JvvVWZ11+vuJ07VTB5srLffFNlHTrU+hEWR1RfTW5kGEYHSTMlxUlaaJrmxAM+nihpiaSWknIldTNNc4dhGM0kbZf0n303fc80zX61VHutatw4oOXLc7VgQYomTvSobdsETZtWoHbtmJ0GAHXt44/j9frryRo2rEgNGnCZArXHvnu33NOmyWmaCqWkqGjUKHlvv12h5GSrS/udattAhmHESXpc0lWSzpDU3TCMMw64WR9J+aZpnixphqRJ+33sW9M0/7LvT1iGsV/Z7dLf/+7Vq69mKysrqFtuydDIkakqLbU+OQNANJs0ya309IDuuMNrdSmIEraCArnHj9cxrVrJ+eKL8vbpoz2bN6tk4MCwC2NSzS5ZnifpG9M0vzNNs0LSMknXHnCbayUt3vf3FyRdbhhGxKaY5s39euWVbPXrV6LnnnOqffssffhhvNVlAUBUevfdBG3cmKSBA0vkctEdw1EqK1PKvHk65uKL5Zo7V75OnbR340YVjR2rUHq61dUdUk0CWWNJP+339s597zvobUzT9EsqlJSx72MnGIbxkWEYbxuG0eoo6603iYnSmDFFMs1clZdL112XqWnT3ByPAQC1KBSSJk3y6LjjAurdm+4YjkIgoOTly9WwVSuljhuninPOUfaaNSqYNUuBpk2trq5aNdlDdrBO14G/whzqNj9L+pNpmrmGYbSUtNIwjDNN0yza/4aGYfSV1FeSTNNUZmZmDcqqH507S5deGtTgwTZNn+7Wpk0pWrTIr1NOsbqy8ORwOMJq/XB4WL/IFalr9+qrNn3wQbwef9yvJk0ir/7aEqnrFxZCIdlee01x990n+xdfKNiypSqfekr2yy5TmtW1HYaaBLKdkvaPlk0k7T7EbXYahuGQlCopzzTNkKRySTJN8wPDML6VdKqkf+9/Z9M050uav+/NUE5OzuH+O+rclClSq1ZJGj06Teee69ADDxSpZ8/ScHhhRljJzMxUOK4faob1i1yRuHbBoHTvvVlq1syvTp32KsLKr1WRuH7hIP6DD+R55BElvPee/M2aqWDuXJVdc03Vqybr6evZqFGjWnmcmgSyrZJOMQzjBEm7JN0kqccBt1kl6RZJWyTdKOkt0zRDhmFkqSqYBQzDOFHSKZK+q5XKLdC5c5nOPXevBg9uoFGj0rRuXZKmTi1QVhbn5QDA4frXv5K0bVu8Zs/OVzzbdHEY4r75Rp5Jk5T86qsKZGaqYPx4ld58syL5G6naPWT79oQNkLRGVUdYmKZpfmEYxkOGYXTed7MnJWUYhvGNpCGSRu17/6WSPjUM4xNVbfbvZ5pmXm3/I+rTcccFtXRprh56qFCbNiWqbdssrVlTtwNHASDa+P3SlCkeNW9eqWuv9VldDiKEfc8epY4cqYZt2yrx7bdVNHSo9m7erNJbb43oMCZJtlD4HUsf2r37wCui4emrrxwaMKCBvvgiXt27e/Xgg0VKSQm7r2e9ou0e2Vi/yBVpa/f8804NG5amp57K05VXllldjuUibf3qm624WK65c5Uyf75slZXy9uq
lkkGDFMzKsrq0Xy9ZHvUGJo6jPwqnnurX6tXZGjCgWMuWOXXFFVn6978jO6EDQF0rL5emT3fpnHMq1L49YQx/oLxcKQsXquFFF8k9c6bKr7hCezdsUNHDD4dFGKtNBLKjlJAgjR5drBUrchUMStdfn6nJk92qrLS6MgAIT88+m6Ldux0aObKIF0bh4IJBJb/0khq2aaPUBx6Qv3lzZb/6qvLnzlXghBOsrq5OEMhqyfnnV2jt2mzdeKNPM2e6de21mfrmmzirywKAsOL12jRrlksXX1yuVq0qrC4HYSjx7beV1aGDGgwYoJDbrdylS5W7fLkqzz7b6tLqFIGsFrndIc2YUaD58/P0449xuvLKLD39tFPht00PAKzx5JMpysmJ08iRRdXfGDEl/tNPlXHTTcro0UO2oiLlP/aYsl9/XeWtW4fF8O+6RiCrA506lenNN7N14YUVuvfeNPXqla49e/hSA4htBQU2zZvn0hVXlKllS/Z1oErcjh1Ku/NOZV11lRyff67CBx/U3rfflq9Ll6oh0zEidv6l9eyYY4J65pk8jR9foC1bEnT55Vl69VWOxwAQu+bNc6mw0K4RI+iOQbLn5MgzZowatmmjpDVrVHz33dq7ebO8t99eNb8wxhDI6pDNJt16a6nWrMlR06YB3XFHugYPTlNxcfS3XgFgf9nZdi1cmKLrrivVGWcwFDiW2bxeuWbMUMOLLlLK4sUq7dZNezdvVvHIkQp5PFaXZxkCWT04+WS/Vq3K0aBBxXrhhWRdcUWW/t//S7C6LACoN4895lJFhU1DhxZbXQqsUlkp5+LFanjxxfJMnary1q219623VDhpkoLHHGN1dZYjkNWT+HhpxIhivfhijux26YYbMjRhglsVvMgIQJTbtStOzzyTom7dSnXiiQGry0F9C4WU9K9/qWGbNkq75x75TzxR2atWKX/BAgVOPtnq6sIGgayenXtupd54I1s33VSq2bPduuaaTH31VU1GigJAZJoxwyVJ+sc/6I7FmoTNm5V59dVK79dPoaQk5S5erNwVK1TZsqXVpYUdApkFXK6Qpk4t1JNP5mn37jhddVWWnnwyRUFmlAOIMt98EyfTdKp3b68aN+ZJLlY4tm1Teq9eyuzaVXF79ih/+nRlv/GGytu1i4kjLI4EgcxCHTqU6a23snXxxeW6//5U3Xxzun7+mSUBED2mTfMoMTGkgQNLrC4F9SBu506l3X23stq3V8KHH6pwzBjt2bRJvm7dpDgOS/8j/PS3WFZWUIsX52nixAJt3Zqgdu0aatUqjscAEPm++MKhVauSdfvtXmVm0h2LZra8PHkefFANW7VS8urVKunfX3vefVfefv2k5GSry4sIBLIwYLNJvXqVas2abJ1wgl/9+6dr4MA0FRXR1gUQuSZP9ig1Nah+/eiORSubzyfXY4/pmIsuUsrChSrt0kV7Nm1S8b33KpSWZnV5EYVAFkZOOimgl17K0dChRXr55WS1a5elLVs4HgNA5Nm6NV7r1iXpzjtLlJrK/Lio4/fLuXSpGl5yiTwTJ6rigguUvXatCqdNU7BxY6uri0gEsjATHy8NGVKilStzFB8vde2aoXHjPCovt7oyAKiZUEiaNMmjrKyAbrvNa3U5qE2hkJLWrFFWu3ZKGz5cgcaNlfPii8p7+mn5Tz/d6uoiGoEsTP31r5VauzZbN99cqnnzXOrUKUvbt3M8BoDwt2lTgrZsSdTdd5fI6aQ7Fi0Stm5V5nXXKf2226RgUHkLFyrn5ZdVcf75VpcWFQhkYczpDGnSpEI9/XSusrPt6tgxS088wfEYAMLXr92xxo39uvlmumPRwPHVV2rwt78p87rrFPfTTyqYPFnZb72lsquu4giLWkQgiwBXXFGuN9/MVps2ZXrooVR165ahXbtYOgDhZ82aJH38cYKGDi2OxfnQUcW+e7dShw1T1uWXK3HLFhWNHKm9776r0ptvlhxcsalt/FSPEJmZQT31VL6mTi3Qxx/
Hq127hnrpJV5KDCB8BALS5MlunXRSpW64wWd1OThCtsJCuR95RMe0aiXnihXy9umjvZs3q+TuuxXiCIs6QyCLIDab1L17qdauzdYpp/g1YEAD3XlnmgoKaBkDsN7Klcn6z3/iNWxYMQ2USFRWppR583TMRRfJNWeOfB07au/GjSoaO1bB9HSrq4t6BLII1KxZQC++mKPhw4v0yivJateuoTZt4ngMANaprJSmTXPrzDMrdfXVZVaXg8MRCCjZNNWwVSuljhuninPOUfbrr6vgsccUaNrU6upiBoEsQjkc0j/+UaJVq3LkdAZ1002ZGjvWozKeBwFYYNkyp374waGRI4tk5ydLZAiFlPjmm8q68ko1GDxYwcxM5Sxfrrxnn5W/RQurq4s5/G8T4c4+u1Jr1uTo1lu9WrDApY4ds/TFF1wrAFB/fD7p0UfdOvfccrVty6GJkSD+ww+V0bWrMnr3ls3nU97cucp55RVVXHKJ1aXFLAJZFEhODmn8+EI9+2yu8vPt6tQpS3PmuBQIWF0ZgFiweHGKfvklTiNHFnMKQpiL+/ZbNejbV1nXXCPH11+rYPx47V2/XmWdO4vWprX46keRyy6rOh7jiivKNH68R4aRoZ0746wuC0AUKy62afZsl1q3LtOFF1ZYXQ4Owb5nj1JHjVLDyy5T4vr1Kho6tOoIi1tvlRLYgxwOCGRRJj09qPnz8zVjRr4+/zxe7dpl6YUXkhXisGwAdWDhwhTl51d1xxB+bMXFck+ZooYXXyzn88+rtFevqiMshgxRyOWyujzsh0AWhWw2yTB8Wrs2W82bV2rQoAbq16+B8vK4lgCg9uTl2TRvnksdO/p09tmVVpeD/VVUKOXJJ9XwoovkfvRRlV9xhfZu2KDC8eMVzMqyujocBIEsiv3pTwG98EKuRo8u0po1SWrXrqHefpujswHUjjlz3PJ6bRo2jO5Y2AgGlbxypRq2bq3U+++Xv3lzZb/6qvLnzlXghBOsrg5/gEAW5eLipAEDSrR6dbY8nqB69MjQmDEe+ThEG8BR+OUXuxYtcqpLF59OO81vdTmQlLhxozKvukoN7rpLIbdbuUuXKnf5clWefbbVpaEGCGQxokULv157LVt9+pToqadcuuqqLH32WbzVZQGIULNmueX32zR0KN0xq8V/9pnSu3dXRvfushcWKv+xx5T9+usqb92a4d8RhEAWQ5KTpYceKtLzz+equNiuq6/O1GOPcTwGgMPzww9xeu45p3r0KNXxx/MEYpW4H35Q2l13KatDB8V/9pkKH3xQe99+W74uXTjCIgKxYjHo0kvLtW7dXl11VZkmTvTohhsy9OOPHI8BoGamT3fL4ZAGDaI7ZgV7bq4899+vhq1bK+n111V8993au3mzvLffLiWyTzhSEchiVIMGIc2dm69Zs/L15ZdVx2MsX87xGAD+2FdfOfTii8m69Vavjj02aHU5McXm9co1Y4YaXnSRUp5+WqXdumnvu++qeORIhTweq8vDUSKQxTCbTbrhBp/WrcvWn/9cqSFDGuiOOxooL49vCwAHN2WKW05nSHfdVWJ1KbGjslLOJUvU8OKL5Zk6VeWXXqrst95S4aRJCh57rNXVoZbwkxdq0iSg5ctzNWZMod58M0lt22bpzTdpewP4vU8+iderrybr738vUXo63bE6FwopafVqNbzsMqWNHi3/iScqe9Uq5S9YIP/JJ1tdHWoZgQySqo7H6NfPq1deyVZGRlC9e2do9OhU+Xy8QgdAlcmT3WrQIKA77vBaXUrUS9iyRZnXXKP0v/9doYQE5T79tHJXrFBly5ZWl4Y6QiDD75xxhl+vvJKtvn1LtGRJitq3z9LHH3M8BhDr3nsvQRs2JGnAgBK53Ww2rSuObduU3quXMm+8UXG//KL86dOVvXatyq+4giMsohyBDP8jKUl64IEiLV+eI5/Pps6dMzVjhkt+zn4EYlIoJE2a5NaxxwZ0yy10x+rEjz8q7R//UFb79kr44AM
V3nef9mzaJF+3blWXMBD1CGQ4pEsuqdC6dXvVubNPU6d6dP31mfr+e54YgFizfn2i3n8/UYMGFSs52epqokvcd9/Jc//9im/RQsmrVqmkf3/t2bxZ3v79xRc7tthC4XfOQWj37t1W14ADvPxykkaPTlNlpTR2bJF69Cg9aPc8MzNTOTk59V8gagXrF7nqau2CQemqqzJVXGzXhg17lZBQ658i9vj9Slq7VimLFytx0yaFHA4Fb75Z2XfdpWDjxlZXh8PUqFEjSTrq68l0yFAj115bprVr9+qccyo1YkSabrutgXJy+PYBot0rryTp888TNGRIMWHsKNl/+UWu6dN1zPnnK/322xX37bcqGj5ce95/X4H58wljMc5hdQGIHI0bB7VsWa4WLkzRxIketW2boKlTC9S+fbnVpQGoA36/NHWqW6eeWqnrr/dZXU5kCoWU8M47SlmyRElr1sgWCKisTRsVTJig8rZtJQc/hlGF7wQcFrtd6tvXq0svLdfAgQ30t79l6OabvXrggSKlpITd5W8AR2HFimR98028Fi7MY1/5YbIVFMhpmkp55hk5vvtOgQYN5O3bV96ePRVo1szq8hCGCGQ4Iqef7tfq1dmaOtWtuXNdevfdRM2ala8rr7S6MgC1oby8ambl2WdXqEOHMqvLiRjxH3+slCVLlPzyy7KVlamiZUvlz5olX6dOVS9hBw6BQIYjlpgo3Xtvsdq2LdegQWm6/vpM3XBDUA0aeNSgQfCQf5h9C4S/pUud2rnTocmTczn+qho2n0/JK1fKuWSJEj79VEGnU6U33ihv797yn3mm1eUhQhDIcNQuvLBC69Zla9w4jzZudConx6myskNv+Hc69w9ooT8Mb7/+8XhC/FAA6klpqU0zZ7p14YXluvRS9ogeiuObb+RcskTOf/5T9qIiVZ52mgrGj5fvhhsUcrutLg8RhkCGWuHxhDRlSqEyM+OVk5Mjn0/Kz7fX6M/OnfHKz7ersNCmUOjgqSsu7lDB7Y8DXTxDBoDDtmhRirKz47RgQT6/CB2oslJJr7+ulCVLlLh5s0Lx8fJ16qTS3r1Vcd55nKaPI0YgQ51ITpaSk4Nq1KjmA4gDAamw0FZtgMvLs+vHHx365JOqt8vLD/0E6HIdKqz9Psilp//37ykpdOMQuwoLbZozx6XLLy/TuedWWF1O2LDv2qWUpUvlXLpUcXv3yt+kiYpGjVLpTTcpmJVldXmIAgQyhI24OCk9PaT09ICkQI3uEwpJPt+vIc6mvLyDh7iCgqr//vCDY1837tCXVOPja3IZ9fe3SUsL8up1RIUnnnCpoMCuESOKrC7FesGgEjdtknPxYiWtXSuFQipv21YFvXur/LLLGGmEWsWPEEQ0m01yOkNyOgM6nDMV/X6psHD/0Hboztx33zl++3tl5aFbZx5P9Xvh/tuNqwp0ycl04xA+cnLsWrAgRddc41OLFrE7vNaWl/ffIyt27FAgI0Mld96p0ptvVuBPf7K6PEQpAhliksMhZWQElZFR80uqoZDk9R4suP3v+/Ly7Pr226ogV1x86G5cYmLod1226sNcSKmpQX4xR52YPdulsjKbhg2Lwe5YKKT4Dz9UyuLFSl69WrbycpWff76Khw+X76qrxMvDUdcIZEAN2WySyxWSyxVQ06Y1u6QqSZWV+u2SaXVh7uuvHb9dYvX7D946s9lCSk0NHTTA7b8X7sA/zCnGH9m1y64lS1JkGKU6+eSaf39HOpvXq+SXXlLKkiWK/+ILBV0uld50k7y9esnfvLnV5SGGEMiAOhYfL2VlBZWVdXjduOLi6l7gUPXx7Gy7vvqqKsh5vYfuxiUl/fGrUjMygrr8ciktrTb+1Yg0M2e6FQxKgweXWF1KvXD85z9VB7iuWCF7cbEqmzdXwcSJ8l1/vUIul9XlIQYRyIAwZLNVHSXi8QR0/PE171aUl/9RN+73QW779v9244LB/3bjmjVrqMsuK9Nll5XroosqlJzMSKxo991
3cVq2zKlbbvGqSZMo7o5VVCjptdeqjqx47z2FEhLku/pqeXv3VuX//R9HVsBSBDIgiiQmSsccE9Qxx9S8GxcMSkVFNu3dG6dPPknXv/7l17JlTi1a5FJiYkgXXFCuyy6r+nPSSX5+ZkWh6dPdSkgIaeDA6OyOxe3cKeezz8r5/POKy8mR//jjVXjfffJ166ZgerrV5QGSCGRAzLPbpbS0kNLS/LrooqC6ds1TWZn0/vuJWr++6s/YsakaO1Zq2tSvNm3K1bZtmS6+uIKB8lFg2zaHVq5M1l13lahhw5oH+bAXCChxw4aqbthbb0mSytq1U2nv3ipv3brqGx8II7ZQKOyeUEO7d++2ugYcoczMTOXk5FhdBo7QodZv586438LZO+8kyuu1Kz4+pPPOq/jt8uZpp9E9s9KR/r/3t7810HvvJWrLlj1KSwu7nweHzZ6bK+eyZXI++6wcP/6oQFaWSrt3V2nPngocztk49YznzsjVqFEjSTrqZz8CGWoVTyqRrSbrV1Ehbd2aoA0bErV+fZK2b6+aT3XccYHfwtkll5TL4wm755aodiT/733wQbw6d87SiBFFGjQogi9XhkJK2LpVziVLlPzKK7JVVKj8wgvl7d1bZR06SAkJVldYLZ47IxeBDGGJJ5XIdiTr9/PPdm3YkKS33krUpk2JKi62y+EI6f/+r0Jt2pTrssvKdOaZdM/q2pGsXbduGdq+3aEtW/ZG5OVnW0mJklesUMozzyh++3YF3W6Vdu2q0l695D/1VKvLOyw8d0au2gpk7CEDcFSOOy6o7t1L1b17qSorpQ8/TPjt8ubEiR5NnOhRw4aB38LZpZeWR8WlsUi3aVOC3nknUQ8+WBhxYcyxfft/j6zwelXRooUKpkyR77rrFHI6rS4POCJ0yFCr+C0vstX2+u3da//t0ubGjYkqKLDLbg/pr3+tVJs2ZWrbtlxnnVXJ/upacDhrFwpJ11yTqT177Nq0aa+Skuq4uNpQXq7kV16Rc8kSJW7dqlBSknzXXFN1ZMU550T8kRU8d0YuOmQAwl7DhkEZhk+G4VMgIH30Ubw2bEjS+vWJmjbNralTPcrICKh166pjNdq0KVd6ehS90i9MrV2bqI8+StCUKQVhH8bifvih6siKZcsUl5cn/wknqPD++1VqGAo1aGB1eUCtCcsOWeUppyhw/PHyN2sm/wknKNCsmfzNminQpEnVsecIW/yWF9nqc/1yc+16++2qS5tvv52o3Nw42WwhnX125b5zz8r0l79UMrezhmq6dsGg1L59lsrKbFq/fm94PqUGAkp8802lPPOMEtevl+x2lbVvX3VkxSWXROWRFTx3Rq6o7pD5Tz1Vjh07lLBli+ylpb+9PxQXp0DTplXhbF9I8zdrpsAJJ8jftCnDX4EIkpERVJcuPnXp4lMwKH36afy+vWdJmjnTpRkz3EpLC6p167J9+8/KD2v8FA5u1apkbd8erzlz8sIujNmzs+V8/vmqIyt27VLgmGNU8o9/yNujh4JVP/SAqBWWHbLf9pCFQrJnZ8uxY4fivv9ejh07qv6+Y4cc338ve3Hxf+9ksynQuPF/g9r+nbXjj1eIycr1gt/yIlu4rF9+vk0bNyZqw4YkbdiQqL17q9pkZ51Vse9g2nL99a8VcoTlr5TWqMnaVVZKbdo0VHJySG+8kR0ejaZQSAnvvaeUJUuU9NprslVWqvySS6qOrGjfPmauioTL/3s4fBx7EQrJnp9/0KAWt2OH4vLzf3fzwLHHyn/CCf/bXWvWjEGytYgnlcgWjusXDFadJr9+fdXes3//O0GBgE0eT1CtWv2696xMxx0X292zmqzdc885NWJEmhYtylX79uX1VNnB2YqKqo6sWLJE8V99pWBqqkoNQ96ePRU4+eT/396dB8lRnncc/87M3qN7VwixIJCxiKUQWVwikgAhRQlHyWCOvIhDK1dBcAKEECpUTOyUKYKrbFwGu0hshyuwLmT5hSpUgmBEESwJCQE
yAoHAdiK8i3Z1IrGS9pzdmen80b2zM7uz9xzbO79P1ZR6e95+p3sedfczb9BWyFYAABD6SURBVL/9dl7XLR/G4r4nQ6OEbBCBY8co+uyzRJKWSNjq6wl9/nlK2dj06T191rovgXrTzpQpo16XQqKDir/5IX4nTgR4881SNm0q5Y03yjh40G09mzu3KzEw7fnnd/phLNCMGix2HR1w0UUzmDkzxoYNR/J2U2LR7t3ukBUvvkiwrY3OBQvc1rCrriroKxl+2PckPSVkoxBoaUkkZ8mJWlF9PaEDB1LKxqdMSd+yNnu2+1Ban99qnWk6qPib3+LnOPD73xclkrMdO0ro6goQDrutZ92XN6urY/le1awbLHZPPBHmgQcmY+0RlizpzOGaAR0dlL/0EuHaWkp27iReVkb7NdfQVlND1/z5uV2XMcpv+570UEKWLe3tFO3dm7bfWqixkUDS9xWfODE1UUvqtxY/6aSCTNZ0UPE3v8evpSXAtm09D0VvbHQ7mc2Z033nZoQLL4yMy/t/BopdS0uAxYtPYt68KOvWHc3ZOoXq6gj/4hdU/OpXBI8do+vMM2mrqaHt+ut19aEXv+97hUwJWT5EIoQaGvq2rNXVEWpoIBDr+RUeLy93k7M0rWvxmTPH5W3boIOK342n+DkO7NlTlEjO3n67lM7OAOXlcZYs6Xko+umnj4/Ws4Fi9+MfT+CHP5zEyy9/zjnndGV3RaJRyl5/nYraWso2b8YpKqLjsstoramhc8mSgvyhOhTjad8rNErIxpquLkL79qXcXJCY3ruXQGfPJQKntJSo12ctlnRzQfSMM4hVV+PnW8d0UPG38Ry/trYAb73V81D0+np3P5s9O8ry5e7QGosWRfBrN6b+YtfUFGDx4hksWhTh6aeb0iyZGcFDh6hYu5bwc88ROnCA2MyZtN58M2033UR8xoysfe54MZ73vfFuXI9D5kvFxcS8xKqPWIzQgQM9l0C7bzaor6d0yxaCHR2Jok5RkTvWWrp+a6edRsH1VBbJkIoKhxUrIqxYEQFOUFcXSty5+dxzYZ56agJlZQ6LFkUSz9380pdivm/Q+fnPJ9DcHOC++5oHLzxcjkPJtm3ukBUbNxKIRulYupTjDz1Ex4oVvv5xKZJraiHLN8cheOhQ+pa1ujqCra09RYNBYqeemr7f2qxZjIVnoOhXnr8Vavza2+Gdd3r6nn36qTv21axZ0cSwGhdd1ElFxZg7Xiaki93hw0EWLTqJK6/s4LHHjmXsswLHj1Px/PNU1NZS/OmnxKdMoW3VKnfIitmzM/Y5haRQ973xQJcsC4HjEDx6tO9Ya93J2vHjPUUDAWIzZyb6rfUZa62iIierrIOKvyl+rr17Q4mnBmzbVkJbW5CSEoeFC92+Z8uXR5gzJzqmWs/Sxe4735lEbW2YzZsPM3v26PvKFe/aRUVtLeXr1xPs6KDz3HNpramhfeVKfHutd4zQvudfSsiEQFNTn0FxE3eEHk29kyo2Y0Zqy9rppyfGW3MmTcrYOumg4m+KX1+RCLz7bknioeh/+IPbelZdHU080umiiyJMnJjfY2nv2DU0hLj44pMwpo2HHz4+wJIDC7S3U7ZhgztkxQcfEC8vp/3aa2mtqSF69tmZWHVB+56fKSGTAQVOnHD7qtXVpfRZK6qvJ3TwYErZ2LRpaYfuiJ5xBs7UqcO6K0oHFX9T/Aa3b18wkZy9+WYpLS1BioocLrigM/FQ9Llzc9961jt29947hfXry9m69RCnnDL8pxiE9uxxh6x4/nmCx4/TddZZbmvYdddl9EecuLTv+ZcSMhmxQFsboc8+S9+ytn9/6lhrkyenXPpMfpJBvKqqT7Kmg4q/KX7D09UFv/1tSWJg2k8+cVvPTj45xqWXunduXnJJhMmTs3+cTY7dnj1FLFs2ndtua+W73z0x9Eq6uih77TXCtbWUbt2KU1xMxxVX0LpmDZ0XXqghK7JI+55/KSGT7OjooKihIW2/tVBDA4F
4zy/teDjcp2UtPHs2J1pa3AN3MAjBIE4gkPJ393Sf+d57Tve8Xss4WaqD7jpEJ4VROngwyObNbt+zLVtKOX48SCjkcO65nYmBac8+uysrwxAmx+6b35zKb35Tyvbth6msHLx1LLh/P+G1a6lYu5bQoUNEq6tpu+UW2m68kfj06ZlfWelD+55/KSGT3OvsJNTYmL7fWkMDga4sDziZZU66ZG8YSV2f+d319K4D+s5PridNHf3NH7QO77OGVEcgQHlVFa3BIPEJE3C8V9rpiopxO7hxpkSj8P77xYmhNT780B2ypqoqxtKl7iOdLrmkg2nTMnMM7j6h795dxGWXncQ99zQPPNRFPE7p1q3uAK6vvQbxOJFly2itqSGyfDmEQhlZLxkaJWT+pYRMxpZolND+/UxzHI41NUE87r4cx70E6k2nnZ/0XsBx+pRLzO9dB7gtdr3Kpq3bcXrK9ldHr89IV8eA89PU0edzB5nfp47k+d2tk8nfYe96k+vovT1pyqbUEYsRaG9PeeJEf5xAACccdpM0799EwhYO40ycmDrdXSZ5OinBK4ST/5EjQTZtKk28mppCBAIOCxZ0JQam/epXu0b8VXSf0FevnsbOnSVs336ISZP6Ht8DTU1UWEu4tpai+npi06bRduONtN1yC7FZs0a5lTJSSsj8SwmZjEk6qPhbVWUlRxsbCbS0EGhpIej9mzLd2upONzf3TKcr39qa8oSKgcTLynAmTsQJh90kLXk6XbKXJsFLJHc+GDw5FoNdu4oTfc8++KAYxwkwdarbeuaOfRahqmronfGrqqp45ZUTXHNNFd/+9gnuuKOl503Hofj99wnX1lL+0ksEOjqIXHABbWvW0H7llYzLh3v6jI6d/qWETMYkHVT8LePxi0TcxCwTCV7SEy0G4pSU9CRygyV43dO9W/W89ykry0n/wi++CLBli3tpc9OmUo4ccZvJ5s/v6Xt2zjmdAw58X1lZxdKlDnV1Rbz11mHKyx0CbW2Ur19PxbPPUrJ7N/FwmPbrrqN19Wqi8+Zlfbtk6HTs9C8lZDIm6aDib2M6ftHowAnbUBK81lYCzc0pT8AYiBMKpSZv4TBxL7nrL6lLSfCSWvKcioohJXfxOOzeXZx4asB775UQjweYPDnOxRdHWL68g6VLI5x8cmrr2c6d0/na14r53veOcdsSdwDXihdeIHjiBF1z59K6erU7ZMWECSP6+iW7xvS+JwNSQiZjkg4q/lYw8YvHCbS19Z/UpUvw+rss29yccvdxf1L63fWX4KVJ9pqYyqb/O53/+aiaN3ZUcuiIe0l23ryevmfnndfJddfO4MjeVnZ/eSUT33kTp6SE9pUraaupofP883Un8RhXMPveOJTThMwYcznwEyAEPGmt/X6v90uBWuA84Chwg7W23nvvfuBWIAbcba3dOMjHKSHzMR1U/E3xGwHHIdDRMXhS19w8tARvgH53DvARf8avuYJXg1eyNb6YKMWUB9ppd8p5hjXcPGuTO2TFqlXEKytz9z3IqGjf86+cJWTGmBDwv8BfAo3ADuBGa+0nSWXuAOZba//WGLMKuMZae4MxZh7wS2AhcArwOnCWtXag27iUkPmYDir+pviNAcPod9f8RZTN9Wfy+r6ziYQm8qN/ayS6fKmGJPEh7Xv+lamEbIAuogkLgT3W2j8CGGPWAVcDnySVuRp4wJt+Afh3Y0zAm7/OWhsB6owxe7z6to92xUVExqXSUuKlpTBt2pCKX+K9dEIX8behJGTVQEPS343Ahf2VsdZGjTHHgUpv/tu9lq3u/QHGmNuB273lqaqqGur6yxhTVFSk+PmY4udfip2/KX4ylIQsXTNc7+uc/ZUZyrJYax8HHu9+X7/y/Eu/0v1N8fMvxc7fFD//8i5ZjtpQOho0Aqcl/X0q0LuTV6KMMaYImAx8McRlRURERAraUFrIdgBzjDGzgX3AKuCmXmU2AGtw+4ZdD7xhrXWMMRuAtcaYR3A79c8B3s3Uyou
IiIiMB4O2kFlro8BdwEbgd+4s+7Ex5kFjzFVesaeASq/T/r3At7xlPwYs7g0ArwJ3DnKHpYiIiEjB0cCwklHqB+Fvip9/KXb+pvj5V6aGvdBgNSIiIiJ5poRMREREJM+UkImIiIjkmRIyERERkTxTQiYiIiKSZ0rIRERERPJMCZmIiIhInikhExEREckzJWQiIiIieaaETERERCTPlJCJiIiI5JkSMhEREZE8G5MPF8/3CoiIiIgMw/h7uLgx5j3cDdPLhy/Fz98vxc+/L8XO3y/Fz78vL3ajNuYSMhEREZFCo4RMREREJM/GYkL2eL5XQEZF8fM3xc+/FDt/U/z8KyOxG4ud+kVEREQKylhsIRMREREpKErIRERERPKsKNsfYIypB5qBGBC11p4/jGXPA54ByoFXgH+w1jrGmAeAvwE+94r+i7X2lQyudsEyxlwO/AQIAU9aa7/f6/1SoBY4DzgK3GCtrffeux+4FTfWd1trNw5UpzHmLuAe4ExgurX2SNY3cJzLcfyeAZYCx73qv2Gt/SCb21dIshTLp4GVwGFr7dk52pSCk6XY1TPCc6mM3EhjaYypBF4ALgCesdbeNdhn5aqFbJm1dsEI/gP9DLgdmOO9Lk9671GvzgVKxjLDGBMC/gO4ApgH3GiMmder2K1Ak7X2y8CjwA+8ZecBq4A/xY3TT40xoUHq3AasAD7L6oYViDzED+C+pP1QyViGZCOW3jLPkHoclQzLYuxg5OdSGYHRxBLoAP4V+Kehfl5eLlkaY840xrxqjHnPGPOmMeYracrMBCZZa7dbax3cDPTrOV/ZwrIQ2GOt/aO1thNYB1zdq8zVwLPe9AvAXxhjAt78ddbaiLW2Dtjj1ddvndba97t/FUpG5DR+klXZiCXW2i3AF7nYgAKWldhJXow4ltbaVmvtVtzEbEhykZA5wGte8nW7N+9x4O+ttefhZo8/TbNcNdCY9HejN6/bXcaYD40xTxtjpmZjxQtQNdCQ9Hfv7zyljLU2inu5qnKAZYdSp2RGPuL3PW8/fNRrupfMyEYsJTeyFbt051LJrtHEcthykZAtsdaei9vkd6cx5hJgMfC8MeYD4D+BmWmWC6SZ1z1Gx89w+x0tAA4AP8r4Whemgb7zwcoMd75kXq7jdz/wFdw+EtOAfx7aasoQZCOWkhvZil26c6lk12hiOWxZ79Rvrd3v/XvYGPMicClwzFq7ILmcd622+3lQG3CTrlOTipwKdNd1KGm5J4CXs7X+BaYROC3p78R3nqZMozGmCJiMewlkoGUHq1MyI6fxs9Ye8OZFjDH/xTD6SsigshVLyb6sxC7NuXQhsCUbGyAJo4nlsGU1ITPGhIGgtbbZm/4r4EHgcmPMX1trn/eum8+31u7CbfFKXr7ZGPPnwDtADfCYN39m0sngGmB3NrejgOwA5hhjZgP7cDuX3tSrzAZgDbAduB54w7vzdQOw1hjzCHAK7k0Y7+L+ehisTsmMnMavez/09uGvo/0wk7IRS8mNjMdugHOpZNeIYzmSD8v2JcsZwFZjzC7cA8J/W2tfBW4GbvXmf0z/nYT/DngSt2Pjp8CvvfkPG2M+MsZ8CCwD/jGL21AwvOvfdwEbgd+5s+zHxpgHjTFXecWeAiqNMXuAe4Fvect+DFjgE+BV4E5rbay/OgGMMXcbYxpxf3V8aIx5MlfbOh7lOn7Ac8aYj4CPgCrgoVxsZyHIRiwBjDG/xD1x/IkxptEYc2sut6sQZCl2/Z1LJYtGE0tIDFXyCPANb3/rfYdmCj06SURERCTPNFK/iIiISJ4pIRMRERHJMyVkIiIiInmmhExEREQkz5SQiYiIiOSZEjIRERGRPFNCJiIiIpJn/w985b8g5K997QAAAABJRU5ErkJggg==\n", 161 | "text/plain": [ 162 | "
" 163 | ] 164 | }, 165 | "metadata": { 166 | "needs_background": "light" 167 | }, 168 | "output_type": "display_data" 169 | } 170 | ], 171 | "source": [ 172 | "\n", 173 | "plt.figure(figsize = [10,8])\n", 174 | "print(df_train.shape)\n", 175 | "df_train.mean().plot(c = 'r')\n", 176 | "#df_train.median().plot(c = 'b')\n", 177 | "df_val.mean().plot(c = 'b')" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": null, 183 | "metadata": {}, 184 | "outputs": [], 185 | "source": [ 186 | "print(df_train.describe())\n", 187 | "df_train.boxplot()" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "v = np.linspace(-5,-4,num = 11)\n", 197 | "v" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [ 206 | "lr = [10**x for x in np.linspace(-5,-4,num = 11)]" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": null, 212 | "metadata": {}, 213 | "outputs": [], 214 | "source": [ 215 | "lr" 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": null, 221 | "metadata": {}, 222 | "outputs": [], 223 | "source": [] 224 | } 225 | ], 226 | "metadata": { 227 | "kernelspec": { 228 | "display_name": "Python [default]", 229 | "language": "python", 230 | "name": "python3" 231 | }, 232 | "language_info": { 233 | "codemirror_mode": { 234 | "name": "ipython", 235 | "version": 3 236 | }, 237 | "file_extension": ".py", 238 | "mimetype": "text/x-python", 239 | "name": "python", 240 | "nbconvert_exporter": "python", 241 | "pygments_lexer": "ipython3", 242 | "version": "3.6.8" 243 | } 244 | }, 245 | "nbformat": 4, 246 | "nbformat_minor": 2 247 | } 248 | -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU-Split1.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU-Split1.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU-Split2.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU-Split2.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU-Split3.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU-Split3.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU-Split4.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU-Split4.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU-Split5.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU-Split5.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_40_[40,35]_1e-4_64.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-GRU/MV3-GRU_40_[40,35]_1e-4_64.h5 -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_40_[40,35]_1e-4_64_train_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.1278009123136901 2 | 1,0.11591298575202624 3 | 2,0.10455911703445393 4 | 3,0.09209757226453436 5 | 4,0.07938120690743543 6 | 5,0.06814465834887938 7 | 6,0.05761304559260184 8 | 7,0.04839963078903354 9 | 8,0.04085603351206189 10 | 9,0.034736791376325515 11 | 10,0.029889346696060113 12 | 11,0.025856501257065873 13 | 12,0.022443304425763963 14 | 13,0.019368350620786047 15 | 14,0.016561619172606505 16 | 15,0.013929910520562017 17 | 16,0.011370097216444485 18 | 17,0.008857041397179017 19 | 18,0.006098376777601626 20 | 19,0.003916980667825262 21 | 20,0.002245204167357044 22 | 21,0.0013080655863612063 23 | 22,0.0008738864860362185 24 | 23,0.0007128769007433239 25 | 24,0.0006615478627601546 26 | 25,0.0006446014612079998 27 | 26,0.000636647230147524 28 | 27,0.0006308507481585504 29 | 28,0.0006256508879339136 30 | 29,0.0006208060140084449 31 | 30,0.0006162056958103244 32 | 31,0.0006116899004686128 33 | 32,0.0006072466171293982 34 | 33,0.0006029929835112669 35 | 34,0.0005989001596283586 36 | 35,0.0005948690556154664 37 | 36,0.000590970344604681 38 | 37,0.0005871258197852641 39 | 38,0.0005834164570570474 40 | 39,0.0005797352471673741 41 | 40,0.0005760836481821378 42 | 41,0.0005725193828248214 43 | 42,0.0005690085335810742 44 | 43,0.0005655752637356254 45 | 44,0.0005624214299174551 46 | 45,0.0005593855005377055 47 | 46,0.0005564524788349794 48 | 47,0.0005535931677150075 49 | 48,0.0005508351163477684 50 | 49,0.0005481336753725162 51 | 50,0.0005454726301699834 52 | 51,0.0005428873245604924 53 | 52,0.00054034021492319 54 | 53,0.0005377818159585597 55 | 
54,0.0005353340634632552 56 | 55,0.0005328663916162725 57 | 56,0.0005305556248361362 58 | 57,0.0005282292020083241 59 | 58,0.0005258681014634526 60 | 59,0.000523650331289 61 | 60,0.0005214792309558069 62 | 61,0.000519248433423248 63 | 62,0.0005171041438576378 64 | 63,0.0005149947858657782 65 | 64,0.0005129424345990756 66 | 65,0.0005109317661863499 67 | 66,0.0005089737220101835 68 | 67,0.0005069983673508037 69 | 68,0.0005050486811090406 70 | 69,0.0005031416411646095 71 | 70,0.0005011611704498081 72 | 71,0.0004993771487679256 73 | 72,0.000497456089232759 74 | 73,0.0004956483956876315 75 | 74,0.0004938267206730094 76 | 75,0.0004920293063329118 77 | 76,0.0004902922171341476 78 | 77,0.0004885273423930601 79 | 78,0.00048679605159747435 80 | 79,0.0004851036507233118 81 | 80,0.0004833764985203449 82 | 81,0.00048170960614743767 83 | 82,0.0004800162398797428 84 | 83,0.0004783637525594318 85 | 84,0.00047665616652176887 86 | 85,0.0004750561573713309 87 | 86,0.0004733602208638919 88 | 87,0.00047177554466194273 89 | 88,0.0004701220793132888 90 | 89,0.0004685972115645071 91 | 90,0.00046694193745100746 92 | 91,0.0004654662812280356 93 | 92,0.0004639131048988378 94 | 93,0.0004624488598067664 95 | 94,0.00046091601292206144 96 | 95,0.0004594734643372552 97 | 96,0.00045794877261961126 98 | 97,0.000456562206528186 99 | 98,0.00045503186406116436 100 | 99,0.0004535924723001304 101 | 100,0.0004521154970173328 102 | 101,0.00045068512872658726 103 | 102,0.000449292353119065 104 | 103,0.0004477180726113294 105 | 104,0.0004462169459502823 106 | 105,0.0004448622474702654 107 | 106,0.0004434935728788718 108 | 107,0.00044222711915709723 109 | 108,0.0004408286791224993 110 | 109,0.00043957875265760143 111 | 110,0.00043810102335886436 112 | 111,0.00043701161228657133 113 | 112,0.00043570609310023944 114 | 113,0.0004342754425548871 115 | 114,0.00043295366397054605 116 | 115,0.0004315664714386462 117 | 116,0.00043025028353991786 118 | 117,0.00042899902293191353 119 | 118,0.0004280536718108877 120 | 
119,0.0004260627426349792 121 | 120,0.0004253380110899205 122 | 121,0.00042339538828056186 123 | 122,0.0004227404958440486 124 | 123,0.0004207789034552836 125 | 124,0.0004201379975108806 126 | 125,0.0004183870295681191 127 | 126,0.00041765651799424735 128 | 127,0.00041591607166018815 129 | 128,0.00041496091981396677 130 | 129,0.0004136133851980875 131 | 130,0.0004124098495738122 132 | -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_40_[40,35]_1e-4_64_val_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.041165343987061 2 | 1,0.03447078605357343 3 | 2,0.02708339221887933 4 | 3,0.01961806286447521 5 | 4,0.013460503216704419 6 | 5,0.008674861108563069 7 | 6,0.005626523594275631 8 | 7,0.0041477764507049115 9 | 8,0.0034933951201623886 10 | 9,0.0034142682682080515 11 | 10,0.003577382787500091 12 | 11,0.0037033863441922285 13 | 12,0.0037033318758717387 14 | 13,0.0035263769898627854 15 | 14,0.003221196588128805 16 | 15,0.0028002372750177465 17 | 16,0.0022331887599209258 18 | 17,0.0015126283055749432 19 | 18,0.0008195328128781041 20 | 19,0.0005883363232112907 21 | 20,0.0004540644746659131 22 | 21,0.0003844495112044287 23 | 22,0.0003499188661125713 24 | 23,0.00033391495275407517 25 | 24,0.000325753303357111 26 | 25,0.00031983944113719566 27 | 26,0.0003155293906557149 28 | 27,0.00031173498578498075 29 | 28,0.00030814407489680013 30 | 29,0.00030476268348765784 31 | 30,0.00030154406478435827 32 | 31,0.00029865928307368326 33 | 32,0.0002956109739647343 34 | 33,0.0002924408061140827 35 | 34,0.00028958995545539875 36 | 35,0.00028652672109932733 37 | 36,0.00028378057598682315 38 | 37,0.00028053266076563763 39 | 38,0.0002777220837868236 40 | 39,0.000274627935141325 41 | 40,0.00027158427461660626 42 | 41,0.0002689323054851386 43 | 42,0.0002660311333000146 44 | 43,0.0002634271553813897 45 | 44,0.00026098349641462594 46 | 45,0.00025867410244998233 47 | 46,0.0002564864839298715 48 
| 47,0.00025431221804645814 49 | 48,0.0002525974835815101 50 | 49,0.0002509721902054574 51 | 50,0.00024942514900086 52 | 51,0.000247771123119084 53 | 52,0.000246012332479887 54 | 53,0.00024433851699698074 55 | 54,0.0002426458000815634 56 | 55,0.0002411526170205967 57 | 56,0.00023953206556174775 58 | 57,0.00023779289893292147 59 | 58,0.0002363092970536572 60 | 59,0.00023486742705652682 61 | 60,0.0002332732025212769 62 | 61,0.00023199098610222853 63 | 62,0.00023094752245454182 64 | 63,0.0002299031194556376 65 | 64,0.00022869879893701653 66 | 65,0.00022755559646235458 67 | 66,0.00022642868611126625 68 | 67,0.00022538982387953279 69 | 68,0.00022448905631257542 70 | 69,0.0002235547209897175 71 | 70,0.00022276406425276193 72 | 71,0.00022189221585747497 73 | 72,0.00022110927715528243 74 | 73,0.00022029677197208693 75 | 74,0.0002193935290741702 76 | 75,0.00021850181881996706 77 | 76,0.00021759643026723945 78 | 77,0.00021666112448217282 79 | 78,0.00021577541864926703 80 | 79,0.00021483756577724526 81 | 80,0.00021394661824411615 82 | 81,0.00021303570949761518 83 | 82,0.00021217242716249591 84 | 83,0.0002112927473278652 85 | 84,0.00021047427987374751 86 | 85,0.00020961514784520943 87 | 86,0.00020880831088389046 88 | 87,0.00020800306560102337 89 | 88,0.00020725123688642835 90 | 89,0.00020645497565494915 91 | 90,0.0002057361085337169 92 | 91,0.00020501015563334884 93 | 92,0.00020431325939516056 94 | 93,0.00020358933874502265 95 | 94,0.00020292681663955465 96 | 95,0.00020223084670603918 97 | 96,0.00020159932971402104 98 | 97,0.0002009013250213245 99 | 98,0.00020022223681484446 100 | 99,0.00019955818073679534 101 | 100,0.00019891636399953658 102 | 101,0.0001983613740243339 103 | 102,0.0001978281948611076 104 | 103,0.00019735564549999503 105 | 104,0.00019692979524201103 106 | 105,0.00019654563070146432 107 | 106,0.0001962418835570008 108 | 107,0.00019586087076057648 109 | 108,0.00019546673765630815 110 | 109,0.00019498607936588213 111 | 110,0.00019466350806459528 112 | 
111,0.0001942145428419563 113 | 112,0.00019364669850770514 114 | 113,0.00019311004394569016 115 | 114,0.00019257927588428017 116 | 115,0.0001920949224109665 117 | 116,0.00019164678915644642 118 | 117,0.00019123539547355654 119 | 118,0.00019042220183423367 120 | 119,0.00019008744154767743 121 | 120,0.0001893307279056773 122 | 121,0.00018902786742446237 123 | 122,0.0001882594319638507 124 | 123,0.00018799466839670363 125 | 124,0.00018732539570794022 126 | 125,0.00018699772872737255 127 | 126,0.00018630671648083832 128 | 127,0.00018592513666151027 129 | 128,0.00018538531931598895 130 | 129,0.00018490066225961623 131 | 130,0.00018436756438639914 132 | -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_CrossValidation.csv: -------------------------------------------------------------------------------- 1 | ,MSE,RMSE,R2_Score,Train_Time 2 | 0,8.74148E-05,0.009349591,0.862694185,0 3 | 1,0.000202998,0.014247751,0.95107602,117.8884475 4 | 2,8.59E-05,0.009267512,0.942749129,147.3105488 5 | 3,5.69E-04,0.023857602,0.971560514,180.4715095 6 | 4,0.000450968,0.021235998,0.948672734,207.3884559 7 | -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_CrossValidation_TrainLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.262813453,0.002672081,0.001660418,0.001264343,0.00112448 3 | 1,0.254664158,0.002655247,0.001657457,0.001278565,0.000948573 4 | 2,0.245760335,0.002643126,0.001677027,0.001267167,0.000791246 5 | 3,0.235955665,0.002624812,0.001688861,0.001314996,0.000808288 6 | 4,0.22514803,0.002612725,0.00168011,0.001370321,0.000811749 7 | 5,0.213323161,0.002597879,0.001666716,0.001429491,0.000804786 8 | 6,0.200541679,0.002584362,0.001659291,0.001492561,0.000797826 9 | 7,0.186996274,0.002572353,0.001651191,0.001553514,0.000793761 10 | 
8,0.172978449,0.002560725,0.001643853,0.00161189,0.000788714 11 | 9,0.158941714,0.002548978,0.001637306,0.001666658,0.000784505 12 | 10,0.145404586,0.00253782,0.001633186,0.001714279,0.000777681 13 | 11,0.133030714,0.002526135,0.001625286,0.001739373,0.000776749 14 | 12,0.122382387,0.002514876,0.001620336,0.00176479,0.000769164 15 | 13,0.114402133,0.002503559,0.00161723,0.001739802,0.000767702 16 | 14,0.1073084,0.002493859,0.001609374,0.001730395,0.000761365 17 | 15,0.102718943,0.002482673,0.001602674,0.001692773,0.000761996 18 | 16,0.099533047,0.002472312,0.001600708,0.001661481,0.000754804 19 | 17,0.097333535,0.002461213,0.001592778,0.001611139,0.000753969 20 | 18,0.095927511,0.00245075,0.001586319,0.001558197,0.000747742 21 | 19,0.094647586,0.002439292,0.001580635,0.001525278,0.000744895 22 | 20,0.093473341,0.002428049,0.001577228,0.001475535,0.000740862 23 | 21,0.092352834,0.002418433,0.001571347,0.001450947,0.000736468 24 | 22,0.091138256,0.002408715,0.001564418,0.001421345,0.000730125 25 | 23,0.089231046,0.002398013,0.001559115,0.001401993,0.000728689 26 | 24,0.087920976,0.002390604,0.00155349,0.001382862,0.000722881 27 | 25,0.085937328,0.00238066,0.001547469,0.001363186,0.000720391 28 | 26,0.083876204,0.002371226,0.001540813,0.001349599,0.000714684 29 | 27,0.081679685,0.002363203,0.001536862,0.00134712,0.0007125 30 | 28,0.079378655,0.002354147,0.001530127,0.001339256,0.000706935 31 | 29,0.077182112,0.002344187,0.001523388,0.001321336,0.000703763 32 | 30,0.074623154,0.002336757,0.001519983,0.00132114,0.000698731 33 | 31,0.071896585,0.00232735,0.001511927,0.001314167,0.000696914 34 | 32,0.068762649,0.002318017,0.001504434,0.001302492,0.000691725 35 | 33,0.065792738,0.002309601,0.001501327,0.001297529,0.000689749 36 | 34,0.062307507,0.002301464,0.001494536,0.001292632,0.000684972 37 | 35,0.058754526,0.002292329,0.001491193,0.001286482,0.00068393 38 | 36,0.054626331,0.002283846,0.001481308,0.001285796,0.000679462 39 | 
37,0.050453257,0.002275952,0.001481163,0.001273711,0.000677141 40 | 38,0.045725984,0.00226739,0.001471387,0.001268787,0.000673101 41 | 39,0.040931887,0.00225982,0.00146439,0.001266888,0.000672527 42 | 40,0.035311891,0.002250657,0.00146091,0.001253558,0.000668684 43 | 41,0.030504222,0.002243338,0.001455537,0.001255932,0.000667585 44 | 42,0.024746082,0.002235177,0.001451544,0.001246107,0.00066403 45 | 43,0.019867107,0.002227568,0.001442906,0.001244653,0.000661507 46 | 44,0.015006554,0.002220128,0.00144174,0.001232842,0.000658854 47 | 45,0.011298826,0.002212047,0.001431258,0.0012353,0.000657502 48 | 46,0.008318708,0.002204802,0.001428912,0.001229499,0.000655696 49 | 47,0.006290434,0.002196978,0.001420758,0.001221781,0.000654522 50 | 48,0.005037605,0.002189168,0.00141509,0.001218015,0.000652694 51 | 49,0.00434827,0.002182425,0.001410976,0.0012137,0.00065101 52 | -------------------------------------------------------------------------------- /Multivatiate-GRU/MV3-GRU_CrossValidation_ValLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.006233285,0.00028661,0.001605215,0.000454982,0.001008338 3 | 1,0.004665997,0.000297637,0.001548679,0.000557836,0.000722223 4 | 2,0.003180835,0.000287355,0.001478843,0.000711446,0.000656626 5 | 3,0.001849309,0.000302174,0.001457656,0.000824966,0.000645076 6 | 4,0.000812786,0.000304466,0.00145153,0.0009261,0.000643871 7 | 5,0.000264977,0.000306625,0.001447742,0.001003856,0.000640284 8 | 6,0.00047089,0.000309483,0.001436781,0.001061128,0.00063348 9 | 7,0.001756582,0.000312023,0.001429849,0.00109359,0.000627971 10 | 8,0.00448177,0.000310247,0.001420889,0.001105355,0.000621747 11 | 9,0.009062553,0.000314113,0.001411785,0.001105474,0.000617033 12 | 10,0.015674418,0.000311408,0.001399678,0.001085953,0.000608191 13 | 11,0.024282683,0.00031092,0.001392648,0.001055489,0.000605268 14 | 12,0.034170213,0.000309536,0.001381696,0.001013916,0.000596172 15 | 
13,0.046258157,0.000308133,0.001368455,0.000972176,0.000590682 16 | 14,0.057632625,0.000310414,0.00136217,0.000931945,0.000579997 17 | 15,0.068050099,0.000302488,0.001354443,0.000893761,0.000575768 18 | 16,0.076960067,0.000306939,0.001340018,0.00085745,0.00056728 19 | 17,0.083040701,0.000301722,0.001332936,0.000822928,0.000562579 20 | 18,0.088101405,0.0003044,0.001324144,0.000798727,0.000555095 21 | 19,0.091214834,0.000300214,0.001316074,0.000771813,0.00054892 22 | 20,0.092946126,0.000301054,0.001303427,0.000752615,0.000543706 23 | 21,0.093804977,0.000303812,0.001294177,0.000736734,0.000539446 24 | 22,0.093499173,0.000298435,0.001288111,0.00072461,0.000532054 25 | 23,0.09115485,0.000296047,0.001276998,0.000713161,0.000527567 26 | 24,0.089722282,0.000302906,0.00126813,0.000704277,0.000521339 27 | 25,0.087754347,0.000293816,0.001259543,0.000698285,0.000516595 28 | 26,0.085795176,0.000294236,0.001251652,0.000695335,0.000510678 29 | 27,0.083702054,0.000296745,0.001240438,0.000690473,0.000506569 30 | 28,0.081334533,0.000288677,0.00123373,0.00068646,0.000501342 31 | 29,0.079304368,0.000286102,0.001226689,0.000683636,0.00049705 32 | 30,0.077000976,0.000289249,0.001214362,0.000682052,0.000491248 33 | 31,0.074478569,0.000283397,0.001208934,0.000678638,0.000487461 34 | 32,0.071102199,0.00028053,0.001202779,0.000676422,0.000482535 35 | 33,0.067966043,0.000281437,0.001190413,0.000675305,0.000478679 36 | 34,0.064254048,0.000279795,0.001185748,0.00067447,0.000473784 37 | 35,0.060510801,0.000274337,0.001172249,0.000673024,0.000470515 38 | 36,0.055964758,0.00027372,0.001170999,0.000670614,0.000466559 39 | 37,0.051602313,0.000274573,0.001153778,0.000668255,0.000463105 40 | 38,0.046507804,0.000268871,0.001149325,0.000667132,0.000458376 41 | 39,0.041576665,0.000268262,0.001142853,0.000664389,0.000454851 42 | 40,0.035375181,0.000266115,0.001131321,0.000663337,0.00045065 43 | 41,0.030298844,0.000263779,0.001124085,0.000661835,0.000447806 44 | 
42,0.02344017,0.000263197,0.001111908,0.00066036,0.000445019 45 | 43,0.017920281,0.000261306,0.001108378,0.000658803,0.000442203 46 | 44,0.012863414,0.000259174,0.001094383,0.000658552,0.000438447 47 | 45,0.008981612,0.000257213,0.001091965,0.000658585,0.000434875 48 | 46,0.005740685,0.000256282,0.001078775,0.000657109,0.000431338 49 | 47,0.003512699,0.000254884,0.001073568,0.000656486,0.000428308 50 | 48,0.002173422,0.000251863,0.001065202,0.000657678,0.000425831 51 | 49,0.001366275,0.000251227,0.001055094,0.000657881,0.000423517 52 | -------------------------------------------------------------------------------- /Multivatiate-LSTM/.ipynb_checkpoints/Multivariate-3-LSTM-Copy1-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "colab": {}, 8 | "colab_type": "code", 9 | "id": "S19jleua1_GE" 10 | }, 11 | "outputs": [], 12 | "source": [ 13 | "import sys\n", 14 | "print(sys.version)" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": { 21 | "colab": {}, 22 | "colab_type": "code", 23 | "id": "XGXh32Yl3E5s" 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "# Importing dependencies\n", 28 | "\n", 29 | "import numpy as np\n", 30 | "np.random.seed(1)\n", 31 | "from tensorflow import set_random_seed\n", 32 | "set_random_seed(2)\n", 33 | "import pandas as pd\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "from keras.models import Sequential, load_model\n", 36 | "from keras.layers.core import Dense\n", 37 | "from keras.layers.recurrent import LSTM\n", 38 | "from keras import optimizers\n", 39 | "from keras.callbacks import EarlyStopping\n", 40 | "from sklearn.preprocessing import MinMaxScaler\n", 41 | "from sklearn.metrics import mean_squared_error, r2_score\n", 42 | "from math import sqrt\n", 43 | "import datetime as dt\n", 44 | "plt.style.use('ggplot')" 45 | ] 46 | }, 47 | { 48 
| "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "### Data Preprocessing" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": { 58 | "colab": {}, 59 | "colab_type": "code", 60 | "id": "FNioyc6mZUAJ" 61 | }, 62 | "outputs": [], 63 | "source": [ 64 | "# Setting up an early stop\n", 65 | "earlystop = EarlyStopping(monitor='val_loss', min_delta=0.0001, patience=80, verbose=1, mode='min')\n", 66 | "callbacks_list = [earlystop]" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": { 73 | "colab": {}, 74 | "colab_type": "code", 75 | "id": "r5O0a39R9Z_Z" 76 | }, 77 | "outputs": [], 78 | "source": [ 79 | "# Loading the dataset\n", 80 | "url = 'https://raw.githubusercontent.com/ninja3697/dataset/master/CSV.csv'\n", 81 | "#url = '../../CSV.csv'\n", 82 | "df = pd.read_csv(url,parse_dates = True,index_col=0)\n", 83 | "df.tail()" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": { 90 | "colab": {}, 91 | "colab_type": "code", 92 | "id": "2H5Lkte_NgWT" 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "# Correlation matrix\n", 97 | "df.corr()['Close']" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": { 104 | "colab": {}, 105 | "colab_type": "code", 106 | "id": "1anbu_yx3bta" 107 | }, 108 | "outputs": [], 109 | "source": [ 110 | "print(df.describe().Volume) \n", 111 | "df.drop(df[df['Volume']==0].index, inplace = True) #Dropping rows with volume value 0\n", 112 | "df['Volume'].hist(bins = 10)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "timesteps = np.arange(30,55,10)\n", 122 | "hl = []\n", 123 | "for i in range(30,55,10):\n", 124 | " hl.append([i,i-5])\n", 125 | "lr = [1e-5,1e-4,1e-3,1e-2,1e-1]\n", 126 | "batch_size = [16,32,64]\n", 127 | "num_epochs = [20]" 128 | ] 129 | }, 130 
| { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": { 134 | "colab": {}, 135 | "colab_type": "code", 136 | "id": "zRVEzZ1FXj_p" 137 | }, 138 | "outputs": [], 139 | "source": [ 140 | "#Build and train the model\n", 141 | "def fit_model(train,val,timesteps,hl,lr,batch,epochs):\n", 142 | " X_train = []\n", 143 | " Y_train = []\n", 144 | " X_val = []\n", 145 | " Y_val = []\n", 146 | " \n", 147 | " # Loop for training data\n", 148 | " for i in range(timesteps,train.shape[0]):\n", 149 | " X_train.append(train[i-timesteps:i])\n", 150 | " Y_train.append(train[i][0])\n", 151 | " X_train,Y_train = np.array(X_train),np.array(Y_train)\n", 152 | " \n", 153 | " # Loop for val data\n", 154 | " for i in range(timesteps,val.shape[0]):\n", 155 | " X_val.append(val[i-timesteps:i])\n", 156 | " Y_val.append(val[i][0])\n", 157 | " X_val,Y_val = np.array(X_val),np.array(Y_val)\n", 158 | " \n", 159 | " # Adding Layers to the model\n", 160 | " model = Sequential()\n", 161 | " model.add(LSTM(X_train.shape[2],input_shape = (X_train.shape[1],X_train.shape[2]),return_sequences = True,\n", 162 | " activation = 'relu'))\n", 163 | " for i in range(len(hl)-1): \n", 164 | " model.add(LSTM(hl[i],activation = 'relu',return_sequences = True))\n", 165 | " model.add(LSTM(hl[-1],activation = 'relu'))\n", 166 | " model.add(Dense(1))\n", 167 | " model.compile(optimizer = optimizers.Adam(lr = lr), loss = 'mean_squared_error')\n", 168 | " #print(model.summary())\n", 169 | " \n", 170 | " # Training the data\n", 171 | " history = model.fit(X_train,Y_train,epochs = epochs,batch_size = batch,validation_data = (X_val, Y_val),verbose = 0,\n", 172 | " shuffle = False)#, callbacks=callbacks_list)\n", 173 | " model.reset_states()\n", 174 | " return model, history.history['loss'], history.history['val_loss']\n" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": null, 180 | "metadata": { 181 | "colab": {}, 182 | "colab_type": "code", 183 | "id": "LpwHmJeQJqyI" 184 | 
}, 185 | "outputs": [], 186 | "source": [ 187 | "# Evaluating the model\n", 188 | "def evaluate_model(model,test,timesteps):\n", 189 | " X_test = []\n", 190 | " Y_test = []\n", 191 | "\n", 192 | " # Loop for testing data\n", 193 | " for i in range(timesteps,test.shape[0]):\n", 194 | " X_test.append(test[i-timesteps:i])\n", 195 | " Y_test.append(test[i][0])\n", 196 | " X_test,Y_test = np.array(X_test),np.array(Y_test)\n", 197 | " \n", 198 | " # Prediction Time !!!!\n", 199 | " Y_hat = model.predict(X_test)\n", 200 | " rmse = sqrt(mean_squared_error(Y_test,Y_hat))\n", 201 | " r2 = r2_score(Y_test,Y_hat)\n", 202 | " return rmse, r2, Y_test, Y_hat" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "metadata": { 209 | "colab": {}, 210 | "colab_type": "code", 211 | "id": "pI0q18ajCLx6" 212 | }, 213 | "outputs": [], 214 | "source": [ 215 | "# Plotting the predictions\n", 216 | "def plot_data(Y_test,Y_hat):\n", 217 | " plt.plot(Y_test,c = 'r')\n", 218 | " plt.plot(Y_hat,c = 'y')\n", 219 | " plt.xlabel('Day')\n", 220 | " plt.ylabel('Price')\n", 221 | " plt.title('Stock Prediction Graph using Multivariate-LSTM model')\n", 222 | " plt.legend(['Actual','Predicted'],loc = 'lower right')\n", 223 | " plt.show()" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": { 230 | "colab": {}, 231 | "colab_type": "code", 232 | "id": "4NeqKRBZZr0Q" 233 | }, 234 | "outputs": [], 235 | "source": [ 236 | "# Plotting the training errors\n", 237 | "def plot_error(train_loss,val_loss):\n", 238 | " plt.plot(train_loss,c = 'r')\n", 239 | " plt.plot(val_loss,c = 'b')\n", 240 | " plt.ylabel('Loss')\n", 241 | " plt.legend(['train','val'],loc = 'upper right')\n", 242 | " plt.show()" 243 | ] 244 | }, 245 | { 246 | "cell_type": "markdown", 247 | "metadata": {}, 248 | "source": [ 249 | "### Model 1" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": null, 255 | "metadata": { 256 | "colab": {}, 
257 | "colab_type": "code", 258 | "id": "gAvOMIyjIQO-" 259 | }, 260 | "outputs": [], 261 | "source": [ 262 | "# Extracting the series\n", 263 | "series = df[['Close','High','Volume']] # Picking the series with high correlation\n", 264 | "print(series.shape)\n", 265 | "print(series.tail())" 266 | ] 267 | }, 268 | { 269 | "cell_type": "code", 270 | "execution_count": null, 271 | "metadata": { 272 | "colab": {}, 273 | "colab_type": "code", 274 | "id": "Tjso-RjNDmbs" 275 | }, 276 | "outputs": [], 277 | "source": [ 278 | "# Train Val Test Split\n", 279 | "train_start = dt.date(1997,1,1)\n", 280 | "train_end = dt.date(2006,12,31)\n", 281 | "train_data = series.loc[train_start:train_end]\n", 282 | "\n", 283 | "val_start = dt.date(2007,1,1)\n", 284 | "val_end = dt.date(2008,12,31)\n", 285 | "val_data = series.loc[val_start:val_end]\n", 286 | "\n", 287 | "test_start = dt.date(2009,1,1)\n", 288 | "test_end = dt.date(2010,12,31)\n", 289 | "test_data = series.loc[test_start:test_end]\n", 290 | "\n", 291 | "print(train_data.shape,val_data.shape,test_data.shape)" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": null, 297 | "metadata": { 298 | "colab": {}, 299 | "colab_type": "code", 300 | "id": "NWXR5oL2ZnY4" 301 | }, 302 | "outputs": [], 303 | "source": [ 304 | "# Normalisation\n", 305 | "sc = MinMaxScaler()\n", 306 | "train = sc.fit_transform(train_data)\n", 307 | "val = sc.transform(val_data)\n", 308 | "test = sc.transform(test_data)\n", 309 | "print(train.shape,val.shape,test.shape)" 310 | ] 311 | }, 312 | { 313 | "cell_type": "code", 314 | "execution_count": null, 315 | "metadata": {}, 316 | "outputs": [], 317 | "source": [ 318 | "results = list()\n", 319 | "for t in timesteps:\n", 320 | " for l in hl:\n", 321 | " for rate in lr:\n", 322 | " for batch in batch_size:\n", 323 | " for epochs in num_epochs:\n", 324 | " model,train_loss,val_loss = fit_model(train,val,t,l,rate,batch,epochs)\n", 325 | " 
results.append([t,l,rate,batch,train_loss[-1],val_loss[-1]])\n", 326 | "pd.DataFrame(results,columns=['Timestep','Hidden_Layers','Learning_Rate','Batch_Size','Train_Loss','Val_Loss']).to_csv('Multivariate-3-LSTM_model1.csv')\n", 327 | " " 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": null, 333 | "metadata": {}, 334 | "outputs": [], 335 | "source": [ 336 | "'''\n", 337 | "timesteps = 42\n", 338 | "hl = [35,30]\n", 339 | "lr = 1.5e-5\n", 340 | "batch_size = 32\n", 341 | "num_epochs = 500\n", 342 | "model,train_error,val_error = fit_model(train,val,timesteps,hl,lr,batch_size,num_epochs)\n", 343 | "plot_error(train_error,val_error)\n", 344 | "rmse, r2_value,true,predicted = evaluate_model(model,test,42)\n", 345 | "print('R-Squared Score = {}'.format(r2_value))\n", 346 | "plot_data(true,predicted)\n", 347 | "'''" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": null, 353 | "metadata": { 354 | "colab": {}, 355 | "colab_type": "code", 356 | "id": "5mJnzFIPCZnw" 357 | }, 358 | "outputs": [], 359 | "source": [ 360 | "# Save a model\n", 361 | "#model.save('model1.h5')\n", 362 | "\n", 363 | "# Load a model\n", 364 | "#model = load_model('model1.h5')" 365 | ] 366 | }, 367 | { 368 | "cell_type": "markdown", 369 | "metadata": {}, 370 | "source": [ 371 | "### Model 2\n", 372 | "Converting volume to log scale and see what changes happen" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": null, 378 | "metadata": {}, 379 | "outputs": [], 380 | "source": [ 381 | "'''\n", 382 | "# Converting Volume to log scale\n", 383 | "df['Volume_log'] = np.log(df['Volume'])\n", 384 | "print(df['Volume_log'].describe())\n", 385 | "df['Volume_log'].hist(bins=20)\n", 386 | "'''" 387 | ] 388 | }, 389 | { 390 | "cell_type": "code", 391 | "execution_count": null, 392 | "metadata": { 393 | "colab": {}, 394 | "colab_type": "code", 395 | "id": "gAvOMIyjIQO-" 396 | }, 397 | "outputs": [], 398 | "source": [ 399 | "'''\n", 
400 | "# Extracting the series\n", 401 | "series = df[['Close','High','Volume_log']] # Picking the multivariate series \n", 402 | "print(series.shape)\n", 403 | "print(series.tail())\n", 404 | "'''" 405 | ] 406 | }, 407 | { 408 | "cell_type": "code", 409 | "execution_count": null, 410 | "metadata": { 411 | "colab": {}, 412 | "colab_type": "code", 413 | "id": "Tjso-RjNDmbs" 414 | }, 415 | "outputs": [], 416 | "source": [ 417 | "'''\n", 418 | "# Train Val Test Split\n", 419 | "train_start = dt.date(1997,1,1)\n", 420 | "train_end = dt.date(2006,12,31)\n", 421 | "train_data = series.loc[train_start:train_end]\n", 422 | "\n", 423 | "val_start = dt.date(2007,1,1)\n", 424 | "val_end = dt.date(2008,12,31)\n", 425 | "val_data = series.loc[val_start:val_end]\n", 426 | "\n", 427 | "test_start = dt.date(2009,1,1)\n", 428 | "test_end = dt.date(2010,12,31)\n", 429 | "test_data = series.loc[test_start:test_end]\n", 430 | "\n", 431 | "print(train_data.shape,val_data.shape,test_data.shape)\n", 432 | "'''" 433 | ] 434 | }, 435 | { 436 | "cell_type": "code", 437 | "execution_count": null, 438 | "metadata": { 439 | "colab": {}, 440 | "colab_type": "code", 441 | "id": "NWXR5oL2ZnY4" 442 | }, 443 | "outputs": [], 444 | "source": [ 445 | "'''\n", 446 | "# Normalisation\n", 447 | "sc = MinMaxScaler()\n", 448 | "train = sc.fit_transform(train_data)\n", 449 | "val = sc.transform(val_data)\n", 450 | "test = sc.transform(test_data)\n", 451 | "print(train.shape,val.shape,test.shape)\n", 452 | "'''" 453 | ] 454 | }, 455 | { 456 | "cell_type": "code", 457 | "execution_count": null, 458 | "metadata": {}, 459 | "outputs": [], 460 | "source": [ 461 | "'''\n", 462 | "results = list()\n", 463 | "for l in hl:\n", 464 | " for rate in lr:\n", 465 | " for batch in batch_size:\n", 466 | " for epochs in num_epochs:\n", 467 | " model,train_loss,val_loss = fit_model(train,val,timesteps[1],l,rate,batch,epochs)\n", 468 | " results.append([timesteps[1],l,rate,batch,train_loss[-1],val_loss[-1]])\n", 469 | 
"pd.DataFrame(results,columns=['Timestep','Hidden_Layers','Learning_Rate','Batch_Size','Train_Loss','Val_Loss']).to_csv('Multivariate-3-LSTM_1.csv')\n", 470 | "''' " 471 | ] 472 | }, 473 | { 474 | "cell_type": "code", 475 | "execution_count": null, 476 | "metadata": {}, 477 | "outputs": [], 478 | "source": [ 479 | "'''\n", 480 | "timesteps = 40\n", 481 | "hl = [40,35]\n", 482 | "lr = 0.001\n", 483 | "batch_size = 64\n", 484 | "num_epochs = 500\n", 485 | "model,train_error,val_error = fit_model(train,val,timesteps,hl,lr,batch_size,num_epochs)\n", 486 | "plot_error(train_error,val_error)\n", 487 | "rmse, r2_value,true,predicted = evaluate_model(model,test,42)\n", 488 | "print('R-Squared Score = {}'.format(r2_value))\n", 489 | "plot_data(true,predicted)\n", 490 | "'''" 491 | ] 492 | }, 493 | { 494 | "cell_type": "code", 495 | "execution_count": null, 496 | "metadata": { 497 | "colab": {}, 498 | "colab_type": "code", 499 | "id": "5mJnzFIPCZnw" 500 | }, 501 | "outputs": [], 502 | "source": [ 503 | "# Save a model\n", 504 | "#model.save('model_LSTM_30_4035_1e-3.h5')\n", 505 | "#del model # Deletes the model\n", 506 | "# Load a model\n", 507 | "#model = load_model('model2.h5')" 508 | ] 509 | } 510 | ], 511 | "metadata": { 512 | "accelerator": "TPU", 513 | "colab": { 514 | "collapsed_sections": [], 515 | "name": "Multivariate_Stock_Analysis-3-LSTM.ipynb", 516 | "provenance": [ 517 | { 518 | "file_id": "1TbS1iN6r9LWkUIIg2eyr5EB0pIzV5yAK", 519 | "timestamp": 1551228758851 520 | }, 521 | { 522 | "file_id": "1iQNCZLjJhQ56R4aZ1sLrnNQ4PS2KfN2m", 523 | "timestamp": 1551146300148 524 | }, 525 | { 526 | "file_id": "1OKnKPBI38XYPGQW0xUAj0nnJbfO6SuXE", 527 | "timestamp": 1550670800726 528 | } 529 | ], 530 | "version": "0.3.2" 531 | }, 532 | "kernelspec": { 533 | "display_name": "Python [default]", 534 | "language": "python", 535 | "name": "python3" 536 | }, 537 | "language_info": { 538 | "codemirror_mode": { 539 | "name": "ipython", 540 | "version": 3 541 | }, 542 | "file_extension": 
".py", 543 | "mimetype": "text/x-python", 544 | "name": "python", 545 | "nbconvert_exporter": "python", 546 | "pygments_lexer": "ipython3", 547 | "version": "3.6.8" 548 | } 549 | }, 550 | "nbformat": 4, 551 | "nbformat_minor": 1 552 | } 553 | -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM-Split1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM-Split1.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM-Split2.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM-Split2.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM-Split3.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM-Split3.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM-Split4.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM-Split4.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM-Split5.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM-Split5.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM_50_[40,35]_1e-3_64.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-LSTM/MV3-LSTM_50_[40,35]_1e-3_64.h5 -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM_50_[40,35]_1e-3_64_train_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.1151104056857941 2 | 1,0.07498822911827505 3 | 2,0.07406059924530103 4 | 3,0.058886242561479396 5 | 4,0.05186572234429159 6 | 5,0.01949057130938354 7 | 6,0.006517361833335658 8 | 7,0.020426629494880864 9 | 8,0.0036906144557176932 10 | 9,0.008232247559651242 11 | 10,0.0039171665152922105 12 | 11,0.006091659636206974 13 | 12,0.003298717171084098 14 | 13,0.0059605314636000375 15 | 14,0.0028920998241073258 16 | 15,0.004589991489119268 17 | 16,0.0033389061359257927 18 | 17,0.003627350430595112 19 | 18,0.003396093622123932 20 | 19,0.003550499462183474 21 | 20,0.0032485666934638823 22 | 21,0.003765217098147697 23 | 22,0.002906086644333453 24 | 23,0.003358938462069281 25 | 24,0.0028529783940598233 26 | 25,0.003066238824905767 27 | 26,0.0027391923899239526 28 | 27,0.00267059043460749 29 | 28,0.002477650119888579 30 | 29,0.0024441123512584403 31 | 30,0.0023791828513812885 32 | 31,0.002226667204033135 33 | 32,0.0020500681357076343 34 | 33,0.0019915618155800156 35 | 34,0.0019420162741017257 36 | 35,0.001900451787263717 37 | 36,0.0019322995003658175 38 | 37,0.001853640001437154 39 | 38,0.001977915935556954 40 | 39,0.0018067497990991621 41 | 40,0.0018975390848316655 42 | 
41,0.001962734782122956 43 | 42,0.0019153998991881757 44 | 43,0.001864993616878937 45 | 44,0.0019492220473468966 46 | 45,0.001813867792193848 47 | 46,0.0018712627916777267 48 | 47,0.0019378126958903223 49 | 48,0.001832297701071968 50 | 49,0.0018983348205419785 51 | 50,0.0017753609346082286 52 | 51,0.0018063116352027419 53 | 52,0.0018118142599219608 54 | 53,0.0017539392184234798 55 | 54,0.0017657610135924718 56 | 55,0.0017868820017470936 57 | 56,0.0018041189164968315 58 | 57,0.0016723995035412607 59 | 58,0.0017505954987663821 60 | 59,0.0018331940109688864 61 | 60,0.0016944399289007565 62 | 61,0.0016189242768435144 63 | 62,0.0015897954113574212 64 | 63,0.0015782865955427695 65 | 64,0.001560510381741053 66 | 65,0.0016731807063449693 67 | 66,0.0015108578088493638 68 | 67,0.001543530889603892 69 | 68,0.0015243220831417247 70 | 69,0.0015489818202182601 71 | 70,0.001403138895983337 72 | 71,0.0015571191763918814 73 | 72,0.001392048623588297 74 | 73,0.0012855496820832557 75 | 74,0.00137223757504993 76 | 75,0.0011547486338364828 77 | 76,0.0012182902790245087 78 | 77,0.0010537071440198695 79 | 78,0.0012021294908277982 80 | 79,0.0014754135088000551 81 | 80,0.0010364977526921225 82 | 81,0.0014346115530000265 83 | 82,0.0016228361220731317 84 | 83,0.0015319649262390408 85 | 84,0.001246156428529082 86 | 85,0.0009549178284200556 87 | 86,0.0012902605613881854 88 | 87,0.00131965225963177 89 | 88,0.0009528907811382569 90 | 89,0.0014267940511847607 91 | 90,0.0016488845029591958 92 | 91,0.0014443164656222137 93 | 92,0.0010822972300270919 94 | 93,0.001003881455436804 95 | 94,0.000846752426892842 96 | 95,0.0010100988759033764 97 | 96,0.0008638496167895384 98 | 97,0.0014859926777244918 99 | 98,0.0021643191399971606 100 | 99,0.0020524712728559632 101 | 100,0.0015552578218005048 102 | 101,0.0010875920085618971 103 | 102,0.0010428467400161985 104 | 103,0.000770039961994288 105 | 104,0.0009156241810165816 106 | 105,0.0012301491194158607 107 | 106,0.0011236703486156884 108 | 
107,0.001199301104759827 109 | 108,0.0010071686312146304 110 | 109,0.0019487266183681028 111 | 110,0.0021129699690123635 112 | 111,0.0013441277630762344 113 | 112,0.0010993407082741109 114 | 113,0.000804733968739433 115 | 114,0.0009372925651080551 116 | 115,0.0007078067853408485 117 | 116,0.0007972205589106495 118 | 117,0.0007280103619160893 119 | 118,0.0006448087867270989 120 | 119,0.0006310294298489949 121 | 120,0.0005968169736746228 122 | 121,0.0006095013669642096 123 | 122,0.000582833708924142 124 | 123,0.0006744014741546493 125 | 124,0.0006465211147620448 126 | 125,0.001025583016015868 127 | 126,0.0015888207924166652 128 | 127,0.0014976326226193975 129 | 128,0.002723717141169658 130 | 129,0.002764319465897078 131 | 130,0.0018662585613685747 132 | 131,0.0030348149317167474 133 | 132,0.001030181891604302 134 | 133,0.001917471109149927 135 | 134,0.0006853258742582307 136 | 135,0.0006704354478720409 137 | 136,0.0005194145068718866 138 | 137,0.00047343754370320616 139 | 138,0.00045514475435030525 140 | 139,0.00044753174194902867 141 | 140,0.00044633735744122917 142 | 141,0.0004442294538231249 143 | 142,0.00044005910144890014 144 | 143,0.00043486212535416417 145 | 144,0.0004292712928614514 146 | 145,0.0004246492991356463 147 | 146,0.0004213088273796902 148 | 147,0.000419713943354889 149 | 148,0.0004204006639593654 150 | 149,0.0004246437679641059 151 | 150,0.0004360473093213197 152 | 151,0.00046218442152144837 153 | 152,0.0005210564958348005 154 | 153,0.0006519205746157883 155 | 154,0.000926646883140691 156 | 155,0.0014094104915124111 157 | 156,0.0017891649421901653 158 | 157,0.0012946626819516948 159 | 158,0.0005417943511353897 160 | 159,0.00047830576535798873 161 | 160,0.0007135330725654404 162 | 161,0.0005436542016996211 163 | 162,0.0004090053365675876 164 | 163,0.00039094948729042185 165 | 164,0.00037779858058524995 166 | 165,0.00037082211573914134 167 | 166,0.0003681215123765423 168 | 167,0.00036614152082679456 169 | 168,0.00036641552897774283 170 | 
169,0.00036876408356861974 171 | 170,0.00037486249452749817 172 | 171,0.0003815396428002987 173 | 172,0.0004018209772013883 174 | 173,0.00043360553924406953 175 | 174,0.0005145124401992532 176 | 175,0.0006760212706742306 177 | 176,0.0010249705660836477 178 | 177,0.0015414862925688011 179 | 178,0.001673122170259431 180 | 179,0.0009081341006286451 181 | 180,0.00037084590916064745 182 | 181,0.0005689307124406545 183 | 182,0.00046407644566516065 184 | 183,0.0006546411586283805 185 | 184,0.0008378133492084536 186 | 185,0.0005448929257322534 187 | 186,0.00037087805340532687 188 | 187,0.00034869839128188416 189 | 188,0.00033999868703091756 190 | 189,0.0003271940274558547 191 | 190,0.00031796490628548633 192 | 191,0.00031598457317675595 193 | 192,0.0003198783362562826 194 | 193,0.00032753652439727925 195 | 194,0.0003376545197542997 196 | 195,0.00035396777667222323 197 | 196,0.0003814409111160254 198 | 197,0.0004319235155947245 199 | 198,0.0005184026147831258 200 | 199,0.0006597598279930479 201 | 200,0.0008163915741109548 202 | 201,0.0009689390991635443 203 | 202,0.0011316669755261088 204 | 203,0.0011378516039752514 205 | 204,0.0021766643151456704 206 | 205,0.002275210610133892 207 | 206,0.0038382346697756644 208 | 207,0.003956307761645674 209 | 208,0.0009620809142753382 210 | 209,0.0007551110508751585 211 | 210,0.00036564900440018216 212 | 211,0.00034210230432520197 213 | 212,0.00030617415064721604 214 | 213,0.0003040523840496632 215 | 214,0.00030012155867178297 216 | 215,0.0002908672219416 217 | 216,0.0002865420032347536 218 | 217,0.00028384897454103545 219 | 218,0.0002784863659326958 220 | 219,0.0002786331584240513 221 | 220,0.00028513228745870306 222 | 221,0.00028046569967241346 223 | 222,0.00027582600438186267 224 | 223,0.0002707919908102326 225 | 224,0.00027314940407284163 226 | 225,0.00026942905387140133 227 | 226,0.00026401636969830257 228 | 227,0.0002608551936066836 229 | 228,0.0002568215702331975 230 | 229,0.0002572601913608301 231 | 230,0.0002556525287771837 232 
| 231,0.0002717664605041996 233 | 232,0.0002840406989057731 234 | 233,0.0004268310833900341 235 | 234,0.0007285148776368934 236 | 235,0.0017804492743413458 237 | 236,0.002340038303672747 238 | 237,0.0006045496142606305 239 | -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM_50_[40,35]_1e-3_64_val_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.009639119577598203 2 | 1,0.011317148029246257 3 | 2,0.005383548601674088 4 | 3,0.006169961572579613 5 | 4,0.004523977657804668 6 | 5,0.004790950408574637 7 | 6,0.006311657061053661 8 | 7,0.002473947934658648 9 | 8,0.00513964780454528 10 | 9,0.002195884631577436 11 | 10,0.002860957473549286 12 | 11,0.0017556575524912007 13 | 12,0.0025526956554567893 14 | 13,0.0017248042356551088 15 | 14,0.0018272567417324903 16 | 15,0.0017263418871761682 17 | 16,0.0016108646020621456 18 | 17,0.0016108300497894902 19 | 18,0.001572897746986689 20 | 19,0.0014942271485186226 21 | 20,0.0014661345374462756 22 | 21,0.0012648819621635954 23 | 22,0.001298567127346139 24 | 23,0.001244403721379026 25 | 24,0.0012519043179983807 26 | 25,0.0011508124203540155 27 | 26,0.0010439716115396979 28 | 27,0.0007633282997879294 29 | 28,0.0009324083946817575 30 | 29,0.0007936358149265994 31 | 30,0.0007200915554111684 32 | 31,0.0007012179141888797 33 | 32,0.0006458074790897414 34 | 33,0.0006009434541638136 35 | 34,0.00059665346955101 36 | 35,0.0005754577334445085 37 | 36,0.0005671798045406455 38 | 37,0.0005483519053086638 39 | 38,0.0005340780975658135 40 | 39,0.0005491488245992802 41 | 40,0.0005319471279208422 42 | 41,0.0005414998143427238 43 | 42,0.0005367256276358724 44 | 43,0.0005214509589080638 45 | 44,0.0005176369550059414 46 | 45,0.0005321628398709163 47 | 46,0.0005264431776659407 48 | 47,0.0005194156718879347 49 | 48,0.0005088834746408437 50 | 49,0.0005047721178004198 51 | 50,0.0005078009957455854 52 | 51,0.0004972569302897818 53 | 
52,0.0004965685726459331 54 | 53,0.00048801853856900586 55 | 54,0.000507288824229415 56 | 55,0.0004827750088761885 57 | 56,0.0004833655812396024 58 | 57,0.0005014377809232224 59 | 58,0.00048122019301853776 60 | 59,0.00046975386794656515 61 | 60,0.0004718881536332587 62 | 61,0.00047598158969392075 63 | 62,0.000474753707036015 64 | 63,0.00047669222266597894 65 | 64,0.00047310148129887914 66 | 65,0.00047088702699571585 67 | 66,0.00047406955931893663 68 | 67,0.00047441425380432395 69 | 68,0.00047117158655684427 70 | 69,0.0004624830023488153 71 | 70,0.0004735425015140144 72 | 71,0.0004591896052016889 73 | 72,0.0004609779419894392 74 | 73,0.00045317978293548793 75 | 74,0.0004530949789891225 76 | 75,0.0004576232521753709 77 | 76,0.00046228015352253873 78 | 77,0.0004975629261682541 79 | 78,0.00047724239993840456 80 | 79,0.0004669938652903367 81 | 80,0.0005346338037967616 82 | 81,0.0005463066706762112 83 | 82,0.0005356455862825728 84 | 83,0.00048787692984328135 85 | 84,0.0004733852741599805 86 | 85,0.0005138458105600091 87 | 86,0.00048997586527388 88 | 87,0.00047507447073654585 89 | 88,0.000557229389530906 90 | 89,0.0005550490654307512 91 | 90,0.0005327121155656513 92 | 91,0.0005100356837496621 93 | 92,0.00048616146109793087 94 | 93,0.0004667637691246387 95 | 94,0.00047376065690373994 96 | 95,0.0004692279240362642 97 | 96,0.0005777816858454423 98 | 97,0.0006372075079681787 99 | 98,0.0006389705046789827 100 | 99,0.0006180596922520171 101 | 100,0.0005573860240386315 102 | 101,0.0005110500037817835 103 | 102,0.0004703690559964315 104 | 103,0.0004893966944322055 105 | 104,0.0005475770792870532 106 | 105,0.0005528992241954161 107 | 106,0.0005454635304000869 108 | 107,0.000554476794376477 109 | 108,0.0006587085876170759 110 | 109,0.0006388489465692823 111 | 110,0.0005899565827741497 112 | 111,0.0005620062153269476 113 | 112,0.0005018321596217313 114 | 113,0.000469681419922441 115 | 114,0.00042495531381809437 116 | 115,0.00042432970044281056 117 | 116,0.00040968301750120544 118 | 
117,0.0003869604449596108 119 | 118,0.00037670924583663303 120 | 119,0.00037004398889757594 121 | 120,0.0003658334504339089 122 | 121,0.0003716728254118811 123 | 122,0.00037659917732942765 124 | 123,0.0004108469219907035 125 | 124,0.00045996589051756497 126 | 125,0.0005624172126972793 127 | 126,0.0006621540754844767 128 | 127,0.0008619890865225689 129 | 128,0.0009469365109258745 130 | 129,0.0008960403014786772 131 | 130,0.001124724053925453 132 | 131,0.0005691088751442567 133 | 132,0.0008238291497124843 134 | 133,0.0005253515691426685 135 | 134,0.0005038756145613621 136 | 135,0.00035539886692132687 137 | 136,0.00030777681045057785 138 | 137,0.00027918437615576477 139 | 138,0.0002567947866284342 140 | 139,0.0002461444760689672 141 | 140,0.00023820430019461152 142 | 141,0.0002345474439956215 143 | 142,0.00023531948329341324 144 | 143,0.00023598123316092534 145 | 144,0.00023690570644713274 146 | 145,0.00023826020458222896 147 | 146,0.00023814478702817546 148 | 147,0.000236838287295669 149 | 148,0.00023329902618137775 150 | 149,0.00022832644776526288 151 | 150,0.0002208096722848888 152 | 151,0.0002146668023705977 153 | 152,0.000216057990876189 154 | 153,0.0002430206292099018 155 | 154,0.0003188851314916974 156 | 155,0.000404155725653751 157 | 156,0.0003025614612227196 158 | 157,0.0001862666366019518 159 | 158,0.0002377261632522886 160 | 159,0.0003070812790330483 161 | 160,0.0003349557274076595 162 | 161,0.00032241218918236997 163 | 162,0.0002849476814776708 164 | 163,0.0002675132732633255 165 | 164,0.0002533669244318007 166 | 165,0.00024258863763080597 167 | 166,0.00023286814164764674 168 | 167,0.00022576929212433474 169 | 168,0.00021764162043147998 170 | 169,0.00021085498656047764 171 | 170,0.00020356123185781186 172 | 171,0.0001973268707590675 173 | 172,0.00019142457515068992 174 | 173,0.00019025364338427 175 | 174,0.0002035861618828616 176 | 175,0.00024364960179585346 177 | 176,0.000335783477430796 178 | 177,0.00037011827211373173 179 | 178,0.00022748894240579717 
180 | 179,0.00018652090785600486 181 | 180,0.0002407568948678539 182 | 181,0.0002926293402060965 183 | 182,0.00031029602423226176 184 | 183,0.0003859796624124001 185 | 184,0.0003960719724423724 186 | 185,0.0003471777367709196 187 | 186,0.00027211728698875557 188 | 187,0.00023394235148118186 189 | 188,0.00021532082300756788 190 | 189,0.00021573385979356102 191 | 190,0.00022193054155249657 192 | 191,0.00023219531677901286 193 | 192,0.00024395238886082684 194 | 193,0.0002573263841501175 195 | 194,0.000271492611218371 196 | 195,0.00029054683196463003 197 | 196,0.0003192604330700662 198 | 197,0.0003644950561725949 199 | 198,0.0004329168738280361 200 | 199,0.0005344668131974005 201 | 200,0.0005497422078542313 202 | 201,0.0006268832105363715 203 | 202,0.0007640124998149762 204 | 203,0.0005985131602127122 205 | 204,0.0010158336815723763 206 | 205,0.0012174027978077465 207 | 206,0.0014390059580640777 208 | 207,0.001051246021394228 209 | 208,0.0009066659361247121 210 | 209,0.0004283194901987845 211 | 210,0.00034643329667882115 212 | 211,0.0003157951984219771 213 | 212,0.0002652889279328327 214 | 213,0.00023795349644920773 215 | 214,0.00022226553049538796 216 | 215,0.00021005572597808405 217 | 216,0.00020217128938800806 218 | 217,0.0001972121824755278 219 | 218,0.00019342133134364002 220 | 219,0.00018479918468186358 221 | 220,0.00018062990740856893 222 | 221,0.00017952681779278732 223 | 222,0.0001795190887185022 224 | 223,0.00017873826180277023 225 | 224,0.0001755016590980146 226 | 225,0.00017436054830864022 227 | 226,0.00017497744043906411 228 | 227,0.00017557058895623727 229 | 228,0.0001776965640890176 230 | 229,0.00017309012652414805 231 | 230,0.000172672636707858 232 | 231,0.00015698954710002992 233 | 232,0.0001569567152788367 234 | 233,0.0001746063066381958 235 | 234,0.00033041365876537954 236 | 235,0.0005448364980528049 237 | 236,0.00014534863825776026 238 | 237,0.00022853972196102933 239 | -------------------------------------------------------------------------------- 
/Multivatiate-LSTM/MV3-LSTM_CrossValidation.csv: -------------------------------------------------------------------------------- 1 | ,MSE,RMSE,R2_Score,Train_Time 2 | 0,4.32E-05,0.006572734,0.921799029,0 3 | 1,0.00023713,0.015399018,0.942282305,274.5334935 4 | 2,5.75E-05,0.007582882,0.962324463,334.8136861 5 | 3,0.000335134,0.018306674,0.983354737,403.5998371 6 | 4,0.001774933,0.042129949,0.797562919,467.8059676 7 | -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM_CrossValidation_TrainLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.24059573784063104,0.0021823568409053357,0.0011678294613868614,0.0007155200057010442,0.0005630477345166363 3 | 1,0.17697695951376644,0.002554630059092504,0.0012652701158250407,0.0010004323670266748,0.0007723754911097215 4 | 2,0.12461855281676565,0.003328614728277589,0.0013996941127438962,0.0017297612006766197,0.0011468849895789894 5 | 3,0.12025135536278997,0.004954270037891779,0.0015807756086298652,0.004011573799570996,0.001271100000496561 6 | 4,0.11078842520713807,0.005559377235662628,0.001478979608731668,0.0055535291933165995,0.001097234328879122 7 | 5,0.10445584256734167,0.0032114543667201823,0.001528895584044098,0.004834250216089471,0.0006976919414381586 8 | 6,0.07853437088429928,0.0020861172607104157,0.0012277385134654912,0.001375280924118452,0.0004826100557605618 9 | 7,0.050393729326980455,0.0018772511924791924,0.0012440340988116262,0.0007717623325961668,0.0004176575979968275 10 | 8,0.006402992452827415,0.0018862146531241609,0.0012053972055736713,0.0006437910443194814,0.0003808121981501793 11 | 9,0.008279331351015051,0.0019687623667468484,0.0011973204938316238,0.0005652466071130131,0.00036953502764724787 12 | 10,0.010052747104990496,0.0020175832737375924,0.0011956852944884735,0.0005466260225840018,0.00037630086696141464 13 | 
11,0.005699892615999228,0.0018739360530781461,0.0012437690333480516,0.0005331844406433626,0.0004009133790576191 14 | 12,0.004823628572214927,0.001762828355555378,0.001277159720947088,0.000531668488192864,0.0004309597174098423 15 | 13,0.004668158171407413,0.0017608261426930573,0.0013593093557790132,0.000543452606456105,0.0004862167594687177 16 | 14,0.004490034140430258,0.00204971454454898,0.0013994905571161002,0.0005828788340449149,0.0005866250337251596 17 | 15,0.004417401346394659,0.0029160440316116634,0.0014714228386626435,0.0006692491324813577,0.0006872065857449737 18 | 16,0.004464665297003064,0.004321041248089812,0.0014094047482828146,0.0008528630220918247,0.000832903715403167 19 | 17,0.004633847202556873,0.004709880918690419,0.0014171147558214615,0.0013990979755836748,0.0009021621280215142 20 | 18,0.004759968346999293,0.003098074737422297,0.001189558123510555,0.001599756000452006,0.0008628768766883071 21 | 19,0.004775895187132327,0.0020113303947003626,0.0012013622237491753,0.001389566544811471,0.0007555797662363663 22 | 20,0.004693117522443312,0.0017332662784069048,0.0010459649649943704,0.0010997418145334322,0.0005700754306976212 23 | 21,0.00461836732529004,0.001667259737138359,0.0010891809900092963,0.0009150336951125597,0.0004752288359909526 24 | 22,0.004566120058090226,0.0016506969498840725,0.0009670385576378143,0.0008162061602875761,0.00039590173279964756 25 | 23,0.00456687010346546,0.001631526867338912,0.001038443779195358,0.0007564168408247446,0.00035585626404548723 26 | 24,0.00458509158995834,0.001605069482243283,0.0010042202841004375,0.0010033650522966358,0.00032644431676199457 27 | 25,0.00460938580180352,0.001606981327619151,0.001106845393143762,0.0010848129779128292,0.00030108219135695707 28 | 26,0.004587544151581824,0.0017216236653497463,0.001111673946039538,0.0009809961494732268,0.00029118405882948 29 | 27,0.004525634526258468,0.0021050878092671367,0.0013445171463560763,0.0008993352969746027,0.0002817323281382721 30 | 
28,0.004434141538554936,0.0029388183076430032,0.0014223910958223267,0.0008481745471444938,0.0002810314136377749 31 | 29,0.004359594080139816,0.00407801818615321,0.0017760268724040065,0.0008364398647706404,0.00027666207348944684 32 | 30,0.004322726888307703,0.004107354964473919,0.0017541031414774638,0.0008420772405552783,0.00028245084030491984 33 | 31,0.004327329130984643,0.003102678859958108,0.0016319030286745223,0.0008687426683101081,0.0002788166628963965 34 | 32,0.004313962474656624,0.0024983386212818703,0.0011408874087387862,0.0009044408058119663,0.00030536143774032053 35 | 33,0.004299922069310144,0.0017504163882589869,0.0009784361302420514,0.0009484494828478053,0.0003345454134881926 36 | 34,0.004282779151316,0.0015724021436286189,0.0008085874697687775,0.0009711222409747842,0.000494917864213117 37 | 35,0.004264378566752255,0.0015439444596655023,0.0007928592563368542,0.000996556899897282,0.0008069111366864261 38 | 36,0.004242075450851449,0.0015584461960818947,0.000727810934364181,0.0009613563794836165,0.0014333862525723728 39 | 37,0.004226079484755506,0.0015585508616919818,0.0007329840879735411,0.0009339501993758538,0.0018778629498875346 40 | 38,0.004225429231342527,0.0015432232799054311,0.0006828232479473611,0.0008925621957324682,0.0015080805992302962 41 | 39,0.004250261694375825,0.0015090019468148735,0.0007283091123284421,0.0008934072059379156,0.0006578861725592358 42 | 40,0.004278097695122207,0.0014701089655647323,0.0007087536388329897,0.00086905175650289,0.00046512981924419126 43 | 41,0.004294695710164628,0.0014415366139183654,0.001050823666942745,0.0008574907120700899,0.0003700735549150029 44 | 42,0.004167882749253685,0.0015074188466198703,0.0020223766217556767,0.0008792411481459875,0.0003208429268063383 45 | 43,0.003978036672675184,0.001894779187507006,0.0038302548411439153,0.0008984208192666467,0.0002973711378370967 46 | 44,0.003827258027740754,0.0029957174698752847,0.0037803975692595024,0.0008815056956938836,0.0002834030347879757 47 | 
45,0.0037440188744221814,0.004758004705728765,0.0029883780692786894,0.0008531902520548434,0.0002763488700231874 48 | 46,0.0037625541815318035,0.005103811147087749,0.002052223597410514,0.00082081853755189,0.00027156379415740607 49 | 47,0.0038444947830118636,0.0028042413961845746,0.0013604416054276646,0.000797756137301765,0.00026838611056628433 50 | 48,0.003915180860286844,0.0016789793607963378,0.0010022159338713193,0.0007959029968896828,0.00026847208856748057 51 | 49,0.003910420856388685,0.0014704941208037781,0.0008541031537464795,0.0007929617611602814,0.00026681669331148707 52 | 50,0.003820748137825701,0.0015101933203426443,0.0007776192140812844,0.0008000046555666109,0.00027216702316753026 53 | 51,0.0037168625818698535,0.0016114737852826403,0.0007315758705725922,0.0008143230106699621,0.0002739909083087345 54 | 52,0.003635687180586891,0.0017007554823995886,0.0007066431186134558,0.0008507591595587937,0.0002958333022927189 55 | 53,0.0036021357787207565,0.0018285638662727,0.0007024855712049846,0.0009253204418969354,0.00036444199467512657 56 | 54,0.003613484668811517,0.001939704770940923,0.0007307669350342683,0.0009747172014305736,0.0005971633580040624 57 | 55,0.0036652543103472064,0.0021152461047406292,0.0008229337749657195,0.001053941945551934,0.001131801133735543 58 | 56,0.003715118647794173,0.002216321506142428,0.001092910343294981,0.0010207211537881834,0.0013864690511899787 59 | 57,0.003722442797775979,0.0023110903983135666,0.0017806566072360542,0.0009336770747248805,0.0011455570144756945 60 | 58,0.003677648196878311,0.002225339265877413,0.0031773922673460605,0.0007559921804399059,0.0006202483742746121 61 | 59,0.0036162665926635132,0.002083015063660841,0.0035799197873329523,0.000750523716093264,0.00047237140587453886 62 | 60,0.00355591108232537,0.001853404228754795,0.0016594891743714748,0.0006329908794649667,0.00039868047936919077 63 | 61,0.0035062583573744633,0.0016863964860834692,0.0009618702946774369,0.0006448349978161556,0.00036117736983368263 64 | 
62,0.0035221265337064063,0.0015462746268740182,0.0007813993703721434,0.0005886879553657655,0.00033651799062079 65 | 63,0.0035336514353652352,0.0014509169210214167,0.0007227872084158816,0.0007176735911241234,0.0003120346397287016 66 | 64,0.0035203915335919843,0.001386127855561852,0.0007008848156964,0.0006950093977215608,0.0002917155283832844 67 | 65,0.003483176038217997,0.001336233307046449,0.0006848145001979793,0.0006740561814759005,0.0002766364018246251 68 | 66,0.0034478351412690245,0.0012859581125048089,0.0006704238095045508,0.0008847812061734223,0.00026699816531092153 69 | 67,0.003413729596192882,0.0012492337528531975,0.0006600549489506856,0.001079440017498892,0.0002636937775539728 70 | 68,0.003400451737772008,0.0012243211660267874,0.0006560285404332918,0.0012633730516593549,0.0002607299573333788 71 | 69,0.0034048024441082295,0.0012302442113921526,0.000666708688232436,0.0010976590321712446,0.00026420022775447107 72 | 70,0.0034218431204914982,0.0013467083405827153,0.0006919800696993923,0.0019616578027407613,0.00027621597655086655 73 | 71,0.0034331564304011407,0.0017093509479735153,0.0007587475290581667,0.0015533754265331008,0.00032030314814400333 74 | 72,0.0034313182863323684,0.0026648373660096464,0.0009227796930059351,0.0008326273478123254,0.0004355033309179981 75 | 73,0.0034057898084159078,0.003893702248596844,0.0012564807140672386,0.0008415532260469135,0.000543103039444211 76 | 74,0.0033651687497123412,0.0053646552728172,0.0019230737258790907,0.0005516710230206716,0.0008946867586753095 77 | 75,0.0033212659678039406,0.004516997517017212,0.002419568239756854,0.0005047751631190948,0.0012946670130701702 78 | 76,0.0032931264926030833,0.0016006826560577734,0.0017453139396512612,0.00044287114170901346,0.001234512224364591 79 | 77,0.0032834773122574134,0.0013466267820671845,0.0009191838754597514,0.0004663922153757623,0.000655354999862429 80 | 78,0.0032815278347905923,0.0013112121902404978,0.0006634308283321581,0.0005284706991441338,0.00046053296118113494 81 | 
79,0.0032746772355624542,0.0014387358691121644,0.0006111544607354178,0.0007061216002166218,0.0003636401431201621 82 | 80,0.0032568345349032564,0.0015258880331725294,0.0005990510186011769,0.0009745162070878867,0.0003080844123765731 83 | 81,0.00323552873621728,0.0015888229418085254,0.0005902264699632385,0.002075455999096141,0.00027108547885773185 84 | 82,0.00321370813536175,0.0016692970701811607,0.0005932020238845834,0.004283177004008009,0.0002523836514688832 85 | 83,0.0031989075724102025,0.0016977223894076466,0.0006535507468454899,0.0027826006065572047,0.00024187518125056313 86 | 84,0.0031893531689378765,0.0016725667506878072,0.0008583136973680605,0.000910482394327016,0.00024208437485013056 87 | 85,0.003199310636783983,0.001634588183298276,0.0013281968192116484,0.0004974853185712176,0.000249392706539667 88 | 86,0.003232350430085457,0.001573466143290329,0.001896323570369948,0.0004332695485086141,0.00026967601322201474 89 | 87,0.0032431724521198445,0.0015845129554168608,0.002037174667911732,0.0004344642813265009,0.000324678340572962 90 | 88,0.0032188665974328093,0.001625052876803006,0.0021049337309658905,0.00041935487900791744,0.00043614075493897836 91 | 89,0.003175470148624819,0.0016554157957022865,0.002121454944845952,0.00040498373203609194,0.0006030039672185078 92 | 90,0.003126777093046777,0.0016573570884731376,0.0022676778391942823,0.0003910766434888552,0.0006714183691743879 93 | 91,0.003087279835848936,0.0016461848635833333,0.0017969435949746215,0.0003864873214387073,0.0006039877156625328 94 | 92,0.0030588439083866045,0.0016257121049420675,0.0014654195043199176,0.00038108555923878465,0.0004570468446886425 95 | 93,0.003043690610273708,0.0016109794251106188,0.001047569116324944,0.0003811410428836456,0.0003734820374832573 96 | 94,0.0030234476151651635,0.001582031098889197,0.0007981732812383552,0.0003804904232490283,0.00031795804093399715 97 | 95,0.0029973310452312166,0.0015589041089542467,0.0006954521288947544,0.00038215774275414963,0.0002946650458858152 98 | 
96,0.002969540439122024,0.0015324880381082581,0.0006348430977595853,0.00038723683310180743,0.00027764947118563016 99 | 97,0.0029354920755888868,0.0015121451864267348,0.0006005887449831235,0.00039960559651019495,0.00027108460938956317 100 | 98,0.0029132766968021835,0.0014834149807900255,0.0005922734607723877,0.00042962212573399136,0.00026756598168306604 101 | 99,0.0028451803852138774,0.0014696863162247068,0.0005996704367566577,0.0004923456037149791,0.00027597323597404623 102 | -------------------------------------------------------------------------------- /Multivatiate-LSTM/MV3-LSTM_CrossValidation_ValLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.0015950548023517643,0.00020720868524996206,0.0002497565563964476,0.0005206132202710904,0.0003035276717259493 3 | 1,0.04302330008574894,0.00013362887427532808,0.00020677074244920701,0.0007879836875027124,0.0004646079188369843 4 | 2,0.05700928888150624,0.0007460173571165254,0.0002204758452273258,0.0014255349239019651,0.000575428663231208 5 | 3,0.0659157236133303,0.0006773730982689465,0.0002002303849144097,0.0012213074549717803,0.0005400914902916649 6 | 4,0.0607648503780365,0.0004259238654916036,0.00022468097786413173,0.0007304731333418422,0.00029805000963991127 7 | 5,0.055189932137727736,0.0002563071247269808,0.000222740231636296,0.0003731771289046303,0.0001843996852333377 8 | 6,0.024659973915134158,0.0001631332859649271,0.00020958748871758502,0.00022553146474922574,0.0001715830107870321 9 | 7,0.014718165317816393,0.00013725577189185794,0.00020246374037539018,0.00017156787882772155,0.00016883547328053468 10 | 8,0.00022049738767756415,0.0001336958886624532,0.00020144017769903796,0.00015532109676532341,0.00016960888148666954 11 | 9,0.0011293715201983493,0.0001854199576166835,0.00019677666596026492,0.00016480653639390906,0.00017028708748676512 12 | 
10,0.0006867209352952029,0.0001282737507086069,0.00019532443522189555,0.000155220914207412,0.00017419248238677182 13 | 11,0.0003530500073351764,0.00010441374813919848,0.0001922360880614132,0.0001494157564593479,0.00018139700068677046 14 | 12,0.00034259042361684677,9.446505737674245e-05,0.00019971042433931192,0.0001536846076036659,0.00020059461562322027 15 | 13,0.0003367155937926977,9.859541641298107e-05,0.0001974298612074532,0.00015737665362137987,0.0002458871748994093 16 | 14,0.00031659533932854955,0.00013192153219666154,0.00022857325068690766,0.0001732938224449754,0.00031670226451817405 17 | 15,0.0002844749176750026,0.0003088342752001134,0.00019707895659494323,0.00015885831790641737,0.0004115594211539411 18 | 16,0.0002694028984530762,0.0004909676104905381,0.00022699498999133801,0.00024626000651525675,0.00047646475316260955 19 | 17,0.0002640414638153743,0.0003720118980941442,0.00018877805789273579,0.00032843754169347523,0.0004364311815721981 20 | 18,0.0002682830945039833,0.00024513857448023085,0.00019301805519193047,0.000344022654626571,0.0003539123096929706 21 | 19,0.00027569055632089396,0.0001667379676949535,0.00018997698436763486,0.0002812812994332643,0.0002234362525842994 22 | 20,0.0002794444942264818,0.0001241621704390669,0.0001908543867688484,0.0002271366728679605,0.0001798848797572799 23 | 21,0.0002820885812564354,9.893493663688052e-05,0.0002018814471400327,0.00020094683966074715,0.0001521239668714354 24 | 22,0.00028148366766150243,8.61323639834696e-05,0.00018738110931304093,0.00019579725799111414,0.00014543024085305004 25 | 23,0.00028388659311791086,7.955944352904632e-05,0.0001950060731807321,0.0001967770592670258,0.000141640840636858 26 | 24,0.00028457153979356267,7.528424748477149e-05,0.0001826739554309239,0.00023519996935698453,0.00013857955892895718 27 | 25,0.00028813677274488976,7.712154221464632e-05,0.0001833209776446831,0.0002481329376960147,0.00014513861338877264 28 | 
26,0.0002873899377716173,8.624263160101614e-05,0.00019260929699866812,0.00024207161950983737,0.0001531378468442484 29 | 27,0.00028820436755527877,0.00014457441809893082,0.00019185601568867658,0.0002336358667915346,0.00016278491666854683 30 | 28,0.00028557913120104265,0.0003255646794717363,0.0003001150752810398,0.0002301218525119598,0.00016292249489328767 31 | 29,0.0002867920430643218,0.0004006369856336065,0.0002611979398969166,0.00022994355883934652,0.00016907676170770843 32 | 30,0.0002871222480566108,0.00033388875725653093,0.00030024921758568026,0.0002445600540737923,0.00015419560277004198 33 | 31,0.0002894986716065822,0.00027813401144119275,0.00020835355550180963,0.00024710203033976886,0.000163900077550571 34 | 32,0.00029050635822516465,0.00019401925608961276,0.0001746126610545986,0.0002727068079353481,0.0001621398400145033 35 | 33,0.0002918862730231402,0.00013174414585166155,0.0001935743595699393,0.0002751945671264712,0.00022497037060962744 36 | 34,0.00029199196275190583,9.553212704362367e-05,0.0002228629101271015,0.00028923032870909696,0.000422134673424873 37 | 35,0.00029498416040691413,7.699433549436086e-05,0.0002578192140775835,0.0002892112412693268,0.0008263886670570776 38 | 36,0.0002942454187931227,6.72659194567538e-05,0.00027475029350232757,0.00028548679530987855,0.0014406132590339845 39 | 37,0.0002979329795510109,6.399190566453592e-05,0.00032293289705964944,0.000280034451492944,0.0010260256942844085 40 | 38,0.0002997330060627844,5.941369337109636e-05,0.0003459967124168327,0.00027029290328025294,0.000269939593202154 41 | 39,0.00030555631715937386,5.983589768181852e-05,0.0005019825213574962,0.0002694757281422195,0.0001407374981341951 42 | 40,0.0003090746289983924,5.4948190727179496e-05,0.0005714579918461268,0.00026684557809166507,0.00012712477782112828 43 | 41,0.00031037754129751454,5.621822596793286e-05,0.0010535173691311945,0.0002667287298196166,0.00015612217262443378 44 | 
42,0.0003010557053078498,6.013848122049549e-05,0.0011960483951813747,0.000280684860452721,0.00015009947020156556 45 | 43,0.0002906854954614703,8.680704688734469e-05,0.001013899198437299,0.0002780605937209977,0.0001573485102790621 46 | 44,0.00028226374798188253,0.00035791656242663717,0.002501126941711784,0.00028380728654668364,0.00016388848728320774 47 | 45,0.0002813717771953504,0.0006177218181348624,0.003242231661580124,0.00027399003558175667,0.00017111067469400095 48 | 46,0.0002840082270891539,0.00042722136744525915,0.0017063379800243113,0.0002746271573721995,0.00017390043828187577 49 | 47,0.00029239120394257564,0.00024140570308868228,0.0010440890889447598,0.0002665081803365545,0.00017567472588495318 50 | 48,0.0002943654316810093,0.00015836812693684825,0.0010709991367270882,0.0002793112467207067,0.0001749131329222975 51 | 49,0.0002940580526566399,9.235495738121631e-05,0.0010626159319915547,0.0002667323334477055,0.0001738403810199395 52 | 50,0.00028569892265035637,6.588957277269034e-05,0.0009256182697214881,0.0003004535439942347,0.00016906031629025756 53 | 51,0.0002799145544746092,5.988701213162982e-05,0.0008448721189594481,0.0002750216294604708,0.00016591464904282827 54 | 52,0.00027475176861376633,7.390247696132567e-05,0.000698032835671102,0.00032101058512663756,0.00018083816478365515 55 | 53,0.00027409569177377436,6.304945906409326e-05,0.0006004155305425352,0.0003190083980960378,0.0002951168791806123 56 | 54,0.0002748005191928574,8.489702563275251e-05,0.0004880259280271583,0.0003389165182532349,0.0006949136639574924 57 | 55,0.000279248379769602,7.289334695436064e-05,0.0003551675097758377,0.0003567887197124024,0.001030244182301573 58 | 56,0.00028074242823225044,9.675398697699495e-05,0.00021568533534803666,0.0003348759769029181,0.0008375674594160035 59 | 57,0.00028074479961235607,8.75970587777432e-05,0.00030019796937822476,0.00030603665058356774,0.00029354213010407534 60 | 
58,0.00027467683938864085,0.00010374745759961908,0.00046119903025840865,0.0002577613329213165,0.00014589361864680717 61 | 59,0.000270449157438374,8.598185553289132e-05,0.00037818255918633224,0.00025519749787504927,0.0001367079300934965 62 | 60,0.00026368403507928765,8.541809727714591e-05,0.00020995102773452084,0.0002193223026705722,0.00014203839559467083 63 | 61,0.00025866832500988883,7.007808788020679e-05,0.00019688041504843445,0.00031261660976820743,0.00014381517939471517 64 | 62,0.00026134031492152385,6.088010318791853e-05,0.00027041360803719975,0.00025757800756533904,0.0001346682267774076 65 | 63,0.0002567500647689615,5.113051907910824e-05,0.0003142592876876302,0.0003512341291996204,0.00012684209680420357 66 | 64,0.00025610189153147593,5.310787989033765e-05,0.00033534439301593783,0.0002939382965617459,0.0001239980032375778 67 | 65,0.0002487345633562654,4.4782341677652784e-05,0.00033903694101185066,0.00029269199634731653,0.00012382523813116622 68 | 66,0.0002461742273797946,4.471673517478036e-05,0.0003396155160725496,0.00034270349785055915,0.00012485207036047814 69 | 67,0.00023999318314184035,4.1854046735642574e-05,0.00033853411119800234,0.0003941665853629492,0.00012671680853096207 70 | 68,0.0002407629046189998,4.1370159048730124e-05,0.00031933303044613654,0.0002773065191306646,0.00012910597766917873 71 | 69,0.00023718041991482356,4.107586654696361e-05,0.0002893776674404685,0.000500173060285767,0.0001351244704158281 72 | 70,0.00023986409118931208,4.9191062502591485e-05,0.00025563950480703834,0.0006188005372934358,0.000155946192701897 73 | 71,0.0002373963619382786,6.669304914774987e-05,0.0002077978487018993,0.00039656698362718166,0.0002144721053338733 74 | 72,0.0002370888478721359,0.00017556838624357702,0.00016803365145016746,0.00022874656264525905,0.0003301708448498723 75 | 73,0.00023241184884682299,0.0007351204913921971,0.00022414061337042504,0.00019423289349119007,0.0005469315491773817 76 | 
74,0.00022729616495780646,0.000947882413636259,0.000548822058056939,0.0001554237825805846,0.0009752665691257241 77 | 75,0.00022338373578220072,0.00027313040435670855,0.0004824527603686566,0.00015092342374616787,0.0010389670566936214 78 | 76,0.00021900895309434938,7.640090118505388e-05,0.0001938728280207616,0.00018477073745985686,0.0004514414841640202 79 | 77,0.00021995263315537677,6.125352515772783e-05,0.00017181780449237075,0.0004038588729855919,0.00014635600200124893 80 | 78,0.00021554294449742883,4.611466357791289e-05,0.0002446542837448868,0.00029766117351729765,0.00012715625350857265 81 | 79,0.0002180498488347179,5.078353010987407e-05,0.0003011702025660424,0.0002674797724466771,0.00012235366458813828 82 | 80,0.000210769566225021,5.410716112624443e-05,0.0003433805006537468,8.121073058671908e-05,0.00011538237926595092 83 | 81,0.00021380241669248788,6.560387460690146e-05,0.0003981353991178567,0.00021163990160960272,0.00011600640466358198 84 | 82,0.00020575914806353725,7.288920584501962e-05,0.00048397507865556073,0.00038085844276331915,0.00012212253501077733 85 | 83,0.00020970367768313736,6.293858145038533e-05,0.000601101628804611,0.00017099141722879516,0.00013550519129732646 86 | 84,0.0002035916697267177,6.0334601250283314e-05,0.0007355924646648326,0.0001985178882843146,0.00015416771085113737 87 | 85,0.00020971564235099192,4.4918191188697435e-05,0.0008265886928711553,0.00014428251466392473,0.00018958349078220416 88 | 86,0.00020838838070631028,4.3805724546589465e-05,0.0007618178201097427,0.0001404597696153001,0.000244823977609266 89 | 87,0.00020930718422667788,4.507309208457492e-05,0.0009535453826513623,0.00013719093760403373,0.0003460645133556506 90 | 88,0.00020497990148474595,4.8097245894979926e-05,0.0019477515341319396,0.0001336079763091156,0.0004776465397255203 91 | 89,0.00019894238819168615,4.761211070062734e-05,0.002304070402070943,0.00012754417761576584,0.0005403565213782713 92 | 
90,0.00019696349178307824,4.8123577299191585e-05,0.0017767318901783487,0.00012474408726581752,0.0004999674213573292 93 | 91,0.00018819367280229927,4.516132275773301e-05,0.0014881500903264446,0.00011784135684548078,0.0004340989449263427 94 | 92,0.00018982007667156203,4.5222074261781676e-05,0.0020782215674644555,0.00011307937974392266,0.0003842511203095137 95 | 93,0.00018108854793743895,4.274411903486211e-05,0.002044306740448889,0.00010841420075272434,0.0003569834307397166 96 | 94,0.00018069828502900365,4.268003490033926e-05,0.0016672805873124655,0.0001035107821914774,0.00034214667653305884 97 | 95,0.00016686330557734306,4.0903098319989673e-05,0.0014565859711137047,9.877342016691946e-05,0.00033145611208066885 98 | 96,0.0001732192383081253,4.0993208949657854e-05,0.0013271475986230297,9.412465196489816e-05,0.0003256599068170391 99 | 97,0.00014678066114096768,3.9006527793467384e-05,0.0012693717934179388,8.85683656006094e-05,0.0003288015535280827 100 | 98,0.0002095090305166585,3.8645402346833666e-05,0.0012334064624649156,8.158819205996672e-05,0.000338880082098655 101 | 99,0.00013132124864828905,3.777660524761775e-05,0.0012164891056442176,7.733647466856908e-05,0.0003687424739508564 102 | -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN-Split1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/MV3-RNN-Split1.h5 -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN-Split2.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/MV3-RNN-Split2.h5 
-------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN-Split3.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/MV3-RNN-Split3.h5 -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN-Split4.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/MV3-RNN-Split4.h5 -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN-Split5.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/MV3-RNN-Split5.h5 -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN_CrossValidation.csv: -------------------------------------------------------------------------------- 1 | ,MSE,RMSE,R2_Score,Train_Time 2 | 0,9.85E-05,0.009927079,0.861439278,0 3 | 1,0.000258888,0.016089993,0.938144871,47.18599963 4 | 2,5.39E-05,7.34E-03,0.963602479,73.52999496 5 | 3,0.000407152,0.020178016,0.979568256,92.77700067 6 | 4,2.70E-04,0.016427616,0.968899491,134.3899958 7 | -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN_CrossValidation_TrainLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.18299041,0.001268928,0.000934458,0.000564899,0.000344955 3 | 1,0.002896921,0.001465245,0.00095504,0.000502478,0.000345573 4 
| 2,0.002257387,0.001252668,0.001325748,0.000714515,0.00034642 5 | 3,0.002131461,0.001790571,0.00105049,0.000535496,0.000427791 6 | 4,0.00192377,0.003179623,0.001313271,0.000289879,0.000421272 7 | 5,0.001816951,0.001555874,0.001570058,0.000415639,0.000477032 8 | 6,0.001785918,0.002061451,0.001343558,0.000284033,0.00039184 9 | 7,0.001745656,0.002374487,0.001165341,0.000459835,0.000453202 10 | 8,0.00172988,0.00220921,0.000929121,0.000302301,0.000403343 11 | 9,0.001729307,0.001609773,0.000967827,0.000547202,0.000455579 12 | 10,0.001730989,0.001718723,0.001090875,0.000481547,0.000419474 13 | 11,0.001727145,0.002112089,0.001135934,0.000985118,0.00041121 14 | 12,0.001719325,0.001950833,0.001059551,0.001497617,0.000370158 15 | 13,0.00170943,0.001814438,0.000950185,0.001400661,0.000323162 16 | 14,0.001692415,0.001615161,0.000843595,0.00056224,0.000335689 17 | 15,0.001681751,0.001792487,0.000795495,0.000575809,0.000299807 18 | 16,0.001683917,0.001588219,0.000928954,0.000434972,0.000314363 19 | 17,0.001666458,0.001879495,0.00103788,0.000487166,0.000320222 20 | 18,0.001648717,0.002152566,0.001071979,0.000475613,0.000379727 21 | 19,0.001628803,0.001683886,0.000965671,0.000471934,0.000377571 22 | 20,0.001608259,0.001329319,0.000851328,0.000499337,0.000477188 23 | 21,0.001588159,0.001460855,0.000800896,0.000531349,0.00044571 24 | 22,0.001568103,0.001512957,0.000885434,0.000523306,0.000440031 25 | 23,0.001553489,0.001428533,0.000949849,0.000546335,0.000379425 26 | 24,0.001585956,0.001394174,0.001011978,0.000521771,0.000353094 27 | 25,0.001588867,0.001367162,0.000934487,0.000531316,0.000311182 28 | 26,0.001567748,0.001356262,0.000875157,0.00048453,0.000296938 29 | 27,0.00152575,0.00134128,0.000736464,0.00054655,0.000265282 30 | 28,0.001446917,0.001325967,0.000793722,0.000496068,0.000279432 31 | 29,0.00144624,0.001313043,0.000824127,0.000518654,0.000291752 32 | 30,0.00146721,0.001303118,0.00090406,0.000472254,0.000344098 33 | 
31,0.001458213,0.001293802,0.000883409,0.00050987,0.000353672 34 | 32,0.001414723,0.001281068,0.00092161,0.00048028,0.000447639 35 | 33,0.001414872,0.001274357,0.00085588,0.000501692,0.000424992 36 | 34,0.001446888,0.001260621,0.000838505,0.000458244,0.000434956 37 | 35,0.001422233,0.001253072,0.000756472,0.000492992,0.000343043 38 | 36,0.001482982,0.001240382,0.000796754,0.000445827,0.000301417 39 | 37,0.001461556,0.001216271,0.000761307,0.000607591,0.000267977 40 | 38,0.001421208,0.001474095,0.000806792,0.000588866,0.000283685 41 | 39,0.001363113,0.001455411,0.000787426,0.000601024,0.000266026 42 | 40,0.001364325,0.001263382,0.000809951,0.000460008,0.000321307 43 | 41,0.001343323,0.001158042,0.000778261,0.000452912,0.000294082 44 | 42,0.001360546,0.001158001,0.000803956,0.000386844,0.000395893 45 | 43,0.001368117,0.001171134,0.000763461,0.000409414,0.000357094 46 | 44,0.00140335,0.001165123,0.000811542,0.000408732,0.000418181 47 | 45,0.001365772,0.001162691,0.000758222,0.000440047,0.000379446 48 | 46,0.001336551,0.001156048,0.000792687,0.000454725,0.000349345 49 | 47,0.001306923,0.001150424,0.000788435,0.000465238,0.000287728 50 | 48,0.001309397,0.001150418,0.0007846,0.000509953,0.000269714 51 | 49,0.001300242,0.001137251,0.000724305,0.000510903,0.000237865 52 | -------------------------------------------------------------------------------- /Multivatiate-RNN/MV3-RNN_CrossValidation_ValLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5, 2 | 0,0.002290739,0.000847438,7.73E-05,3.22E-05,9.42E-05, 3 | 1,0.001788652,0.000686635,9.38E-05,1.40E-05,0.000111109, 4 | 2,0.001696703,0.000695116,0.000103435,8.82E-05,0.000111546, 5 | 3,0.001658757,0.000689797,7.14E-05,6.76E-05,0.000135183, 6 | 4,0.001602496,0.000730093,8.32E-05,1.70E-05,0.000120479, 7 | 5,0.001472858,0.000675511,9.55E-05,6.18E-05,0.000126265, 8 | 6,0.001393618,0.000677663,6.35E-05,1.69E-05,0.000135958, 9 | 
7,0.001387856,0.000796631,7.43E-05,4.12E-05,7.15E-05, 10 | 8,0.001401317,0.000830583,5.00E-05,2.08E-05,0.000149296, 11 | 9,0.00142759,0.000797106,5.83E-05,1.70E-05,0.000126769, 12 | 10,0.001454348,0.000775499,4.96E-05,2.45E-05,0.000166608, 13 | 11,0.001481597,0.000803931,4.65E-05,3.98E-05,0.000138111, 14 | 12,0.00150057,0.000783,6.16E-05,4.15E-05,0.000134025, 15 | 13,0.001506198,0.000752859,4.21E-05,2.58E-05,0.000137263, 16 | 14,0.001500666,0.000737012,4.71E-05,1.85E-05,0.000132986, 17 | 15,0.001497491,0.000725999,4.01E-05,2.14E-05,0.000107209, 18 | 16,0.001502014,0.000707315,4.60E-05,1.61E-05,0.000115551, 19 | 17,0.001535123,0.000705958,3.55E-05,1.97E-05,0.000124008, 20 | 18,0.0015399,0.00072004,4.95E-05,1.91E-05,0.000112644, 21 | 19,0.001518997,0.000681738,3.51E-05,2.16E-05,0.000150235, 22 | 20,0.00150426,0.000682055,4.49E-05,1.92E-05,0.000160603, 23 | 21,0.001512142,0.00067655,3.28E-05,2.03E-05,0.000161246, 24 | 22,0.001455481,0.000674833,4.00E-05,1.80E-05,0.000121973, 25 | 23,0.001560804,0.000663991,3.01E-05,1.84E-05,0.000136213, 26 | 24,0.001639461,0.000658292,4.35E-05,1.54E-05,0.000137166, 27 | 25,0.001632484,0.00065167,3.26E-05,2.39E-05,0.000124275, 28 | 26,0.001583983,0.000645726,3.82E-05,1.37E-05,0.00011597, 29 | 27,0.001553009,0.000639066,2.99E-05,2.32E-05,0.000103829, 30 | 28,0.001493673,0.000634612,3.61E-05,1.43E-05,0.000111238, 31 | 29,0.001628399,0.000626379,2.95E-05,2.34E-05,0.000119594, 32 | 30,0.00164209,0.000621579,4.61E-05,1.38E-05,0.000106686, 33 | 31,0.001473436,0.000614526,2.86E-05,1.81E-05,0.000150464, 34 | 32,0.001372451,0.000609846,4.99E-05,1.50E-05,0.000134718, 35 | 33,0.00166915,0.000604542,2.64E-05,2.02E-05,0.000145386, 36 | 34,0.001682327,0.000596482,4.15E-05,1.33E-05,0.000146092, 37 | 35,0.001708418,0.000596518,2.45E-05,1.68E-05,0.000119544, 38 | 36,0.001869676,0.000584675,3.38E-05,1.70E-05,0.000119443, 39 | 37,0.001741645,0.000566996,2.38E-05,3.29E-05,0.000102482, 40 | 38,0.001428482,0.000608338,3.36E-05,1.40E-05,0.000115585, 41 | 
39,0.001453685,0.000573006,2.29E-05,1.95E-05,0.000116133, 42 | 40,0.001468856,0.000564756,3.32E-05,1.52E-05,9.70E-05, 43 | 41,0.001362969,0.000558126,2.23E-05,3.44E-05,0.000138553, 44 | 42,0.001562128,0.000553966,4.02E-05,1.91E-05,0.000109721, 45 | 43,0.001827447,0.000552557,2.05E-05,2.64E-05,0.000151472, 46 | 44,0.001579896,0.000547382,3.39E-05,1.72E-05,0.00016033, 47 | 45,0.001587545,0.000544594,2.43E-05,1.73E-05,0.000154975, 48 | 46,0.001564979,0.000539361,4.00E-05,1.20E-05,0.000133354, 49 | 47,0.001600503,0.000536537,2.04E-05,2.02E-05,0.000116486, 50 | 48,0.001625337,0.000533076,4.10E-05,1.39E-05,0.000105599, 51 | 49,0.001570012,0.000527812,1.90E-05,2.04E-05,0.000102771, 52 | -------------------------------------------------------------------------------- /Multivatiate-RNN/model_simple_30_[50,45]_1e-3_32.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Multivatiate-RNN/model_simple_30_[50,45]_1e-3_32.h5 -------------------------------------------------------------------------------- /Multivatiate-RNN/model_simple_30_[50,45]_1e-3_32_train_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.03340153823365055 2 | 1,0.001735811851284152 3 | 2,0.001172150411330734 4 | 3,0.001043682540118061 5 | 4,0.0009572485287618023 6 | 5,0.0009053013471667388 7 | 6,0.001135374784784769 8 | 7,0.0009924244407595556 9 | 8,0.001065079510561015 10 | 9,0.001035767552250248 11 | 10,0.000939633828600186 12 | 11,0.0007872662035831751 13 | 12,0.0008580217554821725 14 | 13,0.0008254591691537216 15 | 14,0.0007064822630496963 16 | 15,0.0007199125658462252 17 | 16,0.0006971094099437243 18 | 17,0.0006615981886366436 19 | 18,0.000657321750690284 20 | 19,0.0006127763072551863 21 | 20,0.0005745342827579314 22 | 21,0.0005931658559658947 23 | 22,0.0005060664302155551 24 | 
23,0.0005107730232416094 25 | 24,0.0005934128942153364 26 | 25,0.0006484640354360992 27 | 26,0.0005157976111570657 28 | 27,0.0005858510179509621 29 | 28,0.000803924516152788 30 | 29,0.0004027763361031526 31 | 30,0.00038591337801796767 32 | 31,0.00037734005935464364 33 | 32,0.00036323096128746595 34 | 33,0.0003888647552651288 35 | 34,0.0003818011124467357 36 | 35,0.00042777584903906353 37 | 36,0.00037912602227488807 38 | 37,0.0003529875267763628 39 | 38,0.0003453587957513262 40 | 39,0.0004452790240677161 41 | 40,0.0003261242436188252 42 | 41,0.0003120240419620108 43 | 42,0.00029712760550200634 44 | 43,0.0003871041772385128 45 | 44,0.00031719285541005317 46 | 45,0.0003534627266847124 47 | 46,0.00030677324536721335 48 | 47,0.0003834808171143574 49 | 48,0.00028553405496565084 50 | 49,0.00031158080486826914 51 | 50,0.0003122699751424504 52 | 51,0.0003283025277818034 53 | 52,0.0003212339014212293 54 | 53,0.00031030631094375126 55 | 54,0.0002627046006765199 56 | 55,0.00038610840311763164 57 | 56,0.00025263996233275475 58 | 57,0.0006267924228358053 59 | 58,0.00022753335279744157 60 | 59,0.0003613197562049712 61 | 60,0.00022184086488042474 62 | 61,0.0004463286253729539 63 | 62,0.00022739970528407508 64 | 63,0.00040416166762271595 65 | 64,0.00028839728350375327 66 | 65,0.0002196824349747085 67 | 66,0.0003281582776835275 68 | 67,0.00021068462176640834 69 | 68,0.00036800901304652705 70 | 69,0.0002027856426585409 71 | 70,0.0003573222207449644 72 | 71,0.00019672708431701737 73 | 72,0.00032687044867540845 74 | 73,0.0001975839449675935 75 | 74,0.00041595406744368145 76 | 75,0.00019455504743764432 77 | 76,0.0004473729054049436 78 | 77,0.00019081146765950304 79 | 78,0.0003529217623856579 80 | 79,0.00018540066655939596 81 | 80,0.00036828059418745414 82 | 81,0.00018875001329448512 83 | 82,0.00033945101177326265 84 | 83,0.0001886891238619926 85 | 84,0.0003062574610835574 86 | 85,0.00019065914898190833 87 | 86,0.0003544621628139095 88 | 87,0.00018667161329097762 89 | 
88,0.0003799940494850187 90 | 89,0.00016990374209217225 91 | 90,0.0002928327103139967 92 | 91,0.0001750539749391501 93 | 92,0.0002248693858541308 94 | 93,0.00018709119022957676 95 | 94,0.00020036377251324693 96 | 95,0.00017085310949001335 97 | 96,0.0001880191690050149 98 | 97,0.00015615984388794355 99 | 98,0.00021561824193120984 100 | 99,0.00015342537019162516 101 | 100,0.0002155087841706527 102 | 101,0.00016993555112549988 103 | 102,0.0002177366141340808 104 | 103,0.0001529734400677373 105 | 104,0.0002600824723158 106 | 105,0.0001395819845046805 107 | 106,0.000201777866733581 108 | 107,0.00014518266430727097 109 | 108,0.00018747306755378425 110 | 109,0.0001579707975416354 111 | 110,0.00016792887582126148 112 | 111,0.00017605162666078694 113 | 112,0.0001551957590748369 114 | 113,0.00017377178053508578 115 | 114,0.0001627807543951473 116 | 115,0.00018212259163803566 117 | 116,0.00015978087654982723 118 | 117,0.0001934567382847437 119 | 118,0.000169613778952348 120 | 119,0.00015499848224928824 121 | 120,0.0001797859631553376 122 | 121,0.00016760909410123702 123 | 122,0.00017414922718631473 124 | 123,0.00017188706173094446 125 | 124,0.0001833461509644259 126 | 125,0.00015609315685658702 127 | 126,0.00016874462799828792 128 | 127,0.00016984167463273282 129 | 128,0.0001703310548292728 130 | 129,0.00025210284475764516 131 | 130,0.00017457917197436466 132 | 131,0.0002061159571210269 133 | 132,0.00019503508327151187 134 | 133,0.00018349560618560761 135 | 134,0.00015892456235752238 136 | 135,0.0001530557624993864 137 | 136,0.00020939738789593778 138 | 137,0.00014007759538133184 139 | 138,0.0005499802770225895 140 | 139,0.000989178757952785 141 | 140,0.00014301914500462133 142 | 141,0.00020763091781275358 143 | 142,0.0001323671943880934 144 | 143,0.00018457445516090868 145 | 144,0.0001329339793560128 146 | 145,0.0001600330232075696 147 | 146,0.00013350455348692733 148 | 147,0.00015835356369296377 149 | 148,0.00013085854865461244 150 | 149,0.00017108453132390371 151 | 
150,0.0001301952305817702 152 | 151,0.0001954485450764521 153 | 152,0.00012874639630374197 154 | 153,0.00020937704227300043 155 | 154,0.000126192985795529 156 | 155,0.0001992219705173316 157 | 156,0.00012662691071978273 158 | 157,0.0001914114286185686 159 | 158,0.00012802902568217385 160 | 159,0.00019681128724937435 161 | 160,0.00013524444008836809 162 | 161,0.00015185314045691492 163 | 162,0.0001405015588672277 164 | 163,0.0001477832108890429 165 | 164,0.00015023022238358385 166 | 165,0.00014067932963583553 167 | 166,0.0001714089714685623 168 | 167,0.00013733932868530488 169 | 168,0.00018148243795221202 170 | 169,0.00012840527886645662 171 | 170,0.0001940703418530057 172 | 171,0.0001290039125568996 173 | 172,0.00018509404343278085 174 | 173,0.0001346343041113942 175 | 174,0.00018153923229395183 176 | 175,0.00013086748071186108 177 | 176,0.00017017979021147767 178 | -------------------------------------------------------------------------------- /Multivatiate-RNN/model_simple_30_[50,45]_1e-3_32_val_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.002456867957354095 2 | 1,0.0019059405491241835 3 | 2,0.0013676592989290283 4 | 3,0.0013564578913258319 5 | 4,0.001182789256158167 6 | 5,0.0009892736928771302 7 | 6,0.0012843927802254724 8 | 7,0.0010093454401908179 9 | 8,0.0008511634026300398 10 | 9,0.0009073491302597138 11 | 10,0.0008864754332373909 12 | 11,0.0007539680766426557 13 | 12,0.0007315642352192368 14 | 13,0.000735543629618505 15 | 14,0.0007588742176839052 16 | 15,0.0006376114062332983 17 | 16,0.00060045085117633 18 | 17,0.0005140764427441074 19 | 18,0.0004941515758468323 20 | 19,0.00047449631532231733 21 | 20,0.00039843178697064016 22 | 21,0.00041082456105554237 23 | 22,0.0003923220413330282 24 | 23,0.0003380046808099441 25 | 24,0.00039550504044979373 26 | 25,0.00043248617628644285 27 | 26,0.00029391743599354653 28 | 27,0.00037331234774614177 29 | 28,0.00029549387553232754 30 | 29,0.000279207011664636 31 | 
30,0.00024065896146943034 32 | 31,0.00022533315934834193 33 | 32,0.00022937546886099927 34 | 33,0.00021997352739435498 35 | 34,0.00023882685192556854 36 | 35,0.00021385149846667864 37 | 36,0.00016908117972678095 38 | 37,0.0001660927852374042 39 | 38,0.00021111216010621117 40 | 39,0.00015860901256610114 41 | 40,0.00012872699900927663 42 | 41,0.0001372221336571818 43 | 42,0.00017096370106532517 44 | 43,0.00013582238417920924 45 | 44,0.00014697201748023434 46 | 45,0.00012256826880058363 47 | 46,0.00015382539584200223 48 | 47,0.00010829160320226256 49 | 48,0.00010988889022464795 50 | 49,0.00011325491623172462 51 | 50,0.00011989829815362476 52 | 51,0.00010696025707408735 53 | 52,0.00010773887549595021 54 | 53,9.846885677305235e-05 55 | 54,0.0001365323328422402 56 | 55,9.240009903000032e-05 57 | 56,0.0002500160276385826 58 | 57,0.00014225449122636842 59 | 58,0.0001256882229265816 60 | 59,0.00015333025430855715 61 | 60,0.0001638258024750529 62 | 61,0.00016469782092957266 63 | 62,0.00011700842351504584 64 | 63,9.336182905949518e-05 65 | 64,0.00011879942089743875 66 | 65,0.00010059441203476944 67 | 66,0.00011996325800297999 68 | 67,0.0001085996384047687 69 | 68,0.00014873919710747796 70 | 69,0.00010370809867543871 71 | 70,9.967154528951088e-05 72 | 71,9.721704360030352e-05 73 | 72,9.500273562430401e-05 74 | 73,0.00012711520448723958 75 | 74,0.00012525268680541797 76 | 75,0.0001855661054222868 77 | 76,0.0001260346902744601 78 | 77,0.00012720939135945202 79 | 78,0.00014223618907689407 80 | 79,0.000128237818311782 81 | 80,9.225071278299531e-05 82 | 81,0.00013326239524205817 83 | 82,9.546525216291026e-05 84 | 83,0.00014007244219085916 85 | 84,0.00012290598673268048 86 | 85,0.00010289579789631436 87 | 86,8.406784292543798e-05 88 | 87,0.0001499671892555016 89 | 88,0.0001033259465008923 90 | 89,0.00013407991993617273 91 | 90,9.416266408835376e-05 92 | 91,7.63171464686168e-05 93 | 92,7.807437191043886e-05 94 | 93,7.212735116145306e-05 95 | 94,7.206804491536552e-05 96 | 
95,7.13823877199099e-05 97 | 96,6.779383512224633e-05 98 | 97,7.817782379366567e-05 99 | 98,6.740762990160614e-05 100 | 99,9.325910343656922e-05 101 | 100,6.993320235425757e-05 102 | 101,9.660027155658537e-05 103 | 102,7.188740407864814e-05 104 | 103,0.00011538285008687643 105 | 104,6.482628239855368e-05 106 | 105,9.051572779458279e-05 107 | 106,6.725986582474618e-05 108 | 107,7.96974797176594e-05 109 | 108,6.720939907662861e-05 110 | 109,7.507318944202633e-05 111 | 110,7.423078616121234e-05 112 | 111,6.700243250515267e-05 113 | 112,7.39928022668528e-05 114 | 113,7.225456877826725e-05 115 | 114,7.490812952361042e-05 116 | 115,7.086631850615038e-05 117 | 116,8.09985274996192e-05 118 | 117,7.630682813226175e-05 119 | 118,6.794458152817166e-05 120 | 119,8.082854878825414e-05 121 | 120,7.19280543626347e-05 122 | 121,7.78592002154662e-05 123 | 122,7.409087809235965e-05 124 | 123,7.97552488870007e-05 125 | 124,7.056243296340304e-05 126 | 125,7.502862845362314e-05 127 | 126,7.963462139972003e-05 128 | 127,7.266517369251478e-05 129 | 128,0.00011117784018438694 130 | 129,7.834963794976457e-05 131 | 130,8.479370036234151e-05 132 | 131,0.00010222880497505055 133 | 132,6.901570989377993e-05 134 | 133,7.393408019266598e-05 135 | 134,6.849801179419718e-05 136 | 135,9.682087724063671e-05 137 | 136,9.644192734704734e-05 138 | 137,9.97961722970115e-05 139 | 138,0.0004013399162500672 140 | 139,8.18794133816551e-05 141 | 140,0.00011825527341786835 142 | 141,8.417073918031509e-05 143 | 142,8.209771181196161e-05 144 | 143,7.146395556419943e-05 145 | 144,7.718231413318906e-05 146 | 145,6.911420503855714e-05 147 | 146,7.681936336428769e-05 148 | 147,6.915446314996331e-05 149 | 148,8.184416483614184e-05 150 | 149,7.139667676430675e-05 151 | 150,0.00010123640669211117 152 | 151,8.82993406215943e-05 153 | 152,0.0001007917591685995 154 | 153,8.112019346867303e-05 155 | 154,9.779597262096422e-05 156 | 155,7.818946531049824e-05 157 | 156,9.25265292316294e-05 158 | 157,7.999326183146527e-05 159 
| 158,9.315910505438873e-05 160 | 159,7.040912274947638e-05 161 | 160,7.042317973073564e-05 162 | 161,6.955951820473965e-05 163 | 162,6.856031727750897e-05 164 | 163,7.302288468207531e-05 165 | 164,6.736911861868212e-05 166 | 165,8.409481288944784e-05 167 | 166,6.90167443614963e-05 168 | 167,8.588262740878746e-05 169 | 168,7.142373775453739e-05 170 | 169,9.830464600801039e-05 171 | 170,7.032298218840311e-05 172 | 171,9.567088893033124e-05 173 | 172,6.744371115283811e-05 174 | 173,9.082928250169476e-05 175 | 174,6.899114855571762e-05 176 | 175,7.882909078402975e-05 177 | 176,6.97163739283412e-05 178 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sequence Learning: Predicting Stock Prices using Multivariate Analysis 2 | 3 | ## I) Abstract 4 | In this paper, we build multivariate analysis models to predict stock price movement on Carriage Services, Inc. stocks data. Stock prices depend on various factors and their complex dynamics, which makes them a difficult problem in the real world. The purpose of this paper is to analyze the capability of a neural network to solve this problem efficiently. Recurrent Neural Networks (RNNs) have demonstrated their capability of addressing complex time series problems. We analyzed different multivariate models based on different RNN architectures like GRU and LSTM and compare them with their univariate counterparts and also with each other. We have used a soft computing approach based on RNNs, and models have been developed to find the temporal dependencies and forecast stock values of a particular company from its past history of stocks. We propose a multivariate neural network architecture for predicting stock prices, and compare and contrast the prediction error of these models with a univariate LSTM model.
From the results, we infer that multivariate prediction models easily outperform univariate prediction models when trained on the same data. We also show that multivariate prediction models are more efficient and faster to train and deploy in a business environment. 5 | 6 | 7 | ## II) Introduction 8 | 9 | Stock price prediction is one of the most important business problems and has attracted the interest of all the stakeholders. Much effort has gone into improving the performance and reliability of forecasting and into reducing the complexity of the algorithms used in solving this problem. However, the methods found so far are either based on simple linear regression assumptions (like ARIMA) or do not make full use of the data available and only consider one factor while forecasting (non-linear univariate models like ARCH, TAR [1] and deep learning models). Some researchers have also tried a combination of ANN and fuzzy logic [2] to use human-like reasoning for this problem. But the stock prediction problem is still open. Stock prices are highly dynamic, have non-linear relationships and are dependent on many factors at the same time [3]. We try to solve this problem of stock market forecasting using multivariate analysis. 10 | 11 | Since multivariate time series have more features than univariate time series, they are more informative than the latter, so it is almost always better to use a multivariate model to forecast the trend of complex systems like stocks. We attempt to help the research community better understand this question and to find an answer for it. Recurrent Neural Networks (RNNs) and their extensions like GRU and LSTM have shown good performance on other sequential data like sound waves, time series variations and natural language processing. 12 | 13 | We have used different deep learning techniques, namely RNN, GRU and LSTM, to model our problem.
It has been shown that deep learning algorithms have the ability to identify existing patterns in the data and exploit them by using a soft learning process [4]. Unlike other statistical and machine learning algorithms, deep learning architectures are capable of finding short-term as well as long-term dependencies in the data and give good predictions by finding these hidden relationships. 14 | 15 | We have proposed a 3-level methodology for our work. First, we preprocess the data to make the data multidimensional and suitable for our network architectures. Next, we split the data into train and test sets and train our models on the training data. At the final step, we make predictions using the models trained in the previous step on test data and calculate and analyze various error metrics. This paper is organized in four parts. Part (III) presents the theoretical background on the various architectures used, part (IV) shows the methodology used to conduct this experiment, part (VI) contains the results we obtained and conclusions are drawn in part (VII). 16 | 17 | ## III) Literature Review 18 | 19 | The following architectures are used in this paper: 20 | 21 | 1) Recurrent Neural Networks (RNN): 22 | RNNs are a class of ANNs where the output from the previous step 23 | is fed as input to the current step along with the normal input. In 24 | feed-forward ANNs, all the inputs and outputs are independent of 25 | each other, but in cases where it is required to predict a time 26 | series, the previous values are needed and hence there is a need 27 | to remember them. 28 | It has been found that RNNs suffer from the vanishing gradient problem 29 | [5]. As we propagate the error through the network, it has to go 30 | through the temporal loop – the hidden layers connected to 31 | themselves in time by means of the recurrent weight. Because this 32 | weight is applied many, many times on top of itself, that causes 33 | the gradient to decline rapidly.
As a result, weights of the layers 34 | on the very far left are updated much more slowly than the weights of 35 | the layers on the far right. This creates a domino effect because 36 | the weights of the far-left layers define the inputs to the far-right 37 | layers. Therefore, the whole training of the network suffers, and 38 | that is called the problem of the vanishing gradient. 39 | 40 | 2) Long Short Term Memory (LSTM): 41 | LSTM is an RNN network proposed by Sepp Hochreiter and 42 | Jürgen Schmidhuber in 1997 [6] to solve the problem of 43 | vanishing gradients in RNNs. LSTM uses the following gates to 44 | solve the problem: 45 | 46 | - Forget Gate: If set to true, the cell forgets the information coming from previous layers. 47 | - Input Gate: Chooses which value from the input is going to update the memory state. 48 | - Output Gate: Chooses what the cell output will be on the basis of the input and the memory of the cell. 49 | 50 | 3) Gated Recurrent Unit (GRU): 51 | It is a variation of RNN introduced by Kyunghyun Cho et al. [7] 52 | in 2014. It is like an LSTM unit without an output gate. It has 53 | fewer parameters than LSTM and less complexity. GRUs 54 | have shown better performance than LSTM on certain smaller 55 | datasets, but they are still weaker than LSTM overall. 56 | 57 | ## IV) Methodology 58 | 59 | - **Raw Data:** 60 | 61 | We used the historical stock prices of Carriage Services, Inc. stocks 62 | obtained from Yahoo Finance [8]. It contains 5670 records of daily 63 | stock prices of the stocks from 09/08/1996 to 22/02/2019. Each record 64 | contains information on the high, low, opening and closing value of the stock 65 | as well as the volume of the stock sold on that day. 66 | 67 | - **Data Pre-processing:** 68 | 69 | First, we remove some redundant and noisy data, such as the records 70 | with volume 0 and the records that are identical to the previous record.
For 71 | unifying the data range, we applied Min-Max normalization and 72 | mapped the values to a range of 0 to 1. 73 | 74 | This data was split into train, validation and test data. The training data 75 | contains records from 1 Jan 1997 to 31 Dec 2006, validation data 76 | contains records from 1 Jan 2007 to 31 Dec 2008 and test data contains 77 | records from 1 Jan 2009 to 31 Dec 2010. 78 | 79 | 80 | - **Training Process:** 81 | 82 | We train data on three sequential deep learning architectures, RNN, 83 | GRU and LSTM for our research. RNN is a special type of neural 84 | network where connections are made in a directed circle between the 85 | computational units. RNN make use of the internal memory to learn 86 | from the arbitrary sequence, unlike the feed forward neural networks. 87 | Each unit in an RNN has an activation function and weight. The 88 | activation function is time varying and real valued. The weights are 89 | modifiable. GRU and LSTM are extensions of RNN architecture. Each 90 | network we have created uses 3 layers of the respective RNN cell and 91 | a dense layer of 1 cell at the end. 92 | 93 | - **Testing and Error Calculation:** 94 | 95 | Each model has been tested on the test set and their Mean Squared Error (MSE), Root Mean Squared Error (RMSE) and R2-score are 96 | calculated. 97 | 98 | 99 | 100 | ### Model 1: Univariate-LSTM: 101 | 102 | The model is trained only on the Close price series of the dataset we 103 | obtained; thus, it is a univariate model. Different parameters of the ANN are as 104 | follows: 105 | 106 | - Timesteps: 40 107 | - Neurons in each Layer: 40 and 35 108 | - Learning Rate: 0.001 109 | - Batch Size: 64 110 | - Total Trainable Parameters: 17408 111 | 112 | The training data is fed to this network and the model is trained for 250 113 | epochs on the training data and validated by the validation data. 
114 | 115 | ### Model 2: Multivariate-RNN: 116 | 117 | The model is trained on the series of records containing High price (Highest Correlation with target), Volume (Lowest Correlation with 118 | target) and Close price of the stock. Different parameters of this ANN are as follows: 119 | 120 | - Timesteps: 30 121 | - Neurons in each Layer: 50 and 45 122 | - Learning Rate: 0.001 123 | - Batch Size: 32 124 | - Total Trainable Parameters: 7087 125 | 126 | The training data is fed to this network and the model is trained for 150 epochs on the training data and validated by the validation data. 127 | 128 | ### Model 3: Multivariate-GRU: 129 | 130 | The model is trained on the series of records containing High price 131 | (Highest Correlation with target), Volume (Lowest Correlation with 132 | target) and Close price of the stock. Different parameters of this ANN are as follows: 133 | 134 | - Timesteps: 40 135 | - Neurons in each Layer: 40 and 35 136 | - Learning Rate: 0.0001 137 | - Batch Size: 64 138 | - Total Trainable Parameters: 13359 139 | 140 | The training data is fed to this network and the model is trained for 150 141 | epochs on the training data and validated by the validation data. 142 | 143 | ### Model 4: Multivariate-LSTM: 144 | 145 | The model is trained on the series of records containing High price 146 | (Highest Correlation with target), Volume (Lowest Correlation with 147 | target) and Close price of the stock. Different parameters of this ANN are as follows: 148 | 149 | - Timesteps: 50 150 | - Neurons in each Layer: 40 and 35 151 | - Learning Rate: 0.001 152 | - Batch Size: 64 153 | - Total Trainable Parameters: 17800 154 | 155 | The training data is fed to this network and the model is trained for 200 156 | epochs on the training data and validated by the validation data. 157 | 158 | 159 | ## V) Tools and Technology Used 160 | 161 | We used Python for this project.
As a framework we used 162 | Keras, which is a high-level neural network API written in Python. But 163 | Keras can’t work by itself; it needs a backend for low-level operations. 164 | Thus, we installed a dedicated software library — Google’s TensorFlow. 165 | 166 | For scientific computation, we installed SciPy. As a development environment we used the Anaconda Distribution and Jupyter Notebook. 167 | We used Matplotlib for data visualization, NumPy for various array 168 | operations and Pandas for data analysis. 169 | 170 | ## VI) Results 171 | 172 | The experiments were done for the four deep learning models we 173 | trained. The models are cross-validated on a window size of 600 records 174 | and 5 splits. The final results obtained are shown in Table 1 below. 175 | 176 | **Table 1: Results of different models on test data** 177 | 178 | | Model | Features Used | MSE | RMSE | R2-score | 179 | |---|---|---|---|---| 180 | | Univariate-LSTM | Close | 0.0004030796 | 0.0185444448 | 0.9113916110 | 181 | | Multivariate-RNN | [High,Volume,Close] | 0.0002176880 | 0.0139925408 | 0.9423308750 | 182 | | Multivariate-GRU | [High,Volume,Close] | 0.0002792562 | 0.0155916908 | 0.9353505164 | 183 | | Multivariate-LSTM | [High,Volume,Close] | 0.0004895794 | 0.0179982514 | 0.9214646906 | 184 | 185 | 186 | The results from the table reveal that multivariate analysis not only 187 | improves the performance of the model significantly but also reduces 188 | the complexity of the model (the Univariate-LSTM model has 17408 trainable 189 | parameters whereas the Multivariate-RNN model has only 7087 trainable 190 | parameters), making multivariate analysis a more efficient tool for 191 | stock prediction. 192 | 193 | We also observe that the Multivariate-GRU and Multivariate-LSTM 194 | models do not improve performance as much as expected and possible 195 | reasons for it are: 196 | - The dataset used may not have long dependencies. 197 | - More data is required for these models for training.
198 | 199 | ## VII) Conclusion and Future Scope 200 | 201 | We have proposed a multivariate neural network approach to solve the 202 | problem of stock price prediction. We conclude that the multivariate ANN 203 | models clearly outperform the best univariate ANN model (Univariate 204 | LSTM). We also conclude that multivariate models make better use of 205 | the data given and improve both the performance and efficiency of the 206 | stock prediction task. We proposed a multivariate deep learning-based 207 | approach for predicting the stock prices. The approach we suggested 208 | can only be solidified after comparing it with other methods of stock 209 | prediction. We encourage researchers to also find out the reason for the 210 | underperformance of the GRU and LSTM models of multivariate 211 | analysis. 212 | 213 | 214 | ## VIII) References 215 | 216 | [1] K. Soman, V. Sureshkumar, V. T. N. Pedamallu, S. A. Jami, N. C. 217 | Vasireddy and V. K. Menon, “Bulk price forecasting using spark over 218 | nse data set,” Springer, 2016, pp. 137–146. International Conference 219 | on Data Mining and Big Data. 220 | 221 | [2] C. S. Lin, H. A. Khan and C. C. Huang, 'Can the neuro fuzzy model 222 | predict stock indexes better than its rivals?', Proc. CIRJE, CIRJE-F- 223 | 165, Aug, 2002. 224 | 225 | [3] Z.P. Zhang, G.Z. Liu, and Y.W. Yang, “Stock market trend 226 | prediction based on neural networks, multiresolution analysis and 227 | dynamical reconstruction,” pp.155-56, March 2000. IEEE/IAFE 228 | Conference on Computational Intelligence for Financial Engineering, 229 | Proceedings (CIFEr). 230 | 231 | [4] Yoshua Bengio, I. J. Goodfellow, and A. Courville, “Deep 232 | learning,” pp. 436–444, Nature, vol. 521, 2015. 233 | 234 | [5] Razvan Pascanu, Tomas Mikolov, Yoshua Bengio, “On the 235 | difficulty of training Recurrent Neural Networks”, arXiv:1211.5063. 236 | 237 | [6] S. Hochreiter and J. Schmidhuber (1997). "Long short-term 238 | memory". Neural Computation. 1735 – 1780.
239 | doi:10.1162/neco.1997.9.8.1735. 240 | 241 | [7] Kyunghyun Cho (2014). "Learning Phrase Representations using 242 | RNN Encoder-Decoder for Statistical Machine Translation". 243 | arXiv:1406.1078. 244 | 245 | [8] https://in.finance.yahoo.com/quote/CSV/history/ 246 | 247 | 248 | -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM-Split1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM-Split1.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM-Split2.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM-Split2.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM-Split3.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM-Split3.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM-Split4.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM-Split4.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM-Split5.h5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM-Split5.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_40_[40,35]_1e-3_64.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ninja3697/Stocks-Price-Prediction-using-Multivariate-Analysis/bb3ed01605bba46d7f0b0f52f055114b0013586e/Univariate -LSTM/UV-LSTM_40_[40,35]_1e-3_64.h5 -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_40_[40,35]_1e-3_64_train_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.12159701444347086 2 | 1,0.07275968477389225 3 | 2,0.06069497881885242 4 | 3,0.021695560008038135 5 | 4,0.008420009820773783 6 | 5,0.021642145686865944 7 | 6,0.003952962693739495 8 | 7,0.003361062733068441 9 | 8,0.008282972790052141 10 | 9,0.0025307869487630223 11 | 10,0.004136211707708285 12 | 11,0.0034476279861333716 13 | 12,0.002780956749844299 14 | 13,0.0028292490010187405 15 | 14,0.0026506393029580287 16 | 15,0.002475125141067882 17 | 16,0.0023612781503588205 18 | 17,0.002314376859017883 19 | 18,0.0023097004874102593 20 | 19,0.002261615895682057 21 | 20,0.0022458343488438236 22 | 21,0.0022344591594160054 23 | 22,0.002282561423648572 24 | 23,0.0023195505661053836 25 | 24,0.002300064831265399 26 | 25,0.0023618821185377647 27 | 26,0.0023030081354866554 28 | 27,0.0022642565603459707 29 | 28,0.0024328882533691193 30 | 29,0.002390708050655608 31 | 30,0.0024315207091208917 32 | 31,0.0022782626181648046 33 | 32,0.0024259159351964223 34 | 33,0.002266571662027737 35 | 34,0.002343662034163388 36 | 35,0.0022465676638607 37 | 36,0.0022672956075176546 38 | 37,0.00219686457000948 39 | 38,0.002182023419411806 40 | 39,0.0021401157500877898 41 | 40,0.002107234662221574 42 | 
41,0.0020666113001892093 43 | 42,0.002049299114370675 44 | 43,0.002007599891389919 45 | 44,0.0019737739719306716 46 | 45,0.0018872827388854189 47 | 46,0.0018791492394989796 48 | 47,0.0017988742280343132 49 | 48,0.0017964459969817938 50 | 49,0.0016976028061624748 51 | 50,0.0016596283184275068 52 | 51,0.001610678839833138 53 | 52,0.0015113549854908746 54 | 53,0.0014593362063110684 55 | 54,0.0013307364856702424 56 | 55,0.0014080927361977626 57 | 56,0.0015242288750769056 58 | 57,0.001271364368260791 59 | 58,0.0021678989918563264 60 | 59,0.0035814531786647163 61 | 60,0.004304974252338088 62 | 61,0.0024315009922679337 63 | 62,0.0032878470194885848 64 | 63,0.002745055979672782 65 | 64,0.0026153904337534737 66 | 65,0.0021104815603199185 67 | 66,0.0021364834692594926 68 | 67,0.0018068540861691652 69 | 68,0.001892434870835242 70 | 69,0.0016938439640561953 71 | 70,0.0017382746278026113 72 | 71,0.0016483502720589654 73 | 72,0.0016653074184439975 74 | 73,0.0016246692954607143 75 | 74,0.001642864872555947 76 | 75,0.001621414314009378 77 | 76,0.001624971580000934 78 | 77,0.0016213573450243924 79 | 78,0.0016181440916687904 80 | 79,0.001616000125689636 81 | 80,0.0016063778946938162 82 | 81,0.0016021497607425312 83 | 82,0.0015917964563590375 84 | 83,0.0015795575210417743 85 | 84,0.0015773379517739876 86 | 85,0.0015677458356448267 87 | 86,0.0015548950167519225 88 | 87,0.0014588389048341666 89 | 88,0.0013752147269935283 90 | 89,0.001227056367961911 91 | 90,0.0013637322524271147 92 | 91,0.0014476715390562525 93 | 92,0.0014725323390266198 94 | 93,0.0015213799954837688 95 | 94,0.001533331917326179 96 | 95,0.0015323309552706743 97 | 96,0.0015321240173921757 98 | 97,0.0015050078941523238 99 | 98,0.0014885810564193907 100 | 99,0.0014582831605484256 101 | 100,0.0014358925578709764 102 | 101,0.0014012617875899499 103 | 102,0.0013939946304669844 104 | 103,0.0013693578634124469 105 | 104,0.0013582394464203684 106 | 105,0.0013460286070539492 107 | 106,0.0013589210685364893 108 | 
107,0.0013478032739276202 109 | 108,0.0013415614551342166 110 | 109,0.0013331476866174725 111 | 110,0.0013211383507359183 112 | 111,0.0013087564684724615 113 | 112,0.0013019317788197753 114 | 113,0.0013027346196612037 115 | 114,0.0012798653736646166 116 | 115,0.0012803128731327961 117 | 116,0.0013039796785905668 118 | 117,0.0012403858226951508 119 | 118,0.001264112696446704 120 | 119,0.0010653711470336984 121 | 120,0.0012771216638683461 122 | 121,0.0017547691443986777 123 | 122,0.0016028314464766123 124 | 123,0.001308606947852167 125 | 124,0.0011807114998667414 126 | 125,0.0010802547498763423 127 | 126,0.000997249917824655 128 | 127,0.0009385942517066283 129 | 128,0.0008988594672156996 130 | 129,0.0008698736408015718 131 | 130,0.0008523239091763654 132 | 131,0.0008481077102080254 133 | 132,0.0008466495894471235 134 | 133,0.0008535631092062698 135 | 134,0.000878363580537332 136 | 135,0.0009032555667480932 137 | 136,0.0009092017958689459 138 | 137,0.0009379188364108253 139 | 138,0.0009539369974553472 140 | 139,0.0009861731480883794 141 | 140,0.0009330643224647513 142 | 141,0.0011090966714813605 143 | 142,0.0017676452729328315 144 | 143,0.0015685348015636055 145 | 144,0.0014049895075415883 146 | 145,0.0008161093747332067 147 | 146,0.0010207828101725554 148 | 147,0.0009397106084339391 149 | 148,0.0007290487603240028 150 | 149,0.0006514351988807168 151 | 150,0.000590072689136823 152 | 151,0.0005324150199650298 153 | 152,0.000545843246617358 154 | 153,0.000491653006988186 155 | 154,0.0006032849598918204 156 | 155,0.0006264101851786219 157 | 156,0.0005512923383598764 158 | 157,0.000779090906973138 159 | 158,0.0009302359610820081 160 | 159,0.0008243585300762347 161 | 160,0.001145944845750387 162 | 161,0.001293898084788287 163 | 162,0.0015950719840019693 164 | 163,0.0014279031122170654 165 | 164,0.002605051779671222 166 | 165,0.0023545238070345467 167 | 166,0.0025221457686043 168 | 167,0.0012015794414720871 169 | 168,0.0008104657073000462 170 | 169,0.0005112540863818581 171 
| 170,0.0004085612808969495 172 | 171,0.00039482819889066507 173 | 172,0.00038115121636119603 174 | 173,0.0003771084361594121 175 | 174,0.00037722563608412837 176 | 175,0.0003750811657932854 177 | 176,0.000387954290450543 178 | 177,0.00037691598494917584 179 | 178,0.00037809612525428873 180 | 179,0.0003969385141012614 181 | 180,0.0004578907463211282 182 | 181,0.0005571906422420683 183 | 182,0.000820027456705275 184 | 183,0.0012384252914702614 185 | 184,0.0016059414404115142 186 | 185,0.0015499750511353892 187 | 186,0.001275287029112782 188 | 187,0.0009602378533861036 189 | 188,0.0007630908404833536 190 | 189,0.0006074089631297881 191 | 190,0.0005050455367087823 192 | 191,0.00043973046562020287 193 | 192,0.0004027412904528172 194 | 193,0.00038095797640261194 195 | 194,0.00036805971440943803 196 | 195,0.0003601693529579812 197 | 196,0.00035363442804939245 198 | 197,0.0003485691492666104 199 | 198,0.00034486602472654084 200 | 199,0.00034143863914534744 201 | 200,0.0003385872489036616 202 | 201,0.0003359943616360948 203 | 202,0.00033370428779185983 204 | 203,0.0003307171275031064 205 | 204,0.0003315524053911272 206 | 205,0.00034121444952302915 207 | 206,0.00033675778835201683 208 | 207,0.0003332207037087158 209 | 208,0.00033174426075555574 210 | 209,0.00033328015929116807 211 | 210,0.0003408070035690954 212 | 211,0.00036438844305343486 213 | 212,0.00044144499025394615 214 | 213,0.0007039617945768193 215 | 214,0.0015655123830638881 216 | 215,0.002775226215299247 217 | 216,0.0023572263761183643 218 | 217,0.002359521387324544 219 | 218,0.0009417749957591319 220 | 219,0.0005132300367727617 221 | 220,0.00036947877102794897 222 | 221,0.00032582793960913973 223 | 222,0.00031986609817499583 224 | 223,0.00032223801881223657 225 | 224,0.00032875807510851767 226 | 225,0.0003326702002900996 227 | 226,0.0003379344713175669 228 | 227,0.0003421633706588035 229 | 228,0.00034763480469759674 230 | 229,0.00035427408577166404 231 | 230,0.0003626457479652464 232 | 231,0.0003733345566066382 
233 | 232,0.00038666834797671375 234 | 233,0.00040801170531359286 235 | 234,0.0003777884405633703 236 | 235,0.00037409909575180424 237 | 236,0.000343133184873332 238 | 237,0.00041746483823946546 239 | 238,0.0005944751194711173 240 | 239,0.0008795797640182586 241 | 240,0.0012079722958327638 242 | 241,0.0013774460949288268 243 | 242,0.0014215548225000002 244 | 243,0.002097266068931368 245 | 244,0.0021525249701219514 246 | 245,0.0014920250091337507 247 | 246,0.0005902757776148248 248 | 247,0.001501727257471595 249 | 248,0.0008425233522340399 250 | 249,0.0005042794928290691 251 | 250,0.00041338013416724137 252 | 251,0.0003746566238590855 253 | 252,0.00035060290828617076 254 | 253,0.00029410865908037964 255 | 254,0.00028772822326058613 256 | 255,0.00028477558953895685 257 | 256,0.0002816125052814099 258 | 257,0.00027890555589839657 259 | 258,0.0002767047403485225 260 | 259,0.00027479192339333074 261 | 260,0.0002730580161949896 262 | 261,0.0002715202944444062 263 | -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_40_[40,35]_1e-3_64_val_loss.csv: -------------------------------------------------------------------------------- 1 | 0,0.011245927275640184 2 | 1,0.011069654808219137 3 | 2,0.004808797966688871 4 | 3,0.007487603399003374 5 | 4,0.009322782320066773 6 | 5,0.0018550232855667327 7 | 6,0.003408005364753049 8 | 7,0.00629215051644835 9 | 8,0.002248685228927382 10 | 9,0.0033885031328375996 11 | 10,0.002769674640148878 12 | 11,0.002307698210894034 13 | 12,0.0020961492675645598 14 | 13,0.0019480867937592596 15 | 14,0.001735523744518387 16 | 15,0.0015703253287436634 17 | 16,0.0014455433782382772 18 | 17,0.0013700467293503984 19 | 18,0.0012780807446688414 20 | 19,0.001217760145664215 21 | 20,0.0010877448085952423 22 | 21,0.0011441557756464544 23 | 22,0.0011042398962609727 24 | 23,0.0010586253664424193 25 | 24,0.0010413686455837611 26 | 25,0.0009242694359272718 27 | 26,0.0009818826561215624 28 | 
27,0.0009837132790286478 29 | 28,0.000976965658302451 30 | 29,0.000965104767538864 31 | 30,0.0009243398631826557 32 | 31,0.0009518226183115922 33 | 32,0.0008956054586852933 34 | 33,0.0009030342495454283 35 | 34,0.0008668993190252062 36 | 35,0.0008708506276638344 37 | 36,0.000841900200486697 38 | 37,0.0008365600771302807 39 | 38,0.0008121931540041134 40 | 39,0.0008014171175530244 41 | 40,0.0007813911933194974 42 | 41,0.0007726718743877678 43 | 42,0.0007533725062064056 44 | 43,0.0007389968638468919 45 | 44,0.0007126816981953794 46 | 45,0.0007065437244379829 47 | 46,0.0006836902727533517 48 | 47,0.0006742604992127624 49 | 48,0.0006545276967166312 50 | 49,0.0006471579083530553 51 | 50,0.0006411513313651085 52 | 51,0.0006183835613573419 53 | 52,0.0006054858837662073 54 | 53,0.0005882105364560567 55 | 54,0.0005846164060820794 56 | 55,0.0006103484795011323 57 | 56,0.0006000297043697331 58 | 57,0.0007319118715180405 59 | 58,0.0012502343682893392 60 | 59,0.001424439014577917 61 | 60,0.0012152669228741835 62 | 61,0.0013128874686964113 63 | 62,0.0011736274785201612 64 | 63,0.0010463408844802401 65 | 64,0.000850864216396264 66 | 65,0.0007936535924994226 67 | 66,0.0006671727089018658 68 | 67,0.0006485721899260735 69 | 68,0.0005757811896759888 70 | 69,0.0005638533320021013 71 | 70,0.0005272014371665387 72 | 71,0.000517943687335556 73 | 72,0.0004994125137555188 74 | 73,0.0004952013340040013 75 | 74,0.0004842653067718292 76 | 75,0.00047942511897919505 77 | 76,0.00047343871997797797 78 | 77,0.00046788027574276103 79 | 78,0.0004624391099084811 80 | 79,0.0004561567871735014 81 | 80,0.0004518226126273131 82 | 81,0.00044704485154742823 83 | 82,0.00044243767905723433 84 | 83,0.0004393229610286653 85 | 84,0.0004353717888769662 86 | 85,0.0004313466086951566 87 | 86,0.000427544915823844 88 | 87,0.000396063432884242 89 | 88,0.00035838822529105275 90 | 89,0.0003840562783117438 91 | 90,0.00039051260582396183 92 | 91,0.00039246184590818555 93 | 92,0.00039922720190651456 94 | 
93,0.0003996279696805467 95 | 94,0.0003993414916451378 96 | 95,0.0003989543243921522 97 | 96,0.0003937715825078816 98 | 97,0.0003914518968266403 99 | 98,0.00038541739421158003 100 | 99,0.00037972834202107685 101 | 100,0.00037357472846734113 102 | 101,0.00037031215093708756 103 | 102,0.00036454431969543984 104 | 103,0.00036112085418326073 105 | 104,0.00035808744222118425 106 | 105,0.0003582513349227093 107 | 106,0.00035451908379086647 108 | 107,0.00035088898710392675 109 | 108,0.00034878052698833674 110 | 109,0.00034581915001738174 111 | 110,0.0003427240961423979 112 | 111,0.00034034856522841185 113 | 112,0.0003387537475771688 114 | 113,0.00033447848119098564 115 | 114,0.0003339502727612853 116 | 115,0.0003353380297053734 117 | 116,0.00033252236055980985 118 | 117,0.00032496409378303533 119 | 118,0.00029730832541425683 120 | 119,0.00032303703683910184 121 | 120,0.000387187162012761 122 | 121,0.0003711183671839535 123 | 122,0.00033274107684927256 124 | 123,0.00031540394311064277 125 | 124,0.00030002323910593987 126 | 125,0.0002870941313850725 127 | 126,0.0002779293512315329 128 | 127,0.00027234509691660263 129 | 128,0.000266814686695178 130 | 129,0.0002640404834829528 131 | 130,0.00026293823511564526 132 | 131,0.00026341666595946096 133 | 132,0.00026483614005726473 134 | 133,0.00027093121075424655 135 | 134,0.00027374438901335514 136 | 135,0.00027819890288058026 137 | 136,0.0002828869900409261 138 | 137,0.0002911286890217713 139 | 138,0.0002963374196230594 140 | 139,0.0003041065045118589 141 | 140,0.0003428751811513613 142 | 141,0.0004117402272170474 143 | 142,0.00043339848494285653 144 | 143,0.00035339185831554493 145 | 144,0.0003453139697426352 146 | 145,0.0003665151588361839 147 | 146,0.0003380057670914664 148 | 147,0.0003227008815758444 149 | 148,0.00031520085056023354 150 | 149,0.00030455651998134524 151 | 150,0.00029578711034665845 152 | 151,0.00029233360767428734 153 | 152,0.0002873165057249496 154 | 153,0.0003005155113300887 155 | 154,0.000298672143956004 156 
| 155,0.00030681428776508003 157 | 156,0.00033375627666326434 158 | 157,0.00034802994522233977 159 | 158,0.00037980725755915046 160 | 159,0.00039561704234701805 161 | 160,0.00047993451629861676 162 | 161,0.0004758001686523444 163 | 162,0.0005749027838866258 164 | 163,0.0005157505571521048 165 | 164,0.0008781756724006143 166 | 165,0.0009635999068168217 167 | 166,0.0006622938511507778 168 | 167,0.0005168658279781712 169 | 168,0.00036121237268350246 170 | 169,0.0003280518897648515 171 | 170,0.0003141843982779517 172 | 171,0.0002909079274355338 173 | 172,0.00027947167360127485 174 | 173,0.0002665699666349924 175 | 174,0.0002586875423981712 176 | 175,0.0002478818260614985 177 | 176,0.00024356925759689305 178 | 177,0.00023023712806050377 179 | 178,0.0002181032237974156 180 | 179,0.00020663415740147746 181 | 180,0.0001907361719500402 182 | 181,0.00017263388445857783 183 | 182,0.00016459571257843795 184 | 183,0.00017638242671041396 185 | 184,0.00018043784025237607 186 | 185,0.00017358529670485136 187 | 186,0.00017425024496584102 188 | 187,0.00018778365458650836 189 | 188,0.00019515050446678852 190 | 189,0.0001903780493162701 191 | 190,0.00018419540519344396 192 | 191,0.00018275092388021536 193 | 192,0.0001800730774291502 194 | 193,0.00018033972572021443 195 | 194,0.00018024482214341647 196 | 195,0.00017734149727841904 197 | 196,0.00017647887479738686 198 | 197,0.00017671149569274536 199 | 198,0.00017559131635513543 200 | 199,0.0001749578182954855 201 | 200,0.00017429360716427066 202 | 201,0.00017391732072955448 203 | 202,0.00017316520147844507 204 | 203,0.00017081369778366182 205 | 204,0.0001687749242811496 206 | 205,0.00016495474144527367 207 | 206,0.00016266138469479206 208 | 207,0.00016082974967691275 209 | 208,0.00015876466380271675 210 | 209,0.00015571377850298224 211 | 210,0.0001510654150604688 212 | 211,0.00014535742204692536 213 | 212,0.0001486141040443507 214 | 213,0.00021144958354225624 215 | 214,0.0003476464435466212 216 | 215,0.0004899139139211159 217 | 
216,0.00023650191724300385 218 | 217,0.0001784266606936681 219 | 218,0.00020714559801051328 220 | 219,0.00019207797494941745 221 | 220,0.00017444328063745694 222 | 221,0.00016978777069919583 223 | 222,0.00016990674615452258 224 | 223,0.0001722721017672327 225 | 224,0.00017237263944417495 226 | 225,0.00017395569714075276 227 | 226,0.00017489085339234564 228 | 227,0.00017623307488473324 229 | 228,0.00017834898859166122 230 | 229,0.00018100420530917574 231 | 230,0.00018415521079642247 232 | 231,0.00018856644726775843 233 | 232,0.00019333699160127034 234 | 233,0.00019831824365296755 235 | 234,0.00019345036607460472 236 | 235,0.00019406346435626518 237 | 236,0.0002036576609906002 238 | 237,0.0002348874906350955 239 | 238,0.0002882592691156756 240 | 239,0.00035281152793623766 241 | 240,0.0003990926689484381 242 | 241,0.0005078988133704867 243 | 242,0.0006041919595786724 244 | 243,0.0006690208021358683 245 | 244,0.000625901990008123 246 | 245,0.0005599181087494924 247 | 246,0.000786970291238535 248 | 247,0.00045245117006084787 249 | 248,0.00035559461634718524 250 | 249,0.0003187318140809456 251 | 250,0.0002903590251787983 252 | 251,0.00024202760561883193 253 | 252,0.0002167706682892709 254 | 253,0.00022906222900552354 255 | 254,0.00022268839931147624 256 | 255,0.00021520622901554252 257 | 256,0.0002089126922736137 258 | 257,0.00020351368634849144 259 | 258,0.00019884976038532653 260 | 259,0.00019571570386499939 261 | 260,0.0001935378840526341 262 | 261,0.00018998222618267453 263 | -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_CrossValidation.csv: -------------------------------------------------------------------------------- 1 | ,MSE,RMSE,R2_Score,Train_Time 2 | 0,0.001088057,0.032985713,0.737770871,0 3 | 1,0.000228818,0.015126718,0.944853416,143.7789888 4 | 2,0.000110306,0.01050266,0.926471723,179.0684941 5 | 3,2.32E-04,0.015233416,0.988405212,215.0318651 6 | 4,0.000356217,0.018873717,0.959456833,251.950314 7 
| -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_CrossValidation_TrainLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 0,0.234993386,0.001862539,0.001192456,0.000764349,0.000280433 3 | 1,0.150776445,0.002189138,0.001246065,0.000838437,0.000270514 4 | 2,0.031950049,0.003200395,0.001184777,0.000995584,0.000254295 5 | 3,0.069875755,0.004840289,0.001523628,0.001356098,0.000250981 6 | 4,0.009524277,0.003889968,0.001717717,0.001682147,0.000256511 7 | 5,0.013115893,0.003132457,0.001884446,0.00204521,0.000277259 8 | 6,0.025957048,0.002630425,0.001981149,0.002576611,0.000350127 9 | 7,0.006650081,0.002505915,0.001842505,0.00300627,0.000515947 10 | 8,0.00774632,0.002384508,0.002330204,0.003604474,0.000780499 11 | 9,0.023990321,0.002362134,0.003300606,0.002954874,0.001163856 12 | 10,0.003933063,0.002323182,0.002514854,0.004299437,0.001330011 13 | 11,0.003433847,0.00252415,0.001686181,0.003064601,0.000933442 14 | 12,0.003452579,0.002250007,0.001560372,0.002178516,0.000552335 15 | 13,0.004932923,0.002370835,0.00126651,0.001361323,0.000336506 16 | 14,0.004043324,0.002337724,0.001177872,0.000946371,0.000265273 17 | 15,0.003419567,0.002283202,0.001078944,0.000703283,0.000246208 18 | 16,0.003561947,0.00213894,0.001059345,0.000608463,0.000241477 19 | 17,0.003655299,0.002233345,0.001045296,0.000545576,0.000241821 20 | 18,0.003663812,0.002208981,0.001069541,0.000518822,0.000244422 21 | 19,0.003514182,0.002235358,0.001114445,0.000515387,0.000246058 22 | 20,0.003558012,0.002152014,0.001155956,0.000521395,0.000249539 23 | 21,0.003561612,0.002213768,0.001206962,0.000542509,0.000254633 24 | 22,0.003687283,0.002179598,0.00124686,0.000588992,0.000263728 25 | 23,0.003484839,0.002156914,0.001254605,0.000661038,0.000278959 26 | 24,0.003502082,0.001977043,0.001357747,0.000734984,0.000306702 27 | 
25,0.003562467,0.002068086,0.001527814,0.000814655,0.000352422 28 | 26,0.003490946,0.001807901,0.00167295,0.000812751,0.000405572 29 | 27,0.003311103,0.003010091,0.002279415,0.00086504,0.000439986 30 | 28,0.003392639,0.002200907,0.002191984,0.000912562,0.000419232 31 | 29,0.00339509,0.001920144,0.002888071,0.000945025,0.000355933 32 | 30,0.003283154,0.001910717,0.00217529,0.001010089,0.000278436 33 | 31,0.003376308,0.001824642,0.001601517,0.001056582,0.000246196 34 | 32,0.003344119,0.001763558,0.001296762,0.001061963,0.000221209 35 | 33,0.003226995,0.001897436,0.000919407,0.001113124,0.00023205 36 | 34,0.003229019,0.001955441,0.00077988,0.001102158,0.000213896 37 | 35,0.003259706,0.001976406,0.000681613,0.001081504,0.000222431 38 | 36,0.003146389,0.001786043,0.000623577,0.001205876,0.000220425 39 | 37,0.003255757,0.001819516,0.000557695,0.001041654,0.00024567 40 | 38,0.003079045,0.00177453,0.000528227,0.001222562,0.000362549 41 | 39,0.003118834,0.001871328,0.000504463,0.00141996,0.000674985 42 | 40,0.003064413,0.001789389,0.000487353,0.00112008,0.001329874 43 | 41,0.003077353,0.001808451,0.000474255,0.000816607,0.001908841 44 | 42,0.003031698,0.001726691,0.000465914,0.000796712,0.001582702 45 | 43,0.002974976,0.001867337,0.00045928,0.0006828,0.001267836 46 | 44,0.002964538,0.001760708,0.000455616,0.000620273,0.002747898 47 | 45,0.003067088,0.001797828,0.000453005,0.000602716,0.002933637 48 | 46,0.002979958,0.001773475,0.000452878,0.000585229,0.000803759 49 | 47,0.002870595,0.0018111,0.000460014,0.000576347,0.000252752 50 | 48,0.00295136,0.001747594,0.000483404,0.000580383,0.000266696 51 | 49,0.002937781,0.001721855,0.000558389,0.000601946,0.000239664 52 | -------------------------------------------------------------------------------- /Univariate -LSTM/UV-LSTM_CrossValidation_ValLoss.csv: -------------------------------------------------------------------------------- 1 | ,Split1,Split2,Split3,Split4,Split5 2 | 
0,0.000776878,6.02E-05,0.000335277,0.001024352,4.97E-05 3 | 1,0.028817684,0.00027553,0.000350394,0.001321732,4.67E-05 4 | 2,0.007836335,0.000754915,0.000502504,0.001567616,5.19E-05 5 | 3,0.027694114,0.000968865,0.000579647,0.002014121,6.01E-05 6 | 4,0.000795211,0.000866362,0.000683984,0.002408826,6.56E-05 7 | 5,0.000491295,0.00057549,0.000774808,0.002617411,6.47E-05 8 | 6,0.000166531,0.000524977,0.000816861,0.002603573,6.01E-05 9 | 7,0.00026569,0.000463256,0.000985062,0.002262226,7.16E-05 10 | 8,0.000280108,0.000448375,0.000331297,0.002022458,8.32E-05 11 | 9,0.000515686,0.000452237,0.000794379,0.001622757,6.37E-05 12 | 10,0.000139788,0.000479023,0.000914424,0.001645788,5.39E-05 13 | 11,0.00017583,0.000480493,0.000848801,0.001225571,4.72E-05 14 | 12,0.000311293,0.000466464,0.000762225,0.001061361,4.74E-05 15 | 13,0.000324418,0.000464762,0.000691271,0.000834942,5.02E-05 16 | 14,0.000357115,0.000446959,0.000642154,0.000604096,5.39E-05 17 | 15,0.000340465,0.0004546,0.000608483,0.000474202,5.49E-05 18 | 16,0.000376035,0.00045195,0.000582666,0.000361034,5.62E-05 19 | 17,0.000403578,0.000421707,0.000571384,0.000306076,5.70E-05 20 | 18,0.000415497,0.000461544,0.000565578,0.000284133,5.73E-05 21 | 19,0.000422401,0.000410958,0.000566826,0.000278111,5.62E-05 22 | 20,0.000435968,0.000437956,0.000568104,0.000286544,5.70E-05 23 | 21,0.000468491,0.000423561,0.000585712,0.000318662,5.76E-05 24 | 22,0.000470367,0.000461219,0.000566145,0.000375655,5.84E-05 25 | 23,0.000455582,0.000386748,0.000632582,0.000471017,6.09E-05 26 | 24,0.000472889,0.000461945,0.000544843,0.000558094,6.34E-05 27 | 25,0.000505573,0.000196762,0.000706304,0.000629135,7.14E-05 28 | 26,0.000455772,0.000430819,0.000692622,0.000676186,7.93E-05 29 | 27,0.000459335,0.000615973,0.000954374,0.000721825,8.12E-05 30 | 28,0.000495257,0.000472105,0.000711738,0.000732107,7.50E-05 31 | 29,0.000465827,0.000371355,0.000874893,0.000759908,6.60E-05 32 | 30,0.000486633,0.000390339,0.000848915,0.000733196,5.59E-05 33 | 
31,0.000506415,0.000361023,0.000738819,0.000756001,5.15E-05 34 | 32,0.000484839,0.000373027,0.000606669,0.000684099,4.62E-05 35 | 33,0.000465557,0.000387413,0.000532019,0.000700834,4.99E-05 36 | 34,0.000502143,0.000398018,0.000469045,0.000602287,4.58E-05 37 | 35,0.000461396,0.000406741,0.000372453,0.000651177,4.98E-05 38 | 36,0.000508896,0.000377534,0.000348533,0.000637866,4.70E-05 39 | 37,0.000462378,0.000364182,0.000333583,0.000417447,5.24E-05 40 | 38,0.000479267,0.000372802,0.000310398,0.000533832,5.41E-05 41 | 39,0.000457807,0.00038423,0.000303465,0.000593738,7.87E-05 42 | 40,0.000478265,0.000366235,0.00030208,0.000360941,0.00014572 43 | 41,0.000461482,0.000378269,0.000296586,0.000301156,0.000212612 44 | 42,0.000466462,0.000359088,0.000292427,0.000302816,0.000137633 45 | 43,0.000433514,0.00034282,0.000283945,0.000195506,0.000193778 46 | 44,0.000480721,0.000362958,0.000272153,0.000215218,7.95E-05 47 | 45,0.000470735,0.000377917,0.000256869,0.000174282,0.000120687 48 | 46,0.000448045,0.000360071,0.000241213,0.000207069,0.000101295 49 | 47,0.000446985,0.0003664,0.000221418,0.000180974,4.73E-05 50 | 48,0.000465598,0.000342785,0.000194021,0.000208689,4.12E-05 51 | 49,0.000447368,0.000347393,0.000159452,0.000204485,4.12E-05 52 | ,,,,, 53 | ,,,,, 54 | ,,,,, 55 | ,,,,, 56 | ,,,,, 57 | ,,,,, 58 | ,,,,, 59 | ,,,,, 60 | ,,,,, 61 | ,,,,, 62 | ,,,,, 63 | ,,,,, 64 | ,,,,, 65 | ,,,,, 66 | ,,,,, 67 | ,,,,, 68 | ,,,,, 69 | ,,,,, 70 | ,,,,, 71 | ,,,,, 72 | --------------------------------------------------------------------------------