├── .DS_Store
├── .gitignore
├── .ipynb_checkpoints
│   └── DAGMM-checkpoint.ipynb
├── DAGMM.ipynb
├── README.md
├── __pycache__
│   ├── forward_step.cpython-36.pyc
│   ├── model.cpython-36.pyc
│   ├── preprocess.cpython-36.pyc
│   ├── test.cpython-36.pyc
│   └── train.cpython-36.pyc
├── data
│   ├── .DS_Store
│   └── kdd_cup.npz
├── forward_step.py
├── main.py
├── model.py
├── preprocess.py
├── test.py
├── train.py
└── utils
    ├── __pycache__
    │   └── utils.cpython-36.pyc
    └── utils.py

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | data/*
2 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/DAGMM-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import torch\n", 11 | "\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import pandas as pd \n", 14 | "\n", 15 | "from train import TrainerDAGMM\n", 16 | "from test import eval\n", 17 | "from preprocess import get_KDDCup99" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": { 24 | "scrolled": true 25 | }, 26 | "outputs": [ 27 | { 28 | "name": "stdout", 29 | "output_type": "stream", 30 | "text": [ 31 | "198371/198371: [===============================>] - ETA 1.6sss\n", 32 | "Training DAGMM... Epoch: 0, Loss: 43416445.505\n", 33 | "198371/198371: [===============================>] - ETA 0.0s\n", 34 | "Training DAGMM... Epoch: 1, Loss: 43430190.289\n", 35 | "198371/198371: [===============================>] - ETA 0.0s\n", 36 | "Training DAGMM... Epoch: 2, Loss: 43448783.134\n", 37 | "198371/198371: [===============================>] - ETA 0.0s\n", 38 | "Training DAGMM... Epoch: 3, Loss: 43431846.577\n", 39 | "198371/198371: [===============================>] - ETA 0.0s\n", 40 | "Training DAGMM... Epoch: 4, Loss: 43423711.526\n", 41 | "198371/198371: [===============================>] - ETA 0.0s\n", 42 | "Training DAGMM... Epoch: 5, Loss: 43419180.351\n", 43 | "198371/198371: [===============================>] - ETA 0.0s\n", 44 | "Training DAGMM... Epoch: 6, Loss: 43436569.072\n", 45 | "198371/198371: [===============================>] - ETA 0.0s\n", 46 | "Training DAGMM... Epoch: 7, Loss: 43431849.216\n", 47 | "198371/198371: [===============================>] - ETA 0.0s\n", 48 | "Training DAGMM... Epoch: 8, Loss: 43433504.619\n", 49 | "198371/198371: [===============================>] - ETA 0.0s\n", 50 | "Training DAGMM... Epoch: 9, Loss: 43448175.134\n", 51 | "198371/198371: [===============================>] - ETA 0.0s\n", 52 | "Training DAGMM... Epoch: 10, Loss: 43430255.608\n", 53 | "198371/198371: [===============================>] - ETA 0.0s\n", 54 | "Training DAGMM... Epoch: 11, Loss: 43425442.371\n", 55 | "198371/198371: [===============================>] - ETA 0.0s\n", 56 | "Training DAGMM... Epoch: 12, Loss: 43442255.505\n", 57 | "198371/198371: [===============================>] - ETA 0.0s\n", 58 | "Training DAGMM... 
Epoch: 13, Loss: 43421953.072\n", 59 | "198371/198371: [===============================>] - ETA 0.0s\n", 60 | "Training DAGMM... Epoch: 14, Loss: 43434495.959\n", 61 | "198371/198371: [===============================>] - ETA 0.0s\n", 62 | "Training DAGMM... Epoch: 15, Loss: 43421529.567\n", 63 | "198371/198371: [===============================>] - ETA 0.0s\n", 64 | "Training DAGMM... Epoch: 16, Loss: 43430372.412\n", 65 | "198371/198371: [===============================>] - ETA 0.0s\n", 66 | "Training DAGMM... Epoch: 17, Loss: 43421140.825\n", 67 | "198371/198371: [===============================>] - ETA 0.0s\n", 68 | "Training DAGMM... Epoch: 18, Loss: 43431085.299\n", 69 | "198371/198371: [===============================>] - ETA 0.0s\n", 70 | "Training DAGMM... Epoch: 19, Loss: 43439407.629\n", 71 | "198371/198371: [===============================>] - ETA 0.0s\n", 72 | "Training DAGMM... Epoch: 20, Loss: 43421279.505\n", 73 | "198371/198371: [===============================>] - ETA 0.0s\n", 74 | "Training DAGMM... Epoch: 21, Loss: 43422410.495\n", 75 | "198371/198371: [===============================>] - ETA 0.0s\n", 76 | "Training DAGMM... Epoch: 22, Loss: 43418252.371\n", 77 | "198371/198371: [===============================>] - ETA 0.0s\n", 78 | "Training DAGMM... Epoch: 23, Loss: 43477673.546\n", 79 | "198371/198371: [===============================>] - ETA 0.0s\n", 80 | "Training DAGMM... Epoch: 24, Loss: 43404796.660\n", 81 | "198371/198371: [===============================>] - ETA 0.0s\n", 82 | "Training DAGMM... Epoch: 25, Loss: 43431501.320\n", 83 | "198371/198371: [===============================>] - ETA 0.0s\n", 84 | "Training DAGMM... Epoch: 26, Loss: 43451058.330\n", 85 | "198371/198371: [===============================>] - ETA 0.0s\n", 86 | "Training DAGMM... Epoch: 27, Loss: 43465593.856\n", 87 | "198371/198371: [===============================>] - ETA 0.0s\n", 88 | "Training DAGMM... Epoch: 28, Loss: 43412597.938\n", 89 | "198371/198371: [===============================>] - ETA 0.0s\n", 90 | "Training DAGMM... Epoch: 29, Loss: 43445709.711\n", 91 | "198371/198371: [===============================>] - ETA 0.0s\n", 92 | "Training DAGMM... Epoch: 30, Loss: 43435486.330\n", 93 | "198371/198371: [===============================>] - ETA 0.0s\n", 94 | "Training DAGMM... Epoch: 31, Loss: 43442352.041\n", 95 | "198371/198371: [===============================>] - ETA 0.0s\n", 96 | "Training DAGMM... Epoch: 32, Loss: 43433058.206\n", 97 | "198371/198371: [===============================>] - ETA 0.0s\n", 98 | "Training DAGMM... Epoch: 33, Loss: 43435356.186\n", 99 | "198371/198371: [===============================>] - ETA 0.0s\n", 100 | "Training DAGMM... Epoch: 34, Loss: 43431390.619\n", 101 | "198371/198371: [===============================>] - ETA 0.0s\n", 102 | "Training DAGMM... Epoch: 35, Loss: 43439402.412\n", 103 | "198371/198371: [===============================>] - ETA 0.0s\n", 104 | "Training DAGMM... Epoch: 36, Loss: 43434369.443\n", 105 | "198371/198371: [===============================>] - ETA 0.0s\n", 106 | "Training DAGMM... Epoch: 37, Loss: 43431257.155\n", 107 | "198371/198371: [===============================>] - ETA 0.0s\n", 108 | "Training DAGMM... Epoch: 38, Loss: 43443205.876\n", 109 | "198371/198371: [===============================>] - ETA 0.0s\n", 110 | "Training DAGMM... Epoch: 39, Loss: 43439627.134\n", 111 | "198371/198371: [===============================>] - ETA 0.0s\n", 112 | "Training DAGMM... 
Epoch: 40, Loss: 43439081.155\n", 113 | "198371/198371: [===============================>] - ETA 0.0s\n", 114 | "Training DAGMM... Epoch: 41, Loss: 43431616.247\n", 115 | "198371/198371: [===============================>] - ETA 0.0s\n", 116 | "Training DAGMM... Epoch: 42, Loss: 43431875.175\n", 117 | "198371/198371: [===============================>] - ETA 0.0s\n", 118 | "Training DAGMM... Epoch: 43, Loss: 43455904.907\n", 119 | "198371/198371: [===============================>] - ETA 0.0s\n", 120 | "Training DAGMM... Epoch: 44, Loss: 43420986.536\n", 121 | "198371/198371: [===============================>] - ETA 0.0s\n", 122 | "Training DAGMM... Epoch: 45, Loss: 43451918.124\n", 123 | "198371/198371: [===============================>] - ETA 0.0s\n", 124 | "Training DAGMM... Epoch: 46, Loss: 43442273.485\n", 125 | "198371/198371: [===============================>] - ETA 0.0s\n", 126 | "Training DAGMM... Epoch: 47, Loss: 43435331.588\n", 127 | "198371/198371: [===============================>] - ETA 0.0s\n", 128 | "Training DAGMM... Epoch: 48, Loss: 43407983.526\n", 129 | "198371/198371: [===============================>] - ETA 0.0s\n", 130 | "Training DAGMM... Epoch: 49, Loss: 43437968.577\n", 131 | "198371/198371: [===============================>] - ETA 0.0s\n", 132 | "Training DAGMM... Epoch: 50, Loss: 43428314.990\n", 133 | "198371/198371: [===============================>] - ETA 0.0s\n", 134 | "Training DAGMM... Epoch: 51, Loss: 43445076.515\n", 135 | "198371/198371: [===============================>] - ETA 0.0s\n", 136 | "Training DAGMM... Epoch: 52, Loss: 43464964.330\n", 137 | "198371/198371: [===============================>] - ETA 0.0s\n", 138 | "Training DAGMM... Epoch: 53, Loss: 43428623.629\n", 139 | "198371/198371: [===============================>] - ETA 0.0s\n", 140 | "Training DAGMM... Epoch: 54, Loss: 43421078.742\n", 141 | "198371/198371: [===============================>] - ETA 0.0s\n", 142 | "Training DAGMM... Epoch: 55, Loss: 43443334.165\n", 143 | "198371/198371: [===============================>] - ETA 0.0s\n", 144 | "Training DAGMM... Epoch: 56, Loss: 43405380.515\n", 145 | "198371/198371: [===============================>] - ETA 0.0s\n", 146 | "Training DAGMM... Epoch: 57, Loss: 43435038.701\n", 147 | "198371/198371: [===============================>] - ETA 0.0s\n", 148 | "Training DAGMM... Epoch: 58, Loss: 43435549.237\n", 149 | "198371/198371: [===============================>] - ETA 0.0s\n", 150 | "Training DAGMM... Epoch: 59, Loss: 43432695.732\n", 151 | "198371/198371: [===============================>] - ETA 0.0s\n", 152 | "Training DAGMM... Epoch: 60, Loss: 43424946.804\n", 153 | "198371/198371: [===============================>] - ETA 0.0s\n", 154 | "Training DAGMM... Epoch: 61, Loss: 43424717.216\n", 155 | "198371/198371: [===============================>] - ETA 0.0s\n", 156 | "Training DAGMM... Epoch: 62, Loss: 43439938.247\n", 157 | "198371/198371: [===============================>] - ETA 0.0s\n", 158 | "Training DAGMM... Epoch: 63, Loss: 43446788.804\n", 159 | "198371/198371: [===============================>] - ETA 0.0s\n", 160 | "Training DAGMM... Epoch: 64, Loss: 43449003.381\n", 161 | "198371/198371: [===============================>] - ETA 0.0s\n", 162 | "Training DAGMM... Epoch: 65, Loss: 43422753.691\n", 163 | "198371/198371: [===============================>] - ETA 0.0s\n", 164 | "Training DAGMM... Epoch: 66, Loss: 43422169.299\n", 165 | "198371/198371: [===============================>] - ETA 0.0s\n", 166 | "Training DAGMM... 
Epoch: 67, Loss: 43433960.330\n", 167 | "198371/198371: [===============================>] - ETA 0.0s\n", 168 | "Training DAGMM... Epoch: 68, Loss: 43442515.196\n", 169 | "198371/198371: [===============================>] - ETA 0.0s\n", 170 | "Training DAGMM... Epoch: 69, Loss: 43468303.010\n", 171 | "198371/198371: [===============================>] - ETA 0.0s\n", 172 | "Training DAGMM... Epoch: 70, Loss: 43425772.990\n", 173 | "198371/198371: [===============================>] - ETA 0.0s\n", 174 | "Training DAGMM... Epoch: 71, Loss: 43415965.959\n", 175 | "198371/198371: [===============================>] - ETA 0.0s\n", 176 | "Training DAGMM... Epoch: 72, Loss: 43460916.474\n", 177 | "198371/198371: [===============================>] - ETA 0.0s\n", 178 | "Training DAGMM... Epoch: 73, Loss: 43421864.928\n", 179 | "198371/198371: [===============================>] - ETA 0.0s\n", 180 | "Training DAGMM... Epoch: 74, Loss: 43419913.753\n" 181 | ] 182 | }, 183 | { 184 | "name": "stdout", 185 | "output_type": "stream", 186 | "text": [ 187 | "198371/198371: [===============================>] - ETA 0.0s\n", 188 | "Training DAGMM... Epoch: 75, Loss: 43442808.433\n", 189 | "198371/198371: [===============================>] - ETA 0.0s\n", 190 | "Training DAGMM... Epoch: 76, Loss: 43428185.979\n", 191 | "198371/198371: [===============================>] - ETA 0.0s\n", 192 | "Training DAGMM... Epoch: 77, Loss: 43450846.041\n", 193 | "198371/198371: [===============================>] - ETA 0.0s\n", 194 | "Training DAGMM... Epoch: 78, Loss: 43439178.722\n", 195 | "198371/198371: [===============================>] - ETA 0.0s\n", 196 | "Training DAGMM... Epoch: 79, Loss: 43441306.186\n", 197 | "198371/198371: [===============================>] - ETA 0.0s\n", 198 | "Training DAGMM... Epoch: 80, Loss: 43430168.948\n", 199 | "198371/198371: [===============================>] - ETA 0.0s\n", 200 | "Training DAGMM... Epoch: 81, Loss: 43425490.763\n", 201 | "198371/198371: [===============================>] - ETA 0.0s\n", 202 | "Training DAGMM... Epoch: 82, Loss: 43422066.577\n", 203 | "198371/198371: [===============================>] - ETA 0.0s\n", 204 | "Training DAGMM... Epoch: 83, Loss: 43440077.753\n", 205 | "198371/198371: [===============================>] - ETA 0.0s\n", 206 | "Training DAGMM... Epoch: 84, Loss: 43456807.649\n", 207 | "198371/198371: [===============================>] - ETA 0.0s\n", 208 | "Training DAGMM... Epoch: 85, Loss: 43443363.856\n", 209 | "198371/198371: [===============================>] - ETA 0.0s\n", 210 | "Training DAGMM... Epoch: 86, Loss: 43404440.103\n", 211 | "198371/198371: [===============================>] - ETA 0.0s\n", 212 | "Training DAGMM... Epoch: 87, Loss: 43427650.660\n", 213 | "198371/198371: [===============================>] - ETA 0.0s\n", 214 | "Training DAGMM... Epoch: 88, Loss: 43408453.443\n", 215 | "198371/198371: [===============================>] - ETA 0.0s\n", 216 | "Training DAGMM... Epoch: 89, Loss: 43405550.742\n", 217 | "198371/198371: [===============================>] - ETA 0.0s\n", 218 | "Training DAGMM... Epoch: 90, Loss: 43412264.371\n", 219 | "198371/198371: [===============================>] - ETA 0.0s\n", 220 | "Training DAGMM... Epoch: 91, Loss: 43454277.732\n", 221 | "198371/198371: [===============================>] - ETA 0.0s\n", 222 | "Training DAGMM... Epoch: 92, Loss: 43426326.928\n", 223 | "198371/198371: [===============================>] - ETA 0.0s\n", 224 | "Training DAGMM... 
Epoch: 93, Loss: 43430818.660\n", 225 | "198371/198371: [===============================>] - ETA 0.0s\n", 226 | "Training DAGMM... Epoch: 94, Loss: 43442813.052\n", 227 | "198371/198371: [===============================>] - ETA 0.0s\n", 228 | "Training DAGMM... Epoch: 95, Loss: 43425756.454\n", 229 | "198371/198371: [===============================>] - ETA 0.0s\n", 230 | "Training DAGMM... Epoch: 96, Loss: 43420479.258\n", 231 | "198371/198371: [===============================>] - ETA 0.0s\n", 232 | "Training DAGMM... Epoch: 97, Loss: 43437058.763\n", 233 | "198371/198371: [===============================>] - ETA 0.0s\n", 234 | "Training DAGMM... Epoch: 98, Loss: 43443187.608\n", 235 | "198371/198371: [===============================>] - ETA 0.0s\n", 236 | "Training DAGMM... Epoch: 99, Loss: 43439066.660\n", 237 | "198371/198371: [===============================>] - ETA 0.0s\n", 238 | "Training DAGMM... Epoch: 100, Loss: 43432156.206\n", 239 | "198371/198371: [===============================>] - ETA 0.0s\n", 240 | "Training DAGMM... Epoch: 101, Loss: 43446940.598\n", 241 | "198371/198371: [===============================>] - ETA 0.0s\n", 242 | "Training DAGMM... Epoch: 102, Loss: 43473088.536\n", 243 | "198371/198371: [===============================>] - ETA 0.0s\n", 244 | "Training DAGMM... Epoch: 103, Loss: 43419472.144\n", 245 | "198371/198371: [===============================>] - ETA 0.0s\n", 246 | "Training DAGMM... Epoch: 104, Loss: 43424330.598\n", 247 | "198371/198371: [===============================>] - ETA 0.0s\n", 248 | "Training DAGMM... Epoch: 105, Loss: 43412227.588\n", 249 | "198371/198371: [===============================>] - ETA 0.0s\n", 250 | "Training DAGMM... Epoch: 106, Loss: 43445989.794\n", 251 | "198371/198371: [===============================>] - ETA 0.0s\n", 252 | "Training DAGMM... Epoch: 107, Loss: 43432074.309\n", 253 | "198371/198371: [===============================>] - ETA 0.0s\n", 254 | "Training DAGMM... Epoch: 108, Loss: 43435514.639\n", 255 | "198371/198371: [===============================>] - ETA 0.0s\n", 256 | "Training DAGMM... Epoch: 109, Loss: 43425738.309\n", 257 | "198371/198371: [===============================>] - ETA 0.0s\n", 258 | "Training DAGMM... Epoch: 110, Loss: 43438457.670\n", 259 | "198371/198371: [===============================>] - ETA 0.0s\n", 260 | "Training DAGMM... Epoch: 111, Loss: 43433071.938\n", 261 | "198371/198371: [===============================>] - ETA 0.0s\n", 262 | "Training DAGMM... Epoch: 112, Loss: 43422290.082\n", 263 | "198371/198371: [===============================>] - ETA 0.0s\n", 264 | "Training DAGMM... Epoch: 113, Loss: 43397658.433\n", 265 | "198371/198371: [===============================>] - ETA 0.0s\n", 266 | "Training DAGMM... Epoch: 114, Loss: 43410132.351\n", 267 | "198371/198371: [===============================>] - ETA 0.0s\n", 268 | "Training DAGMM... Epoch: 115, Loss: 43415145.959\n", 269 | "198371/198371: [===============================>] - ETA 0.0s\n", 270 | "Training DAGMM... Epoch: 116, Loss: 43433117.443\n", 271 | "198371/198371: [===============================>] - ETA 0.0s\n", 272 | "Training DAGMM... Epoch: 117, Loss: 43434084.907\n", 273 | "198371/198371: [===============================>] - ETA 0.0s\n", 274 | "Training DAGMM... Epoch: 118, Loss: 43446954.804\n", 275 | "198371/198371: [===============================>] - ETA 0.0s\n", 276 | "Training DAGMM... 
Epoch: 119, Loss: 43452942.000\n", 277 | "198371/198371: [===============================>] - ETA 0.0s\n", 278 | "Training DAGMM... Epoch: 120, Loss: 43432926.928\n", 279 | "198371/198371: [===============================>] - ETA 0.0s\n", 280 | "Training DAGMM... Epoch: 121, Loss: 43430535.897\n", 281 | "198371/198371: [===============================>] - ETA 0.0s\n", 282 | "Training DAGMM... Epoch: 122, Loss: 43436932.825\n", 283 | "198371/198371: [===============================>] - ETA 0.0s\n", 284 | "Training DAGMM... Epoch: 123, Loss: 43433276.887\n", 285 | "198371/198371: [===============================>] - ETA 0.0s\n", 286 | "Training DAGMM... Epoch: 124, Loss: 43435481.113\n", 287 | "198371/198371: [===============================>] - ETA 0.0s\n", 288 | "Training DAGMM... Epoch: 125, Loss: 43441739.134\n", 289 | "198371/198371: [===============================>] - ETA 0.0s\n", 290 | "Training DAGMM... Epoch: 126, Loss: 43431898.289\n", 291 | "198371/198371: [===============================>] - ETA 0.0s\n", 292 | "Training DAGMM... Epoch: 127, Loss: 43424055.959\n", 293 | "198371/198371: [===============================>] - ETA 0.0s\n", 294 | "Training DAGMM... Epoch: 128, Loss: 43421267.588\n", 295 | "198371/198371: [===============================>] - ETA 0.0s\n", 296 | "Training DAGMM... Epoch: 129, Loss: 43447161.505\n", 297 | "198371/198371: [===============================>] - ETA 0.0s\n", 298 | "Training DAGMM... Epoch: 130, Loss: 43444320.763\n", 299 | "198371/198371: [===============================>] - ETA 0.0s\n", 300 | "Training DAGMM... Epoch: 131, Loss: 43430823.093\n", 301 | "198371/198371: [===============================>] - ETA 0.0s\n", 302 | "Training DAGMM... Epoch: 132, Loss: 43412125.093\n", 303 | "198371/198371: [===============================>] - ETA 0.0s\n", 304 | "Training DAGMM... Epoch: 133, Loss: 43438941.526\n", 305 | "198371/198371: [===============================>] - ETA 0.0s\n", 306 | "Training DAGMM... Epoch: 134, Loss: 43439747.278\n", 307 | "198371/198371: [===============================>] - ETA 0.0s\n", 308 | "Training DAGMM... Epoch: 135, Loss: 43425832.082\n", 309 | "198371/198371: [===============================>] - ETA 0.0s\n", 310 | "Training DAGMM... Epoch: 136, Loss: 43433198.309\n", 311 | "198371/198371: [===============================>] - ETA 0.0s\n", 312 | "Training DAGMM... Epoch: 137, Loss: 43426305.052\n", 313 | "198371/198371: [===============================>] - ETA 0.0s\n", 314 | "Training DAGMM... Epoch: 138, Loss: 43445478.784\n", 315 | "198371/198371: [===============================>] - ETA 0.0s\n", 316 | "Training DAGMM... Epoch: 139, Loss: 43425395.361\n", 317 | "198371/198371: [===============================>] - ETA 0.0s\n", 318 | "Training DAGMM... Epoch: 140, Loss: 43419421.918\n", 319 | "198371/198371: [===============================>] - ETA 0.0s\n", 320 | "Training DAGMM... Epoch: 141, Loss: 43417765.608\n", 321 | "198371/198371: [===============================>] - ETA 0.0s\n", 322 | "Training DAGMM... Epoch: 142, Loss: 43441100.577\n", 323 | "198371/198371: [===============================>] - ETA 0.0s\n", 324 | "Training DAGMM... Epoch: 143, Loss: 43421530.866\n", 325 | "198371/198371: [===============================>] - ETA 0.0s\n", 326 | "Training DAGMM... Epoch: 144, Loss: 43419495.216\n", 327 | "198371/198371: [===============================>] - ETA 0.0s\n", 328 | "Training DAGMM... 
Epoch: 145, Loss: 43451884.330\n", 329 | "198371/198371: [===============================>] - ETA 0.0s\n", 330 | "Training DAGMM... Epoch: 146, Loss: 43432351.134\n", 331 | "198371/198371: [===============================>] - ETA 0.0s\n", 332 | "Training DAGMM... Epoch: 147, Loss: 43411839.856\n", 333 | "198371/198371: [===============================>] - ETA 0.0s\n", 334 | "Training DAGMM... Epoch: 148, Loss: 43425876.907\n", 335 | "198371/198371: [===============================>] - ETA 0.0s\n", 336 | "Training DAGMM... Epoch: 149, Loss: 43448637.526\n" 337 | ] 338 | }, 339 | { 340 | "name": "stdout", 341 | "output_type": "stream", 342 | "text": [ 343 | "198371/198371: [===============================>] - ETA 0.0s\n", 344 | "Training DAGMM... Epoch: 150, Loss: 43453290.103\n", 345 | "198371/198371: [===============================>] - ETA 0.0s\n", 346 | "Training DAGMM... Epoch: 151, Loss: 43416599.052\n", 347 | "198371/198371: [===============================>] - ETA 0.0s\n", 348 | "Training DAGMM... Epoch: 152, Loss: 43436919.732\n", 349 | "198371/198371: [===============================>] - ETA 0.0s\n", 350 | "Training DAGMM... Epoch: 153, Loss: 43435863.505\n", 351 | "198371/198371: [===============================>] - ETA 0.0s\n", 352 | "Training DAGMM... Epoch: 154, Loss: 43432635.216\n", 353 | "198371/198371: [===============================>] - ETA 0.0s\n", 354 | "Training DAGMM... Epoch: 155, Loss: 43448126.082\n", 355 | "198371/198371: [===============================>] - ETA 0.0s\n", 356 | "Training DAGMM... Epoch: 156, Loss: 43439170.289\n", 357 | "198371/198371: [===============================>] - ETA 0.0s\n", 358 | "Training DAGMM... Epoch: 157, Loss: 43433722.206\n", 359 | "198371/198371: [===============================>] - ETA 0.0s\n", 360 | "Training DAGMM... Epoch: 158, Loss: 43424128.227\n", 361 | "198371/198371: [===============================>] - ETA 0.0s\n", 362 | "Training DAGMM... Epoch: 159, Loss: 43426255.753\n", 363 | "198371/198371: [===============================>] - ETA 0.0s\n", 364 | "Training DAGMM... Epoch: 160, Loss: 43423256.454\n", 365 | "198371/198371: [===============================>] - ETA 0.0s\n", 366 | "Training DAGMM... Epoch: 161, Loss: 43422761.691\n", 367 | "198371/198371: [===============================>] - ETA 0.0s\n", 368 | "Training DAGMM... Epoch: 162, Loss: 43435175.856\n", 369 | "198371/198371: [===============================>] - ETA 0.0s\n", 370 | "Training DAGMM... Epoch: 163, Loss: 43431105.835\n", 371 | "198371/198371: [===============================>] - ETA 0.0s\n", 372 | "Training DAGMM... Epoch: 164, Loss: 43410072.206\n", 373 | "198371/198371: [===============================>] - ETA 0.0s\n", 374 | "Training DAGMM... Epoch: 165, Loss: 43423832.186\n", 375 | "198371/198371: [===============================>] - ETA 0.0s\n", 376 | "Training DAGMM... Epoch: 166, Loss: 43464915.196\n", 377 | "198371/198371: [===============================>] - ETA 0.0s\n", 378 | "Training DAGMM... Epoch: 167, Loss: 43428400.619\n", 379 | "198371/198371: [===============================>] - ETA 0.0s\n", 380 | "Training DAGMM... Epoch: 168, Loss: 43455033.320\n", 381 | "198371/198371: [===============================>] - ETA 0.0s\n", 382 | "Training DAGMM... Epoch: 169, Loss: 43440596.454\n", 383 | "198371/198371: [===============================>] - ETA 0.0s\n", 384 | "Training DAGMM... Epoch: 170, Loss: 43444403.505\n", 385 | "198371/198371: [===============================>] - ETA 0.0s\n", 386 | "Training DAGMM... 
Epoch: 171, Loss: 43434152.474\n", 387 | "198371/198371: [===============================>] - ETA 0.0s\n", 388 | "Training DAGMM... Epoch: 172, Loss: 43441112.887\n", 389 | "198371/198371: [===============================>] - ETA 0.0s\n", 390 | "Training DAGMM... Epoch: 173, Loss: 43439120.041\n", 391 | "198371/198371: [===============================>] - ETA 0.0s\n", 392 | "Training DAGMM... Epoch: 174, Loss: 43413917.876\n", 393 | "198371/198371: [===============================>] - ETA 0.0s\n", 394 | "Training DAGMM... Epoch: 175, Loss: 43424843.670\n", 395 | "198371/198371: [===============================>] - ETA 0.0s\n", 396 | "Training DAGMM... Epoch: 176, Loss: 43418712.557\n", 397 | "198371/198371: [===============================>] - ETA 0.0s\n", 398 | "Training DAGMM... Epoch: 177, Loss: 43451613.320\n", 399 | "198371/198371: [===============================>] - ETA 0.0s\n", 400 | "Training DAGMM... Epoch: 178, Loss: 43446335.773\n", 401 | "198371/198371: [===============================>] - ETA 0.0s\n", 402 | "Training DAGMM... Epoch: 179, Loss: 43421658.990\n", 403 | "198371/198371: [===============================>] - ETA 0.0s\n", 404 | "Training DAGMM... Epoch: 180, Loss: 43422974.969\n", 405 | "198371/198371: [===============================>] - ETA 0.0s\n", 406 | "Training DAGMM... Epoch: 181, Loss: 43423512.433\n", 407 | "198371/198371: [===============================>] - ETA 0.0s\n", 408 | "Training DAGMM... Epoch: 182, Loss: 43422441.381\n", 409 | "198371/198371: [===============================>] - ETA 0.0s\n", 410 | "Training DAGMM... Epoch: 183, Loss: 43439801.237\n", 411 | "198371/198371: [===============================>] - ETA 0.0s\n", 412 | "Training DAGMM... Epoch: 184, Loss: 43426050.619\n", 413 | "198371/198371: [===============================>] - ETA 0.0s\n", 414 | "Training DAGMM... Epoch: 185, Loss: 43427046.124\n", 415 | "198371/198371: [===============================>] - ETA 0.0s\n", 416 | "Training DAGMM... Epoch: 186, Loss: 43423887.691\n", 417 | "198371/198371: [===============================>] - ETA 0.0s\n", 418 | "Training DAGMM... Epoch: 187, Loss: 43433336.515\n", 419 | "198371/198371: [===============================>] - ETA 0.0s\n", 420 | "Training DAGMM... Epoch: 188, Loss: 43420106.474\n", 421 | "198371/198371: [===============================>] - ETA 0.0s\n", 422 | "Training DAGMM... Epoch: 189, Loss: 43424431.113\n", 423 | "198371/198371: [===============================>] - ETA 0.0s\n", 424 | "Training DAGMM... Epoch: 190, Loss: 43446279.526\n", 425 | "198371/198371: [===============================>] - ETA 0.0s\n", 426 | "Training DAGMM... Epoch: 191, Loss: 43434754.330\n", 427 | "198371/198371: [===============================>] - ETA 0.0s\n", 428 | "Training DAGMM... Epoch: 192, Loss: 43432176.186\n", 429 | "198371/198371: [===============================>] - ETA 0.0s\n", 430 | "Training DAGMM... Epoch: 193, Loss: 43450501.464\n", 431 | "198371/198371: [===============================>] - ETA 0.0s\n", 432 | "Training DAGMM... Epoch: 194, Loss: 43448774.454\n", 433 | "198371/198371: [===============================>] - ETA 0.0s\n", 434 | "Training DAGMM... Epoch: 195, Loss: 43417091.753\n", 435 | "198371/198371: [===============================>] - ETA 0.0s\n", 436 | "Training DAGMM... Epoch: 196, Loss: 43431497.485\n", 437 | "198371/198371: [===============================>] - ETA 0.0s\n", 438 | "Training DAGMM... 
Epoch: 197, Loss: 43423754.454\n", 439 | "198371/198371: [===============================>] - ETA 0.0s\n", 440 | "Training DAGMM... Epoch: 198, Loss: 43453314.309\n", 441 | "198371/198371: [===============================>] - ETA 0.0s\n", 442 | "Training DAGMM... Epoch: 199, Loss: 43425472.887\n" 443 | ] 444 | } 445 | ], 446 | "source": [ 447 | "class Args:\n", 448 | " num_epochs=200\n", 449 | " patience=50\n", 450 | " lr=1e-4\n", 451 | " lr_milestones=[50]\n", 452 | " batch_size=1024\n", 453 | " latent_dim=1\n", 454 | " n_gmm=4\n", 455 | " lambda_energy=0.1\n", 456 | " lambda_cov=0.005\n", 457 | " \n", 458 | " \n", 459 | "args = Args()\n", 460 | "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", 461 | "data = get_KDDCup99(args)\n", 462 | "\n", 463 | "dagmm = TrainerDAGMM(args, data, device)\n", 464 | "dagmm.train()" 465 | ] 466 | }, 467 | { 468 | "cell_type": "code", 469 | "execution_count": 3, 470 | "metadata": {}, 471 | "outputs": [ 472 | { 473 | "name": "stdout", 474 | "output_type": "stream", 475 | "text": [ 476 | "Testing...\n", 477 | "Precision : 0.9561, Recall : 0.9306, F-score : 0.9432\n", 478 | "ROC AUC score: 99.11\n" 479 | ] 480 | } 481 | ], 482 | "source": [ 483 | "from test import eval\n", 484 | "\n", 485 | "labels, scores = eval(dagmm.model, data, device, args.n_gmm)" 486 | ] 487 | }, 488 | { 489 | "cell_type": "code", 490 | "execution_count": 5, 491 | "metadata": {}, 492 | "outputs": [ 493 | { 494 | "data": { 495 | "image/png": "[base64-encoded PNG omitted: KDE density plot titled 'Outliers vs Inliers (Deep SVDD)' comparing the Inlier and Outlier score distributions]", 496 | "text/plain": [ 497 | "
" 498 | ] 499 | }, 500 | "metadata": {}, 501 | "output_type": "display_data" 502 | } 503 | ], 504 | "source": [ 505 | "scores_in = scores[np.where(labels==0)[0]]\n", 506 | "scores_out = scores[np.where(labels==1)[0]]\n", 507 | "\n", 508 | "\n", 509 | "in_ = pd.DataFrame(scores_in, columns=['Inlier'])\n", 510 | "out_ = pd.DataFrame(scores_out, columns=['Outlier'])\n", 511 | "\n", 512 | "\n", 513 | "fig, ax = plt.subplots()\n", 514 | "in_.plot.kde(ax=ax, legend=True, title='Outliers vs Inliers (Deep SVDD)')\n", 515 | "out_.plot.kde(ax=ax, legend=True)\n", 516 | "ax.grid(axis='x')\n", 517 | "ax.grid(axis='y')\n", 518 | "plt.show()" 519 | ] 520 | }, 521 | { 522 | "cell_type": "code", 523 | "execution_count": null, 524 | "metadata": {}, 525 | "outputs": [], 526 | "source": [] 527 | } 528 | ], 529 | "metadata": { 530 | "kernelspec": { 531 | "display_name": "Python 3", 532 | "language": "python", 533 | "name": "python3" 534 | }, 535 | "language_info": { 536 | "codemirror_mode": { 537 | "name": "ipython", 538 | "version": 3 539 | }, 540 | "file_extension": ".py", 541 | "mimetype": "text/x-python", 542 | "name": "python", 543 | "nbconvert_exporter": "python", 544 | "pygments_lexer": "ipython3", 545 | "version": "3.6.5" 546 | } 547 | }, 548 | "nbformat": 4, 549 | "nbformat_minor": 2 550 | } 551 | -------------------------------------------------------------------------------- /DAGMM.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import torch\n", 11 | "\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import pandas as pd \n", 14 | "\n", 15 | "from train import TrainerDAGMM\n", 16 | "from test import eval\n", 17 | "from preprocess import get_KDDCup99" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": { 24 | "scrolled": true 25 | }, 26 | "outputs": [ 27 | { 28 | "name": "stdout", 29 | "output_type": "stream", 30 | "text": [ 31 | "198371/198371: [===============================>] - ETA 1.6sss\n", 32 | "Training DAGMM... Epoch: 0, Loss: 43416445.505\n", 33 | "198371/198371: [===============================>] - ETA 0.0s\n", 34 | "Training DAGMM... Epoch: 1, Loss: 43430190.289\n", 35 | "198371/198371: [===============================>] - ETA 0.0s\n", 36 | "Training DAGMM... Epoch: 2, Loss: 43448783.134\n", 37 | "198371/198371: [===============================>] - ETA 0.0s\n", 38 | "Training DAGMM... Epoch: 3, Loss: 43431846.577\n", 39 | "198371/198371: [===============================>] - ETA 0.0s\n", 40 | "Training DAGMM... Epoch: 4, Loss: 43423711.526\n", 41 | "198371/198371: [===============================>] - ETA 0.0s\n", 42 | "Training DAGMM... Epoch: 5, Loss: 43419180.351\n", 43 | "198371/198371: [===============================>] - ETA 0.0s\n", 44 | "Training DAGMM... Epoch: 6, Loss: 43436569.072\n", 45 | "198371/198371: [===============================>] - ETA 0.0s\n", 46 | "Training DAGMM... Epoch: 7, Loss: 43431849.216\n", 47 | "198371/198371: [===============================>] - ETA 0.0s\n", 48 | "Training DAGMM... Epoch: 8, Loss: 43433504.619\n", 49 | "198371/198371: [===============================>] - ETA 0.0s\n", 50 | "Training DAGMM... Epoch: 9, Loss: 43448175.134\n", 51 | "198371/198371: [===============================>] - ETA 0.0s\n", 52 | "Training DAGMM... 
Epoch: 10, Loss: 43430255.608\n", 53 | "198371/198371: [===============================>] - ETA 0.0s\n", 54 | "Training DAGMM... Epoch: 11, Loss: 43425442.371\n", 55 | "198371/198371: [===============================>] - ETA 0.0s\n", 56 | "Training DAGMM... Epoch: 12, Loss: 43442255.505\n", 57 | "198371/198371: [===============================>] - ETA 0.0s\n", 58 | "Training DAGMM... Epoch: 13, Loss: 43421953.072\n", 59 | "198371/198371: [===============================>] - ETA 0.0s\n", 60 | "Training DAGMM... Epoch: 14, Loss: 43434495.959\n", 61 | "198371/198371: [===============================>] - ETA 0.0s\n", 62 | "Training DAGMM... Epoch: 15, Loss: 43421529.567\n", 63 | "198371/198371: [===============================>] - ETA 0.0s\n", 64 | "Training DAGMM... Epoch: 16, Loss: 43430372.412\n", 65 | "198371/198371: [===============================>] - ETA 0.0s\n", 66 | "Training DAGMM... Epoch: 17, Loss: 43421140.825\n", 67 | "198371/198371: [===============================>] - ETA 0.0s\n", 68 | "Training DAGMM... Epoch: 18, Loss: 43431085.299\n", 69 | "198371/198371: [===============================>] - ETA 0.0s\n", 70 | "Training DAGMM... Epoch: 19, Loss: 43439407.629\n", 71 | "198371/198371: [===============================>] - ETA 0.0s\n", 72 | "Training DAGMM... Epoch: 20, Loss: 43421279.505\n", 73 | "198371/198371: [===============================>] - ETA 0.0s\n", 74 | "Training DAGMM... Epoch: 21, Loss: 43422410.495\n", 75 | "198371/198371: [===============================>] - ETA 0.0s\n", 76 | "Training DAGMM... Epoch: 22, Loss: 43418252.371\n", 77 | "198371/198371: [===============================>] - ETA 0.0s\n", 78 | "Training DAGMM... Epoch: 23, Loss: 43477673.546\n", 79 | "198371/198371: [===============================>] - ETA 0.0s\n", 80 | "Training DAGMM... Epoch: 24, Loss: 43404796.660\n", 81 | "198371/198371: [===============================>] - ETA 0.0s\n", 82 | "Training DAGMM... Epoch: 25, Loss: 43431501.320\n", 83 | "198371/198371: [===============================>] - ETA 0.0s\n", 84 | "Training DAGMM... Epoch: 26, Loss: 43451058.330\n", 85 | "198371/198371: [===============================>] - ETA 0.0s\n", 86 | "Training DAGMM... Epoch: 27, Loss: 43465593.856\n", 87 | "198371/198371: [===============================>] - ETA 0.0s\n", 88 | "Training DAGMM... Epoch: 28, Loss: 43412597.938\n", 89 | "198371/198371: [===============================>] - ETA 0.0s\n", 90 | "Training DAGMM... Epoch: 29, Loss: 43445709.711\n", 91 | "198371/198371: [===============================>] - ETA 0.0s\n", 92 | "Training DAGMM... Epoch: 30, Loss: 43435486.330\n", 93 | "198371/198371: [===============================>] - ETA 0.0s\n", 94 | "Training DAGMM... Epoch: 31, Loss: 43442352.041\n", 95 | "198371/198371: [===============================>] - ETA 0.0s\n", 96 | "Training DAGMM... Epoch: 32, Loss: 43433058.206\n", 97 | "198371/198371: [===============================>] - ETA 0.0s\n", 98 | "Training DAGMM... Epoch: 33, Loss: 43435356.186\n", 99 | "198371/198371: [===============================>] - ETA 0.0s\n", 100 | "Training DAGMM... Epoch: 34, Loss: 43431390.619\n", 101 | "198371/198371: [===============================>] - ETA 0.0s\n", 102 | "Training DAGMM... Epoch: 35, Loss: 43439402.412\n", 103 | "198371/198371: [===============================>] - ETA 0.0s\n", 104 | "Training DAGMM... Epoch: 36, Loss: 43434369.443\n", 105 | "198371/198371: [===============================>] - ETA 0.0s\n", 106 | "Training DAGMM... 
Epoch: 37, Loss: 43431257.155\n", 107 | "198371/198371: [===============================>] - ETA 0.0s\n", 108 | "Training DAGMM... Epoch: 38, Loss: 43443205.876\n", 109 | "198371/198371: [===============================>] - ETA 0.0s\n", 110 | "Training DAGMM... Epoch: 39, Loss: 43439627.134\n", 111 | "198371/198371: [===============================>] - ETA 0.0s\n", 112 | "Training DAGMM... Epoch: 40, Loss: 43439081.155\n", 113 | "198371/198371: [===============================>] - ETA 0.0s\n", 114 | "Training DAGMM... Epoch: 41, Loss: 43431616.247\n", 115 | "198371/198371: [===============================>] - ETA 0.0s\n", 116 | "Training DAGMM... Epoch: 42, Loss: 43431875.175\n", 117 | "198371/198371: [===============================>] - ETA 0.0s\n", 118 | "Training DAGMM... Epoch: 43, Loss: 43455904.907\n", 119 | "198371/198371: [===============================>] - ETA 0.0s\n", 120 | "Training DAGMM... Epoch: 44, Loss: 43420986.536\n", 121 | "198371/198371: [===============================>] - ETA 0.0s\n", 122 | "Training DAGMM... Epoch: 45, Loss: 43451918.124\n", 123 | "198371/198371: [===============================>] - ETA 0.0s\n", 124 | "Training DAGMM... Epoch: 46, Loss: 43442273.485\n", 125 | "198371/198371: [===============================>] - ETA 0.0s\n", 126 | "Training DAGMM... Epoch: 47, Loss: 43435331.588\n", 127 | "198371/198371: [===============================>] - ETA 0.0s\n", 128 | "Training DAGMM... Epoch: 48, Loss: 43407983.526\n", 129 | "198371/198371: [===============================>] - ETA 0.0s\n", 130 | "Training DAGMM... Epoch: 49, Loss: 43437968.577\n", 131 | "198371/198371: [===============================>] - ETA 0.0s\n", 132 | "Training DAGMM... Epoch: 50, Loss: 43428314.990\n", 133 | "198371/198371: [===============================>] - ETA 0.0s\n", 134 | "Training DAGMM... Epoch: 51, Loss: 43445076.515\n", 135 | "198371/198371: [===============================>] - ETA 0.0s\n", 136 | "Training DAGMM... Epoch: 52, Loss: 43464964.330\n", 137 | "198371/198371: [===============================>] - ETA 0.0s\n", 138 | "Training DAGMM... Epoch: 53, Loss: 43428623.629\n", 139 | "198371/198371: [===============================>] - ETA 0.0s\n", 140 | "Training DAGMM... Epoch: 54, Loss: 43421078.742\n", 141 | "198371/198371: [===============================>] - ETA 0.0s\n", 142 | "Training DAGMM... Epoch: 55, Loss: 43443334.165\n", 143 | "198371/198371: [===============================>] - ETA 0.0s\n", 144 | "Training DAGMM... Epoch: 56, Loss: 43405380.515\n", 145 | "198371/198371: [===============================>] - ETA 0.0s\n", 146 | "Training DAGMM... Epoch: 57, Loss: 43435038.701\n", 147 | "198371/198371: [===============================>] - ETA 0.0s\n", 148 | "Training DAGMM... Epoch: 58, Loss: 43435549.237\n", 149 | "198371/198371: [===============================>] - ETA 0.0s\n", 150 | "Training DAGMM... Epoch: 59, Loss: 43432695.732\n", 151 | "198371/198371: [===============================>] - ETA 0.0s\n", 152 | "Training DAGMM... Epoch: 60, Loss: 43424946.804\n", 153 | "198371/198371: [===============================>] - ETA 0.0s\n", 154 | "Training DAGMM... Epoch: 61, Loss: 43424717.216\n", 155 | "198371/198371: [===============================>] - ETA 0.0s\n", 156 | "Training DAGMM... Epoch: 62, Loss: 43439938.247\n", 157 | "198371/198371: [===============================>] - ETA 0.0s\n", 158 | "Training DAGMM... Epoch: 63, Loss: 43446788.804\n", 159 | "198371/198371: [===============================>] - ETA 0.0s\n", 160 | "Training DAGMM... 
Epoch: 64, Loss: 43449003.381\n", 161 | "198371/198371: [===============================>] - ETA 0.0s\n", 162 | "Training DAGMM... Epoch: 65, Loss: 43422753.691\n", 163 | "198371/198371: [===============================>] - ETA 0.0s\n", 164 | "Training DAGMM... Epoch: 66, Loss: 43422169.299\n", 165 | "198371/198371: [===============================>] - ETA 0.0s\n", 166 | "Training DAGMM... Epoch: 67, Loss: 43433960.330\n", 167 | "198371/198371: [===============================>] - ETA 0.0s\n", 168 | "Training DAGMM... Epoch: 68, Loss: 43442515.196\n", 169 | "198371/198371: [===============================>] - ETA 0.0s\n", 170 | "Training DAGMM... Epoch: 69, Loss: 43468303.010\n", 171 | "198371/198371: [===============================>] - ETA 0.0s\n", 172 | "Training DAGMM... Epoch: 70, Loss: 43425772.990\n", 173 | "198371/198371: [===============================>] - ETA 0.0s\n", 174 | "Training DAGMM... Epoch: 71, Loss: 43415965.959\n", 175 | "198371/198371: [===============================>] - ETA 0.0s\n", 176 | "Training DAGMM... Epoch: 72, Loss: 43460916.474\n", 177 | "198371/198371: [===============================>] - ETA 0.0s\n", 178 | "Training DAGMM... Epoch: 73, Loss: 43421864.928\n", 179 | "198371/198371: [===============================>] - ETA 0.0s\n", 180 | "Training DAGMM... Epoch: 74, Loss: 43419913.753\n" 181 | ] 182 | }, 183 | { 184 | "name": "stdout", 185 | "output_type": "stream", 186 | "text": [ 187 | "198371/198371: [===============================>] - ETA 0.0s\n", 188 | "Training DAGMM... Epoch: 75, Loss: 43442808.433\n", 189 | "198371/198371: [===============================>] - ETA 0.0s\n", 190 | "Training DAGMM... Epoch: 76, Loss: 43428185.979\n", 191 | "198371/198371: [===============================>] - ETA 0.0s\n", 192 | "Training DAGMM... Epoch: 77, Loss: 43450846.041\n", 193 | "198371/198371: [===============================>] - ETA 0.0s\n", 194 | "Training DAGMM... Epoch: 78, Loss: 43439178.722\n", 195 | "198371/198371: [===============================>] - ETA 0.0s\n", 196 | "Training DAGMM... Epoch: 79, Loss: 43441306.186\n", 197 | "198371/198371: [===============================>] - ETA 0.0s\n", 198 | "Training DAGMM... Epoch: 80, Loss: 43430168.948\n", 199 | "198371/198371: [===============================>] - ETA 0.0s\n", 200 | "Training DAGMM... Epoch: 81, Loss: 43425490.763\n", 201 | "198371/198371: [===============================>] - ETA 0.0s\n", 202 | "Training DAGMM... Epoch: 82, Loss: 43422066.577\n", 203 | "198371/198371: [===============================>] - ETA 0.0s\n", 204 | "Training DAGMM... Epoch: 83, Loss: 43440077.753\n", 205 | "198371/198371: [===============================>] - ETA 0.0s\n", 206 | "Training DAGMM... Epoch: 84, Loss: 43456807.649\n", 207 | "198371/198371: [===============================>] - ETA 0.0s\n", 208 | "Training DAGMM... Epoch: 85, Loss: 43443363.856\n", 209 | "198371/198371: [===============================>] - ETA 0.0s\n", 210 | "Training DAGMM... Epoch: 86, Loss: 43404440.103\n", 211 | "198371/198371: [===============================>] - ETA 0.0s\n", 212 | "Training DAGMM... Epoch: 87, Loss: 43427650.660\n", 213 | "198371/198371: [===============================>] - ETA 0.0s\n", 214 | "Training DAGMM... Epoch: 88, Loss: 43408453.443\n", 215 | "198371/198371: [===============================>] - ETA 0.0s\n", 216 | "Training DAGMM... Epoch: 89, Loss: 43405550.742\n", 217 | "198371/198371: [===============================>] - ETA 0.0s\n", 218 | "Training DAGMM... 
Epoch: 90, Loss: 43412264.371\n", 219 | "198371/198371: [===============================>] - ETA 0.0s\n", 220 | "Training DAGMM... Epoch: 91, Loss: 43454277.732\n", 221 | "198371/198371: [===============================>] - ETA 0.0s\n", 222 | "Training DAGMM... Epoch: 92, Loss: 43426326.928\n", 223 | "198371/198371: [===============================>] - ETA 0.0s\n", 224 | "Training DAGMM... Epoch: 93, Loss: 43430818.660\n", 225 | "198371/198371: [===============================>] - ETA 0.0s\n", 226 | "Training DAGMM... Epoch: 94, Loss: 43442813.052\n", 227 | "198371/198371: [===============================>] - ETA 0.0s\n", 228 | "Training DAGMM... Epoch: 95, Loss: 43425756.454\n", 229 | "198371/198371: [===============================>] - ETA 0.0s\n", 230 | "Training DAGMM... Epoch: 96, Loss: 43420479.258\n", 231 | "198371/198371: [===============================>] - ETA 0.0s\n", 232 | "Training DAGMM... Epoch: 97, Loss: 43437058.763\n", 233 | "198371/198371: [===============================>] - ETA 0.0s\n", 234 | "Training DAGMM... Epoch: 98, Loss: 43443187.608\n", 235 | "198371/198371: [===============================>] - ETA 0.0s\n", 236 | "Training DAGMM... Epoch: 99, Loss: 43439066.660\n", 237 | "198371/198371: [===============================>] - ETA 0.0s\n", 238 | "Training DAGMM... Epoch: 100, Loss: 43432156.206\n", 239 | "198371/198371: [===============================>] - ETA 0.0s\n", 240 | "Training DAGMM... Epoch: 101, Loss: 43446940.598\n", 241 | "198371/198371: [===============================>] - ETA 0.0s\n", 242 | "Training DAGMM... Epoch: 102, Loss: 43473088.536\n", 243 | "198371/198371: [===============================>] - ETA 0.0s\n", 244 | "Training DAGMM... Epoch: 103, Loss: 43419472.144\n", 245 | "198371/198371: [===============================>] - ETA 0.0s\n", 246 | "Training DAGMM... Epoch: 104, Loss: 43424330.598\n", 247 | "198371/198371: [===============================>] - ETA 0.0s\n", 248 | "Training DAGMM... Epoch: 105, Loss: 43412227.588\n", 249 | "198371/198371: [===============================>] - ETA 0.0s\n", 250 | "Training DAGMM... Epoch: 106, Loss: 43445989.794\n", 251 | "198371/198371: [===============================>] - ETA 0.0s\n", 252 | "Training DAGMM... Epoch: 107, Loss: 43432074.309\n", 253 | "198371/198371: [===============================>] - ETA 0.0s\n", 254 | "Training DAGMM... Epoch: 108, Loss: 43435514.639\n", 255 | "198371/198371: [===============================>] - ETA 0.0s\n", 256 | "Training DAGMM... Epoch: 109, Loss: 43425738.309\n", 257 | "198371/198371: [===============================>] - ETA 0.0s\n", 258 | "Training DAGMM... Epoch: 110, Loss: 43438457.670\n", 259 | "198371/198371: [===============================>] - ETA 0.0s\n", 260 | "Training DAGMM... Epoch: 111, Loss: 43433071.938\n", 261 | "198371/198371: [===============================>] - ETA 0.0s\n", 262 | "Training DAGMM... Epoch: 112, Loss: 43422290.082\n", 263 | "198371/198371: [===============================>] - ETA 0.0s\n", 264 | "Training DAGMM... Epoch: 113, Loss: 43397658.433\n", 265 | "198371/198371: [===============================>] - ETA 0.0s\n", 266 | "Training DAGMM... Epoch: 114, Loss: 43410132.351\n", 267 | "198371/198371: [===============================>] - ETA 0.0s\n", 268 | "Training DAGMM... Epoch: 115, Loss: 43415145.959\n", 269 | "198371/198371: [===============================>] - ETA 0.0s\n", 270 | "Training DAGMM... Epoch: 116, Loss: 43433117.443\n", 271 | "198371/198371: [===============================>] - ETA 0.0s\n", 272 | "Training DAGMM... 
Epoch: 117, Loss: 43434084.907\n", 273 | "198371/198371: [===============================>] - ETA 0.0s\n", 274 | "Training DAGMM... Epoch: 118, Loss: 43446954.804\n", 275 | "198371/198371: [===============================>] - ETA 0.0s\n", 276 | "Training DAGMM... Epoch: 119, Loss: 43452942.000\n", 277 | "198371/198371: [===============================>] - ETA 0.0s\n", 278 | "Training DAGMM... Epoch: 120, Loss: 43432926.928\n", 279 | "198371/198371: [===============================>] - ETA 0.0s\n", 280 | "Training DAGMM... Epoch: 121, Loss: 43430535.897\n", 281 | "198371/198371: [===============================>] - ETA 0.0s\n", 282 | "Training DAGMM... Epoch: 122, Loss: 43436932.825\n", 283 | "198371/198371: [===============================>] - ETA 0.0s\n", 284 | "Training DAGMM... Epoch: 123, Loss: 43433276.887\n", 285 | "198371/198371: [===============================>] - ETA 0.0s\n", 286 | "Training DAGMM... Epoch: 124, Loss: 43435481.113\n", 287 | "198371/198371: [===============================>] - ETA 0.0s\n", 288 | "Training DAGMM... Epoch: 125, Loss: 43441739.134\n", 289 | "198371/198371: [===============================>] - ETA 0.0s\n", 290 | "Training DAGMM... Epoch: 126, Loss: 43431898.289\n", 291 | "198371/198371: [===============================>] - ETA 0.0s\n", 292 | "Training DAGMM... Epoch: 127, Loss: 43424055.959\n", 293 | "198371/198371: [===============================>] - ETA 0.0s\n", 294 | "Training DAGMM... Epoch: 128, Loss: 43421267.588\n", 295 | "198371/198371: [===============================>] - ETA 0.0s\n", 296 | "Training DAGMM... Epoch: 129, Loss: 43447161.505\n", 297 | "198371/198371: [===============================>] - ETA 0.0s\n", 298 | "Training DAGMM... Epoch: 130, Loss: 43444320.763\n", 299 | "198371/198371: [===============================>] - ETA 0.0s\n", 300 | "Training DAGMM... Epoch: 131, Loss: 43430823.093\n", 301 | "198371/198371: [===============================>] - ETA 0.0s\n", 302 | "Training DAGMM... Epoch: 132, Loss: 43412125.093\n", 303 | "198371/198371: [===============================>] - ETA 0.0s\n", 304 | "Training DAGMM... Epoch: 133, Loss: 43438941.526\n", 305 | "198371/198371: [===============================>] - ETA 0.0s\n", 306 | "Training DAGMM... Epoch: 134, Loss: 43439747.278\n", 307 | "198371/198371: [===============================>] - ETA 0.0s\n", 308 | "Training DAGMM... Epoch: 135, Loss: 43425832.082\n", 309 | "198371/198371: [===============================>] - ETA 0.0s\n", 310 | "Training DAGMM... Epoch: 136, Loss: 43433198.309\n", 311 | "198371/198371: [===============================>] - ETA 0.0s\n", 312 | "Training DAGMM... Epoch: 137, Loss: 43426305.052\n", 313 | "198371/198371: [===============================>] - ETA 0.0s\n", 314 | "Training DAGMM... Epoch: 138, Loss: 43445478.784\n", 315 | "198371/198371: [===============================>] - ETA 0.0s\n", 316 | "Training DAGMM... Epoch: 139, Loss: 43425395.361\n", 317 | "198371/198371: [===============================>] - ETA 0.0s\n", 318 | "Training DAGMM... Epoch: 140, Loss: 43419421.918\n", 319 | "198371/198371: [===============================>] - ETA 0.0s\n", 320 | "Training DAGMM... Epoch: 141, Loss: 43417765.608\n", 321 | "198371/198371: [===============================>] - ETA 0.0s\n", 322 | "Training DAGMM... Epoch: 142, Loss: 43441100.577\n", 323 | "198371/198371: [===============================>] - ETA 0.0s\n", 324 | "Training DAGMM... 
Epoch: 143, Loss: 43421530.866\n", 325 | "198371/198371: [===============================>] - ETA 0.0s\n", 326 | "Training DAGMM... Epoch: 144, Loss: 43419495.216\n", 327 | "198371/198371: [===============================>] - ETA 0.0s\n", 328 | "Training DAGMM... Epoch: 145, Loss: 43451884.330\n", 329 | "198371/198371: [===============================>] - ETA 0.0s\n", 330 | "Training DAGMM... Epoch: 146, Loss: 43432351.134\n", 331 | "198371/198371: [===============================>] - ETA 0.0s\n", 332 | "Training DAGMM... Epoch: 147, Loss: 43411839.856\n", 333 | "198371/198371: [===============================>] - ETA 0.0s\n", 334 | "Training DAGMM... Epoch: 148, Loss: 43425876.907\n", 335 | "198371/198371: [===============================>] - ETA 0.0s\n", 336 | "Training DAGMM... Epoch: 149, Loss: 43448637.526\n" 337 | ] 338 | }, 339 | { 340 | "name": "stdout", 341 | "output_type": "stream", 342 | "text": [ 343 | "198371/198371: [===============================>] - ETA 0.0s\n", 344 | "Training DAGMM... Epoch: 150, Loss: 43453290.103\n", 345 | "198371/198371: [===============================>] - ETA 0.0s\n", 346 | "Training DAGMM... Epoch: 151, Loss: 43416599.052\n", 347 | "198371/198371: [===============================>] - ETA 0.0s\n", 348 | "Training DAGMM... Epoch: 152, Loss: 43436919.732\n", 349 | "198371/198371: [===============================>] - ETA 0.0s\n", 350 | "Training DAGMM... Epoch: 153, Loss: 43435863.505\n", 351 | "198371/198371: [===============================>] - ETA 0.0s\n", 352 | "Training DAGMM... Epoch: 154, Loss: 43432635.216\n", 353 | "198371/198371: [===============================>] - ETA 0.0s\n", 354 | "Training DAGMM... Epoch: 155, Loss: 43448126.082\n", 355 | "198371/198371: [===============================>] - ETA 0.0s\n", 356 | "Training DAGMM... Epoch: 156, Loss: 43439170.289\n", 357 | "198371/198371: [===============================>] - ETA 0.0s\n", 358 | "Training DAGMM... Epoch: 157, Loss: 43433722.206\n", 359 | "198371/198371: [===============================>] - ETA 0.0s\n", 360 | "Training DAGMM... Epoch: 158, Loss: 43424128.227\n", 361 | "198371/198371: [===============================>] - ETA 0.0s\n", 362 | "Training DAGMM... Epoch: 159, Loss: 43426255.753\n", 363 | "198371/198371: [===============================>] - ETA 0.0s\n", 364 | "Training DAGMM... Epoch: 160, Loss: 43423256.454\n", 365 | "198371/198371: [===============================>] - ETA 0.0s\n", 366 | "Training DAGMM... Epoch: 161, Loss: 43422761.691\n", 367 | "198371/198371: [===============================>] - ETA 0.0s\n", 368 | "Training DAGMM... Epoch: 162, Loss: 43435175.856\n", 369 | "198371/198371: [===============================>] - ETA 0.0s\n", 370 | "Training DAGMM... Epoch: 163, Loss: 43431105.835\n", 371 | "198371/198371: [===============================>] - ETA 0.0s\n", 372 | "Training DAGMM... Epoch: 164, Loss: 43410072.206\n", 373 | "198371/198371: [===============================>] - ETA 0.0s\n", 374 | "Training DAGMM... Epoch: 165, Loss: 43423832.186\n", 375 | "198371/198371: [===============================>] - ETA 0.0s\n", 376 | "Training DAGMM... Epoch: 166, Loss: 43464915.196\n", 377 | "198371/198371: [===============================>] - ETA 0.0s\n", 378 | "Training DAGMM... Epoch: 167, Loss: 43428400.619\n", 379 | "198371/198371: [===============================>] - ETA 0.0s\n", 380 | "Training DAGMM... Epoch: 168, Loss: 43455033.320\n", 381 | "198371/198371: [===============================>] - ETA 0.0s\n", 382 | "Training DAGMM... 
Epoch: 169, Loss: 43440596.454\n", 383 | "198371/198371: [===============================>] - ETA 0.0s\n", 384 | "Training DAGMM... Epoch: 170, Loss: 43444403.505\n", 385 | "198371/198371: [===============================>] - ETA 0.0s\n", 386 | "Training DAGMM... Epoch: 171, Loss: 43434152.474\n", 387 | "198371/198371: [===============================>] - ETA 0.0s\n", 388 | "Training DAGMM... Epoch: 172, Loss: 43441112.887\n", 389 | "198371/198371: [===============================>] - ETA 0.0s\n", 390 | "Training DAGMM... Epoch: 173, Loss: 43439120.041\n", 391 | "198371/198371: [===============================>] - ETA 0.0s\n", 392 | "Training DAGMM... Epoch: 174, Loss: 43413917.876\n", 393 | "198371/198371: [===============================>] - ETA 0.0s\n", 394 | "Training DAGMM... Epoch: 175, Loss: 43424843.670\n", 395 | "198371/198371: [===============================>] - ETA 0.0s\n", 396 | "Training DAGMM... Epoch: 176, Loss: 43418712.557\n", 397 | "198371/198371: [===============================>] - ETA 0.0s\n", 398 | "Training DAGMM... Epoch: 177, Loss: 43451613.320\n", 399 | "198371/198371: [===============================>] - ETA 0.0s\n", 400 | "Training DAGMM... Epoch: 178, Loss: 43446335.773\n", 401 | "198371/198371: [===============================>] - ETA 0.0s\n", 402 | "Training DAGMM... Epoch: 179, Loss: 43421658.990\n", 403 | "198371/198371: [===============================>] - ETA 0.0s\n", 404 | "Training DAGMM... Epoch: 180, Loss: 43422974.969\n", 405 | "198371/198371: [===============================>] - ETA 0.0s\n", 406 | "Training DAGMM... Epoch: 181, Loss: 43423512.433\n", 407 | "198371/198371: [===============================>] - ETA 0.0s\n", 408 | "Training DAGMM... Epoch: 182, Loss: 43422441.381\n", 409 | "198371/198371: [===============================>] - ETA 0.0s\n", 410 | "Training DAGMM... Epoch: 183, Loss: 43439801.237\n", 411 | "198371/198371: [===============================>] - ETA 0.0s\n", 412 | "Training DAGMM... Epoch: 184, Loss: 43426050.619\n", 413 | "198371/198371: [===============================>] - ETA 0.0s\n", 414 | "Training DAGMM... Epoch: 185, Loss: 43427046.124\n", 415 | "198371/198371: [===============================>] - ETA 0.0s\n", 416 | "Training DAGMM... Epoch: 186, Loss: 43423887.691\n", 417 | "198371/198371: [===============================>] - ETA 0.0s\n", 418 | "Training DAGMM... Epoch: 187, Loss: 43433336.515\n", 419 | "198371/198371: [===============================>] - ETA 0.0s\n", 420 | "Training DAGMM... Epoch: 188, Loss: 43420106.474\n", 421 | "198371/198371: [===============================>] - ETA 0.0s\n", 422 | "Training DAGMM... Epoch: 189, Loss: 43424431.113\n", 423 | "198371/198371: [===============================>] - ETA 0.0s\n", 424 | "Training DAGMM... Epoch: 190, Loss: 43446279.526\n", 425 | "198371/198371: [===============================>] - ETA 0.0s\n", 426 | "Training DAGMM... Epoch: 191, Loss: 43434754.330\n", 427 | "198371/198371: [===============================>] - ETA 0.0s\n", 428 | "Training DAGMM... Epoch: 192, Loss: 43432176.186\n", 429 | "198371/198371: [===============================>] - ETA 0.0s\n", 430 | "Training DAGMM... Epoch: 193, Loss: 43450501.464\n", 431 | "198371/198371: [===============================>] - ETA 0.0s\n", 432 | "Training DAGMM... Epoch: 194, Loss: 43448774.454\n", 433 | "198371/198371: [===============================>] - ETA 0.0s\n", 434 | "Training DAGMM... 
Epoch: 195, Loss: 43417091.753\n", 435 | "198371/198371: [===============================>] - ETA 0.0s\n", 436 | "Training DAGMM... Epoch: 196, Loss: 43431497.485\n", 437 | "198371/198371: [===============================>] - ETA 0.0s\n", 438 | "Training DAGMM... Epoch: 197, Loss: 43423754.454\n", 439 | "198371/198371: [===============================>] - ETA 0.0s\n", 440 | "Training DAGMM... Epoch: 198, Loss: 43453314.309\n", 441 | "198371/198371: [===============================>] - ETA 0.0s\n", 442 | "Training DAGMM... Epoch: 199, Loss: 43425472.887\n" 443 | ] 444 | } 445 | ], 446 | "source": [ 447 | "class Args:\n", 448 | " num_epochs=200\n", 449 | " patience=50\n", 450 | " lr=1e-4\n", 451 | " lr_milestones=[50]\n", 452 | " batch_size=1024\n", 453 | " latent_dim=1\n", 454 | " n_gmm=4\n", 455 | " lambda_energy=0.1\n", 456 | " lambda_cov=0.005\n", 457 | " \n", 458 | " \n", 459 | "args = Args()\n", 460 | "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n", 461 | "data = get_KDDCup99(args)\n", 462 | "\n", 463 | "dagmm = TrainerDAGMM(args, data, device)\n", 464 | "dagmm.train()" 465 | ] 466 | }, 467 | { 468 | "cell_type": "code", 469 | "execution_count": 3, 470 | "metadata": {}, 471 | "outputs": [ 472 | { 473 | "name": "stdout", 474 | "output_type": "stream", 475 | "text": [ 476 | "Testing...\n", 477 | "Precision : 0.9561, Recall : 0.9306, F-score : 0.9432\n", 478 | "ROC AUC score: 99.11\n" 479 | ] 480 | } 481 | ], 482 | "source": [ 483 | "from test import eval\n", 484 | "\n", 485 | "labels, scores = eval(dagmm.model, data, device, args.n_gmm)" 486 | ] 487 | }, 488 | { 489 | "cell_type": "code", 490 | "execution_count": 5, 491 | "metadata": {}, 492 | "outputs": [ 493 | { 494 | "data": { 495 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEICAYAAACwDehOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xt8XHWZ+PHPM5NrkzSlt7QlpS1QhN4obWlxwSVdLoUuUldRQERwxbKL/Nx10V2UXRR1ueiuriuoVEEF0YooWLRsuTUIyKUtLdhSoKUUG1rovWnS3Gbm+f3xPTM5c0smbSYzk3ner1demTnnO2eeOZnMM9/L+X5FVTHGGGMAArkOwBhjTP6wpGCMMSbGkoIxxpgYSwrGGGNiLCkYY4yJsaRgjDEmxpKCyZiITBQRFZES7/4jInJFruPqbyLSICJNvvsbRKRhgJ77FhH554F4rkIlIp8TkVtzHcdgZUlhEBORK0XkzyJySETeFZEfiMiwPjx+q4icnW6/qp6vqj/rn2izyzsXzxzOY1V1qqo29nNISURkFPBJ4E7vfoOIRESkxftpEpH7ReTUbMeSJr6pIvKoiOwTkf0iskZEForI0SISEpHjUjzmQRH5L++2ikir91r2iMgTInJxQvlGEWkXkYMi0uw9x/UiUu4rtgT4hIiMzu4rLk6WFAYpEbkOuA34IlALnAZMAB4TkbIcx1aSy+cfKIfxOq8Elqtqm2/bdlWtBmpwf8PXgKdF5Kz+ibJPHgYeA+qA0cDngGZVfQd4ArjcX1hEhgMLAf8Xh5O91/M+4KfA7SLylYTnuVZVa4CxwHXAJcByEREAVW0HHsElUNPfVNV+BtkPMBRoAT6WsL0a2An8vXf/p8A3fPsbgCbv9r1ABGjzjvWvwERAgRKvTCNwle/xfw9sBPYBK4AJvn0KfBbYBLwFCPAdL54DwCvAtBSv5RJgdcK2zwPLvNsLgVeBg8A7wBfSnJMrgWd897cCX/Ce9wDwK6Ai8Tz4yp7t3Q4A1wNvAnuA+4Hh3r7o+fk08Bfgj0AF8HOv7H5gFVCXJsYngU+k+nsklLvdf06AE3Ef1nuB1/1/d6Ac+C8vnveAHwKV/uMDXwZ2e6/zsjSxjfRe27A0+z8OvJmw7RrgpYT3wPEJZS4C2oERqd5T3rZjgEPABb5tlwErc/2/Nhh/rKYwOP0V7sPot/6NqtqC+4Z1Tm8HUNXLcR8kH1TValX9Zk/lReRDuA+XDwOjgKeBXyYU+xAwD5gCnAv8NXACMAy4GPfBmWgZ8D4Rmezb9nHgF97tu4Cr1X2znIb7YM3Ux4DzgEnADFzi6M3nvNdxJjAOlwDvSChzJnASsAC4AldTGw+MAP4Bl2hTmY77UO/Nb4FZIlIlIlW4hPAL3Lf3S4Hvi8hUr+xtuHM8EzgeOBq40XesMbgP/KO9WJeIyPtSPOceYDPwcxH5kIjUJex/EBgpImf4tl0O3NPLa/kdUALMTVdAVf8CrAY+4Nu8ETi5l2Obw2BJYXAaCexW1VCKfTu8/f3tauAWVd3oPe/NwEwRmeArc4uq7lXXPNKFaxI5ERDvcTsSD6qqh3AfHJcCeMnhRFyywDvOFBEZqqr7VPWlPsT8v6q6XVX34ppGZmb4Om9Q1SZV7QC+ClyU0FT0VVVt9b3OEbhvyGFVXaOqzWmOPQxX4+nNdlxNaxhwAbBVVX+iqiHv9f/Gi0mAzwCf9877Qdzf5ZKE4/2Hqnao6lPAH3DJMo66r+fzcbWJ/wZ2iMgfo8nae62/xmvS8b
[... base64-encoded PNG data omitted: KDE density plot comparing inlier and outlier sample energies ...]\n", 496 | "text/plain": [ 497 | "
" 498 | ] 499 | }, 500 | "metadata": {}, 501 | "output_type": "display_data" 502 | } 503 | ], 504 | "source": [ 505 | "scores_in = scores[np.where(labels==0)[0]]\n", 506 | "scores_out = scores[np.where(labels==1)[0]]\n", 507 | "\n", 508 | "\n", 509 | "in_ = pd.DataFrame(scores_in, columns=['Inlier'])\n", 510 | "out_ = pd.DataFrame(scores_out, columns=['Outlier'])\n", 511 | "\n", 512 | "\n", 513 | "fig, ax = plt.subplots()\n", 514 | "in_.plot.kde(ax=ax, legend=True, title='Outliers vs Inliers (Deep SVDD)')\n", 515 | "out_.plot.kde(ax=ax, legend=True)\n", 516 | "ax.grid(axis='x')\n", 517 | "ax.grid(axis='y')\n", 518 | "plt.show()" 519 | ] 520 | }, 521 | { 522 | "cell_type": "code", 523 | "execution_count": null, 524 | "metadata": {}, 525 | "outputs": [], 526 | "source": [] 527 | } 528 | ], 529 | "metadata": { 530 | "kernelspec": { 531 | "display_name": "Python 3", 532 | "language": "python", 533 | "name": "python3" 534 | }, 535 | "language_info": { 536 | "codemirror_mode": { 537 | "name": "ipython", 538 | "version": 3 539 | }, 540 | "file_extension": ".py", 541 | "mimetype": "text/x-python", 542 | "name": "python", 543 | "nbconvert_exporter": "python", 544 | "pygments_lexer": "ipython3", 545 | "version": "3.6.5" 546 | } 547 | }, 548 | "nbformat": 4, 549 | "nbformat_minor": 2 550 | } 551 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyTorch-DAGMM 2 | This is my Minimal PyTorch implementation for [Deep Autoencoding Gaussian Mixture Model for Unsupervised Anomaly Detection](https://openreview.net/pdf?id=BJJLHbb0) (DAGMM, ICLR 2018) 3 | 4 | 5 | # Results 6 | This implementation achieves similar results as the original paper. 
7 | 8 | **KDDCup99**: 9 | | | Precision | Recall | F1-score | 10 | |:------------- |:-------------:| :-------------:| :-------------:| 11 | | Paper | 0.9297 | 0.9442 | 0.9369 | 12 | | This Implementation | 0.9561 | 0.9306 | 0.9432 | 13 | -------------------------------------------------------------------------------- /__pycache__/forward_step.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/__pycache__/forward_step.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/model.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/__pycache__/model.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/preprocess.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/__pycache__/preprocess.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/test.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/__pycache__/test.cpython-36.pyc -------------------------------------------------------------------------------- /__pycache__/train.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/__pycache__/train.cpython-36.pyc -------------------------------------------------------------------------------- /data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/data/.DS_Store -------------------------------------------------------------------------------- /data/kdd_cup.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/data/kdd_cup.npz -------------------------------------------------------------------------------- /forward_step.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from torch.autograd import Variable 4 | 5 | import numpy as np 6 | 7 | 8 | class ComputeLoss: 9 | def __init__(self, model, lambda_energy, lambda_cov, device, n_gmm): 10 | self.model = model 11 | self.lambda_energy = lambda_energy 12 | self.lambda_cov = lambda_cov 13 | self.device = device 14 | self.n_gmm = n_gmm 15 | 16 | def forward(self, x, x_hat, z, gamma): 17 | """Computing the loss function for DAGMM.""" 18 | reconst_loss = torch.mean((x-x_hat).pow(2)) 19 | 20 | sample_energy, cov_diag = self.compute_energy(z, gamma) 21 | 22 | loss = reconst_loss + self.lambda_energy * sample_energy + self.lambda_cov * cov_diag 23 | return loss # return the graph-connected loss directly; re-wrapping it in a legacy Variable can detach it from the model parameters 24 | 25 | def compute_energy(self, z, gamma, phi=None, mu=None, cov=None, sample_mean=True): 26 | """Computing the 
sample energy function""" 27 | if (phi is None) or (mu is None) or (cov is None): 28 | phi, mu, cov = self.compute_params(z, gamma) 29 | 30 | z_mu = (z.unsqueeze(1)- mu.unsqueeze(0)) 31 | 32 | eps = 1e-12 33 | cov_inverse = [] 34 | det_cov = [] 35 | cov_diag = 0 36 | for k in range(self.n_gmm): 37 | cov_k = cov[k] + (torch.eye(cov[k].size(-1))*eps).to(self.device) 38 | cov_inverse.append(torch.inverse(cov_k).unsqueeze(0)) 39 | det_cov.append((Cholesky.apply(cov_k.cpu() * (2*np.pi)).diag().prod()).unsqueeze(0)) 40 | cov_diag += torch.sum(1 / cov_k.diag()) 41 | 42 | cov_inverse = torch.cat(cov_inverse, dim=0) 43 | det_cov = torch.cat(det_cov).to(self.device) 44 | 45 | E_z = -0.5 * torch.sum(torch.sum(z_mu.unsqueeze(-1) * cov_inverse.unsqueeze(0), dim=-2) * z_mu, dim=-1) 46 | E_z = torch.exp(E_z) 47 | E_z = -torch.log(torch.sum(phi.unsqueeze(0)*E_z / (torch.sqrt(det_cov)).unsqueeze(0), dim=1) + eps) 48 | if sample_mean==True: 49 | E_z = torch.mean(E_z) 50 | return E_z, cov_diag 51 | 52 | def compute_params(self, z, gamma): 53 | """Computing the parameters phi, mu and cov for the sample energy function.""" 54 | # K: number of Gaussian mixture components 55 | # N: Number of samples 56 | # D: Latent dimension 57 | # z = NxD 58 | # gamma = NxK 59 | 60 | #phi = K (mixture weights) 61 | phi = torch.sum(gamma, dim=0)/gamma.size(0) 62 | 63 | #mu = KxD 64 | mu = torch.sum(z.unsqueeze(1) * gamma.unsqueeze(-1), dim=0) 65 | mu /= torch.sum(gamma, dim=0).unsqueeze(-1) 66 | 67 | z_mu = (z.unsqueeze(1) - mu.unsqueeze(0)) 68 | z_mu_z_mu_t = z_mu.unsqueeze(-1) * z_mu.unsqueeze(-2) 69 | 70 | #cov = K x D x D 71 | cov = torch.sum(gamma.unsqueeze(-1).unsqueeze(-1) * z_mu_z_mu_t, dim=0) 72 | cov /= torch.sum(gamma, dim=0).unsqueeze(-1).unsqueeze(-1) 73 | 74 | return phi, mu, cov 75 | 76 | 77 | class Cholesky(torch.autograd.Function): 78 | def forward(ctx, a): 79 | l = torch.cholesky(a, False) 80 | ctx.save_for_backward(l) 81 | return l 82 | def backward(ctx, grad_output): 83 | l, = ctx.saved_variables 84 | linv = l.inverse() 85 | inner = torch.tril(torch.mm(l.t(), grad_output)) * torch.tril( 86 | 1.0 - Variable(l.data.new(l.size(1)).fill_(0.5).diag())) 87 | s = torch.mm(linv.t(), torch.mm(inner, linv)) 88 | return s 89 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # code based on https://github.com/danieltan07 2 | 3 | import numpy as np 4 | import argparse 5 | import torch 6 | 7 | from train import TrainerDAGMM 8 | from test import eval 9 | from preprocess import get_KDDCup99 10 | 11 | 12 | if __name__ == '__main__': 13 | parser = argparse.ArgumentParser() 14 | parser.add_argument("--num_epochs", type=int, default=200, 15 | help="number of epochs") 16 | parser.add_argument("--patience", type=int, default=50, 17 | help="Patience for Early Stopping") 18 | parser.add_argument('--lr', type=float, default=1e-4, 19 | help='learning rate') 20 | parser.add_argument('--lr_milestones', type=list, default=[50], 21 | help='Milestones at which the scheduler multiplies the lr by 0.1') 22 | parser.add_argument("--batch_size", type=int, default=1024, 23 | help="Batch size") 24 | parser.add_argument('--latent_dim', type=int, default=1, 25 | help='Dimension of the latent variable z') 26 | parser.add_argument('--n_gmm', type=int, default=4, 27 | help='Number of Gaussian components ') 28 | parser.add_argument('--lambda_energy', type=float, default=0.1, 29 | help='Parameter lambda1 for the relative importance of the sample 
energy.') 30 | parser.add_argument('--lambda_cov', type=float, default=0.005, 31 | help='Parameter lambda2 for penalizing small values on ' 32 | 'the diagonal of the covariance matrix') 33 | #parsing arguments. 34 | args = parser.parse_args() 35 | 36 | #check if cuda is available. 37 | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') 38 | 39 | # Get train and test dataloaders. 40 | data = get_KDDCup99(args) 41 | 42 | DAGMM = TrainerDAGMM(args, data, device) 43 | DAGMM.train() 44 | eval(DAGMM.model, data, device, args.n_gmm) # evaluate with test.eval: GMM params are fit on the train loader, scores are computed on the test loader -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | 6 | class DAGMM(nn.Module): 7 | def __init__(self, n_gmm=2, z_dim=1): 8 | """Network for DAGMM (KDDCup99)""" 9 | super(DAGMM, self).__init__() 10 | #Encoder network 11 | self.fc1 = nn.Linear(118, 60) 12 | self.fc2 = nn.Linear(60, 30) 13 | self.fc3 = nn.Linear(30, 10) 14 | self.fc4 = nn.Linear(10, z_dim) 15 | 16 | #Decoder network 17 | self.fc5 = nn.Linear(z_dim, 10) 18 | self.fc6 = nn.Linear(10, 30) 19 | self.fc7 = nn.Linear(30, 60) 20 | self.fc8 = nn.Linear(60, 118) 21 | 22 | #Estimation network 23 | self.fc9 = nn.Linear(z_dim+2, 10) 24 | self.fc10 = nn.Linear(10, n_gmm) 25 | 26 | def encode(self, x): 27 | h = torch.tanh(self.fc1(x)) 28 | h = torch.tanh(self.fc2(h)) 29 | h = torch.tanh(self.fc3(h)) 30 | return self.fc4(h) 31 | 32 | def decode(self, x): 33 | h = torch.tanh(self.fc5(x)) 34 | h = torch.tanh(self.fc6(h)) 35 | h = torch.tanh(self.fc7(h)) 36 | return self.fc8(h) 37 | 38 | def estimate(self, z): 39 | h = F.dropout(torch.tanh(self.fc9(z)), 0.5) 40 | return F.softmax(self.fc10(h), dim=1) 41 | 42 | def compute_reconstruction(self, x, x_hat): 43 | relative_euclidean_distance = (x-x_hat).norm(2, dim=1) / x.norm(2, dim=1) 44 | cosine_similarity = F.cosine_similarity(x, x_hat, dim=1) 45 | return relative_euclidean_distance, cosine_similarity 46 | 47 | def forward(self, x): 48 | z_c = self.encode(x) 49 | x_hat = self.decode(z_c) 50 | rec_1, rec_2 = self.compute_reconstruction(x, x_hat) 51 | z = torch.cat([z_c, rec_1.unsqueeze(-1), rec_2.unsqueeze(-1)], dim=1) 52 | gamma = self.estimate(z) 53 | return z_c, x_hat, z, gamma -------------------------------------------------------------------------------- /preprocess.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | from torchvision import datasets 4 | from torch.utils.data import DataLoader 5 | import pickle as pl 6 | 7 | 8 | 9 | class KDDCupData: 10 | def __init__(self, data_dir, mode): 11 | """Loading the data for train and test.""" 12 | data = np.load(data_dir, allow_pickle=True) 13 | 14 | labels = data["kdd"][:,-1] 15 | features = data["kdd"][:,:-1] 16 | #In this case, "attack" samples (label 0) are treated as normal data, as mentioned in the paper 17 | normal_data = features[labels==0] 18 | normal_labels = labels[labels==0] 19 | 20 | n_train = int(normal_data.shape[0]*0.5) 21 | ixs = np.arange(normal_data.shape[0]) 22 | np.random.shuffle(ixs) 23 | normal_data_test = normal_data[ixs[n_train:]] 24 | normal_labels_test = normal_labels[ixs[n_train:]] 25 | 26 | if mode == 'train': 27 | self.x = normal_data[ixs[:n_train]] 28 | self.y = normal_labels[ixs[:n_train]] 29 | elif mode == 'test': 30 | anomalous_data = features[labels==1] 31 | anomalous_labels = 
labels[labels==1] 32 | self.x = np.concatenate((anomalous_data, normal_data_test), axis=0) 33 | self.y = np.concatenate((anomalous_labels, normal_labels_test), axis=0) 34 | 35 | def __len__(self): 36 | """Number of samples in the dataset.""" 37 | return self.x.shape[0] 38 | 39 | def __getitem__(self, index): 40 | """Return a sample from the dataset.""" 41 | return np.float32(self.x[index]), np.float32(self.y[index]) 42 | 43 | 44 | 45 | def get_KDDCup99(args, data_dir='./data/kdd_cup.npz'): 46 | """Returning train and test dataloaders.""" 47 | train = KDDCupData(data_dir, 'train') 48 | dataloader_train = DataLoader(train, batch_size=args.batch_size, 49 | shuffle=True, num_workers=0) 50 | 51 | test = KDDCupData(data_dir, 'test') 52 | dataloader_test = DataLoader(test, batch_size=args.batch_size, 53 | shuffle=False, num_workers=0) 54 | return dataloader_train, dataloader_test -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | 4 | from sklearn.metrics import roc_auc_score 5 | from sklearn.metrics import precision_recall_fscore_support as prf, accuracy_score 6 | 7 | from forward_step import ComputeLoss 8 | 9 | def eval(model, dataloaders, device, n_gmm): 10 | """Testing the DAGMM model""" 11 | dataloader_train, dataloader_test = dataloaders 12 | model.eval() 13 | print('Testing...') 14 | compute = ComputeLoss(model, None, None, device, n_gmm) 15 | with torch.no_grad(): 16 | N_samples = 0 17 | gamma_sum = 0 18 | mu_sum = 0 19 | cov_sum = 0 20 | # Obtaining the parameters phi, mu and cov using the training (clean) data. 21 | for x, _ in dataloader_train: 22 | x = x.float().to(device) 23 | 24 | _, _, z, gamma = model(x) 25 | phi_batch, mu_batch, cov_batch = compute.compute_params(z, gamma) 26 | 27 | batch_gamma_sum = torch.sum(gamma, dim=0) 28 | gamma_sum += batch_gamma_sum 29 | mu_sum += mu_batch * batch_gamma_sum.unsqueeze(-1) 30 | cov_sum += cov_batch * batch_gamma_sum.unsqueeze(-1).unsqueeze(-1) 31 | 32 | N_samples += x.size(0) 33 | 34 | train_phi = gamma_sum / N_samples 35 | train_mu = mu_sum / gamma_sum.unsqueeze(-1) 36 | train_cov = cov_sum / gamma_sum.unsqueeze(-1).unsqueeze(-1) 37 | 38 | # Obtaining Labels and energy scores for train data 39 | energy_train = [] 40 | labels_train = [] 41 | for x, y in dataloader_train: 42 | x = x.float().to(device) 43 | 44 | _, _, z, gamma = model(x) 45 | sample_energy, cov_diag = compute.compute_energy(z, gamma, phi=train_phi, 46 | mu=train_mu, cov=train_cov, 47 | sample_mean=False) 48 | 49 | energy_train.append(sample_energy.detach().cpu()) 50 | labels_train.append(y) 51 | energy_train = torch.cat(energy_train).numpy() 52 | labels_train = torch.cat(labels_train).numpy() 53 | 54 | # Obtaining Labels and energy scores for test data 55 | energy_test = [] 56 | labels_test = [] 57 | for x, y in dataloader_test: 58 | x = x.float().to(device) 59 | 60 | _, _, z, gamma = model(x) 61 | sample_energy, cov_diag = compute.compute_energy(z, gamma, train_phi, 62 | train_mu, train_cov, 63 | sample_mean=False) 64 | 65 | energy_test.append(sample_energy.detach().cpu()) 66 | labels_test.append(y) 67 | energy_test = torch.cat(energy_test).numpy() 68 | labels_test = torch.cat(labels_test).numpy() 69 | 70 | scores_total = np.concatenate((energy_train, energy_test), axis=0) 71 | labels_total = np.concatenate((labels_train, labels_test), axis=0) 72 | 73 | threshold = np.percentile(scores_total, 100 - 20) # samples in the top 20% of sample energies are flagged as anomalies 74 | 
pred = (energy_test > threshold).astype(int) 75 | gt = labels_test.astype(int) 76 | precision, recall, f_score, _ = prf(gt, pred, average='binary') 77 | print("Precision : {:0.4f}, Recall : {:0.4f}, F-score : {:0.4f}".format(precision, recall, f_score)) 78 | print('ROC AUC score: {:.2f}'.format(roc_auc_score(labels_total, scores_total)*100)) 79 | return labels_total, scores_total -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import optim 3 | import torch.nn.functional as F 4 | 5 | import numpy as np 6 | from barbar import Bar 7 | 8 | from model import DAGMM 9 | from forward_step import ComputeLoss 10 | from utils.utils import weights_init_normal 11 | 12 | class TrainerDAGMM: 13 | """Trainer class for DAGMM.""" 14 | def __init__(self, args, data, device): 15 | self.args = args 16 | self.train_loader, self.test_loader = data 17 | self.device = device 18 | 19 | 20 | def train(self): 21 | """Training the DAGMM model""" 22 | self.model = DAGMM(self.args.n_gmm, self.args.latent_dim).to(self.device) 23 | self.model.apply(weights_init_normal) 24 | optimizer = optim.Adam(self.model.parameters(), lr=self.args.lr) 25 | 26 | self.compute = ComputeLoss(self.model, self.args.lambda_energy, self.args.lambda_cov, 27 | self.device, self.args.n_gmm) 28 | self.model.train() 29 | for epoch in range(self.args.num_epochs): 30 | total_loss = 0 31 | for x, _ in Bar(self.train_loader): 32 | x = x.float().to(self.device) 33 | optimizer.zero_grad() 34 | 35 | _, x_hat, z, gamma = self.model(x) 36 | 37 | loss = self.compute.forward(x, x_hat, z, gamma) 38 | loss.backward(retain_graph=True) 39 | torch.nn.utils.clip_grad_norm_(self.model.parameters(), 5) 40 | optimizer.step() 41 | 42 | total_loss += loss.item() 43 | print('Training DAGMM... Epoch: {}, Loss: {:.3f}'.format( 44 | epoch, total_loss/len(self.train_loader))) 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /utils/__pycache__/utils.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mperezcarrasco/PyTorch-DAGMM/7ec580c1c0b7197b5d0274bca732ecdf646f45ed/utils/__pycache__/utils.cpython-36.pyc -------------------------------------------------------------------------------- /utils/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | 4 | def weights_init_normal(m): 5 | classname = m.__class__.__name__ 6 | if classname.find("Conv") != -1 and classname != 'Conv': 7 | torch.nn.init.normal_(m.weight.data, 0.0, 0.02) 8 | torch.nn.init.normal_(m.bias.data, 0.0, 0.02) 9 | elif classname.find("Linear") != -1: 10 | torch.nn.init.normal_(m.weight.data, 0.0, 0.02) 11 | torch.nn.init.normal_(m.bias.data, 0.0, 0.02) 12 | elif classname.find('BatchNorm') != -1: 13 | m.weight.data.normal_(1.0, 0.01) 14 | m.bias.data.fill_(0) --------------------------------------------------------------------------------
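Usage sketch (not a file in the repository; an assumed invocation based on the argument parser in main.py and the default data path in preprocess.get_KDDCup99, which expects the preprocessed KDD Cup 99 archive at ./data/kdd_cup.npz):

# Train DAGMM and evaluate it on the KDD Cup 99 test split using the defaults from main.py
python main.py

# The same run with the main hyperparameters written out explicitly
python main.py --num_epochs 200 --batch_size 1024 --latent_dim 1 --n_gmm 4 --lambda_energy 0.1 --lambda_cov 0.005

The equivalent Python API usage (TrainerDAGMM followed by test.eval) is shown in the DAGMM.ipynb notebook above.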