├── .gitignore ├── Data ├── kdd_after_features.npy ├── kdd_history_selected_fn_rows.csv ├── kdd_history_selected_fn_rows_122.csv ├── kdd_history_selected_fp_rows.csv ├── kdd_history_selected_fp_rows_122.csv ├── kdd_selected_fn_rows.csv ├── kdd_selected_fn_rows_122.csv ├── kdd_selected_fp_rows.csv ├── kdd_selected_fp_rows_122.csv ├── kitsune_selected_fn_rows.csv ├── kitsune_selected_fp_rows.csv └── kitsune_selected_tp_rows.csv ├── Demo ├── explanation.ipynb ├── kdd.ipynb ├── kdd_history.ipynb └── kitsune.ipynb ├── LICENSE ├── Models ├── autoencoder_model.h5 ├── kitsune.h5 ├── lstm_history_model.h5 └── lstm_model.h5 ├── README.md ├── Scripts ├── explanation.py ├── kdd.py ├── kdd_history.py └── kitsune.py └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.rsuser 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Mono auto generated files 17 | mono_crash.* 18 | 19 | # Build results 20 | [Dd]ebug/ 21 | [Dd]ebugPublic/ 22 | [Rr]elease/ 23 | [Rr]eleases/ 24 | x64/ 25 | x86/ 26 | [Aa][Rr][Mm]/ 27 | [Aa][Rr][Mm]64/ 28 | bld/ 29 | [Bb]in/ 30 | [Oo]bj/ 31 | [Ll]og/ 32 | [Ll]ogs/ 33 | 34 | # Visual Studio 2015/2017 cache/options directory 35 | .vs/ 36 | # Uncomment if you have tasks that create the project's static files in wwwroot 37 | #wwwroot/ 38 | 39 | # Visual Studio 2017 auto generated files 40 | Generated\ Files/ 41 | 42 | # MSTest test Results 43 | [Tt]est[Rr]esult*/ 44 | [Bb]uild[Ll]og.* 45 | 46 | # NUnit 47 | *.VisualState.xml 48 | TestResult.xml 49 | nunit-*.xml 50 | 51 | # Build Results of an ATL Project 52 | [Dd]ebugPS/ 53 | [Rr]eleasePS/ 54 | 
dlldata.c 55 | 56 | # Benchmark Results 57 | BenchmarkDotNet.Artifacts/ 58 | 59 | # .NET Core 60 | project.lock.json 61 | project.fragment.lock.json 62 | artifacts/ 63 | 64 | # StyleCop 65 | StyleCopReport.xml 66 | 67 | # Files built by Visual Studio 68 | *_i.c 69 | *_p.c 70 | *_h.h 71 | *.ilk 72 | *.meta 73 | *.obj 74 | *.iobj 75 | *.pch 76 | *.pdb 77 | *.ipdb 78 | *.pgc 79 | *.pgd 80 | *.rsp 81 | *.sbr 82 | *.tlb 83 | *.tli 84 | *.tlh 85 | *.tmp 86 | *.tmp_proj 87 | *_wpftmp.csproj 88 | *.log 89 | *.vspscc 90 | *.vssscc 91 | .builds 92 | *.pidb 93 | *.svclog 94 | *.scc 95 | 96 | # Chutzpah Test files 97 | _Chutzpah* 98 | 99 | # Visual C++ cache files 100 | ipch/ 101 | *.aps 102 | *.ncb 103 | *.opendb 104 | *.opensdf 105 | *.sdf 106 | *.cachefile 107 | *.VC.db 108 | *.VC.VC.opendb 109 | 110 | # Visual Studio profiler 111 | *.psess 112 | *.vsp 113 | *.vspx 114 | *.sap 115 | .DS_Store 116 | # Visual Studio Trace Files 117 | *.e2e 118 | 119 | # TFS 2012 Local Workspace 120 | $tf/ 121 | 122 | # Guidance Automation Toolkit 123 | *.gpState 124 | 125 | # ReSharper is a .NET coding add-in 126 | _ReSharper*/ 127 | *.[Rr]e[Ss]harper 128 | *.DotSettings.user 129 | 130 | # TeamCity is a build add-in 131 | _TeamCity* 132 | 133 | # DotCover is a Code Coverage Tool 134 | *.dotCover 135 | 136 | # AxoCover is a Code Coverage Tool 137 | .axoCover/* 138 | !.axoCover/settings.json 139 | 140 | # Visual Studio code coverage results 141 | *.coverage 142 | *.coveragexml 143 | 144 | # NCrunch 145 | _NCrunch_* 146 | .*crunch*.local.xml 147 | nCrunchTemp_* 148 | 149 | # MightyMoose 150 | *.mm.* 151 | AutoTest.Net/ 152 | 153 | # Web workbench (sass) 154 | .sass-cache/ 155 | 156 | # Installshield output folder 157 | [Ee]xpress/ 158 | 159 | # DocProject is a documentation generator add-in 160 | DocProject/buildhelp/ 161 | DocProject/Help/*.HxT 162 | DocProject/Help/*.HxC 163 | DocProject/Help/*.hhc 164 | DocProject/Help/*.hhk 165 | DocProject/Help/*.hhp 166 | DocProject/Help/Html2 167 | 
DocProject/Help/html 168 | 169 | # Click-Once directory 170 | publish/ 171 | 172 | # Publish Web Output 173 | *.[Pp]ublish.xml 174 | *.azurePubxml 175 | # Note: Comment the next line if you want to checkin your web deploy settings, 176 | # but database connection strings (with potential passwords) will be unencrypted 177 | *.pubxml 178 | *.publishproj 179 | 180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 181 | # checkin your Azure Web App publish settings, but sensitive information contained 182 | # in these scripts will be unencrypted 183 | PublishScripts/ 184 | 185 | # NuGet Packages 186 | *.nupkg 187 | # NuGet Symbol Packages 188 | *.snupkg 189 | # The packages folder can be ignored because of Package Restore 190 | **/[Pp]ackages/* 191 | # except build/, which is used as an MSBuild target. 192 | !**/[Pp]ackages/build/ 193 | # Uncomment if necessary however generally it will be regenerated when needed 194 | #!**/[Pp]ackages/repositories.config 195 | # NuGet v3's project.json files produces more ignorable files 196 | *.nuget.props 197 | *.nuget.targets 198 | 199 | # Microsoft Azure Build Output 200 | csx/ 201 | *.build.csdef 202 | 203 | # Microsoft Azure Emulator 204 | ecf/ 205 | rcf/ 206 | 207 | # Windows Store app package directories and files 208 | AppPackages/ 209 | BundleArtifacts/ 210 | Package.StoreAssociation.xml 211 | _pkginfo.txt 212 | *.appx 213 | *.appxbundle 214 | *.appxupload 215 | 216 | # Visual Studio cache files 217 | # files ending in .cache can be ignored 218 | *.[Cc]ache 219 | # but keep track of directories ending in .cache 220 | !?*.[Cc]ache/ 221 | 222 | # Others 223 | ClientBin/ 224 | ~$* 225 | *~ 226 | *.dbmdl 227 | *.dbproj.schemaview 228 | *.jfm 229 | *.pfx 230 | *.publishsettings 231 | orleans.codegen.cs 232 | 233 | # Including strong name files can present a security risk 234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 235 | #*.snk 236 | 237 | # Since there are multiple 
workflows, uncomment next line to ignore bower_components 238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 239 | #bower_components/ 240 | 241 | # RIA/Silverlight projects 242 | Generated_Code/ 243 | 244 | # Backup & report files from converting an old project file 245 | # to a newer Visual Studio version. Backup files are not needed, 246 | # because we have git ;-) 247 | _UpgradeReport_Files/ 248 | Backup*/ 249 | UpgradeLog*.XML 250 | UpgradeLog*.htm 251 | ServiceFabricBackup/ 252 | *.rptproj.bak 253 | 254 | # SQL Server files 255 | *.mdf 256 | *.ldf 257 | *.ndf 258 | 259 | # Business Intelligence projects 260 | *.rdl.data 261 | *.bim.layout 262 | *.bim_*.settings 263 | *.rptproj.rsuser 264 | *- [Bb]ackup.rdl 265 | *- [Bb]ackup ([0-9]).rdl 266 | *- [Bb]ackup ([0-9][0-9]).rdl 267 | 268 | # Microsoft Fakes 269 | FakesAssemblies/ 270 | 271 | # GhostDoc plugin setting file 272 | *.GhostDoc.xml 273 | 274 | # Node.js Tools for Visual Studio 275 | .ntvs_analysis.dat 276 | node_modules/ 277 | 278 | # Visual Studio 6 build log 279 | *.plg 280 | 281 | # Visual Studio 6 workspace options file 282 | *.opt 283 | 284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
285 | *.vbw 286 | 287 | # Visual Studio LightSwitch build output 288 | **/*.HTMLClient/GeneratedArtifacts 289 | **/*.DesktopClient/GeneratedArtifacts 290 | **/*.DesktopClient/ModelManifest.xml 291 | **/*.Server/GeneratedArtifacts 292 | **/*.Server/ModelManifest.xml 293 | _Pvt_Extensions 294 | 295 | # Paket dependency manager 296 | .paket/paket.exe 297 | paket-files/ 298 | 299 | # FAKE - F# Make 300 | .fake/ 301 | 302 | # CodeRush personal settings 303 | .cr/personal 304 | 305 | # Python Tools for Visual Studio (PTVS) 306 | __pycache__/ 307 | *.pyc 308 | 309 | # Cake - Uncomment if you are using it 310 | # tools/** 311 | # !tools/packages.config 312 | 313 | # Tabs Studio 314 | *.tss 315 | 316 | # Telerik's JustMock configuration file 317 | *.jmconfig 318 | 319 | # BizTalk build output 320 | *.btp.cs 321 | *.btm.cs 322 | *.odx.cs 323 | *.xsd.cs 324 | 325 | # OpenCover UI analysis results 326 | OpenCover/ 327 | 328 | # Azure Stream Analytics local run output 329 | ASALocalRun/ 330 | 331 | # MSBuild Binary and Structured Log 332 | *.binlog 333 | 334 | # NVidia Nsight GPU debugger configuration file 335 | *.nvuser 336 | 337 | # MFractors (Xamarin productivity tool) working folder 338 | .mfractor/ 339 | 340 | # Local History for Visual Studio 341 | .localhistory/ 342 | 343 | # BeatPulse healthcheck temp database 344 | healthchecksdb 345 | 346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 347 | MigrationBackup/ 348 | 349 | # Ionide (cross platform F# VS Code tools) working folder 350 | .ionide/ 351 | -------------------------------------------------------------------------------- /Data/kdd_after_features.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CactiLab/code-xNIDS/26162bdebff2151f4fcc8874d7521a7766169a20/Data/kdd_after_features.npy -------------------------------------------------------------------------------- /Data/kdd_history_selected_fn_rows.csv: 
-------------------------------------------------------------------------------- 1 | ,duration,protocol_type,service,flag,src_bytes,dst_bytes,land,wrong_fragment,urgent,hot,num_failed_logins,logged_in,num_compromised,root_shell,su_attempted,num_root,num_file_creations,num_shells,num_access_files,num_outbound_cmds,is_host_login,is_guest_login,count,srv_count,serror_rate,srv_serror_rate,rerror_rate,srv_rerror_rate,same_srv_rate,diff_srv_rate,srv_diff_host_rate,dst_host_count,dst_host_srv_count,dst_host_same_srv_rate,dst_host_diff_srv_rate,dst_host_same_src_port_rate,dst_host_srv_diff_host_rate,dst_host_serror_rate,dst_host_srv_serror_rate,dst_host_rerror_rate,dst_host_srv_rerror_rate,outcome,difficulty 2 | 21920,0,tcp,http,SF,325,4960,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,15,15,0.0,0.0,0.0,0.0,1.0,0.0,0.0,237,255,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 3 | 21921,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,206,0.81,0.13,0.0,0.0,0.0,0.0,0.18,0.0,mailbomb,11 4 | 21922,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,116,13,1.0,1.0,0.0,0.0,0.11,0.06,0.0,255,13,0.05,0.05,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 5 | 21923,0,tcp,http,SF,223,5989,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,180,255,1.0,0.0,0.01,0.02,0.0,0.0,0.0,0.0,normal,21 6 | 21924,4,tcp,pop_3,SF,32,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,144,0.56,0.02,0.0,0.0,0.0,0.0,0.15,0.0,guess_passwd,15 7 | 21925,0,icmp,eco_i,SF,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,65,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1,119,1.0,0.0,1.0,0.27,0.0,0.0,0.0,0.0,saint,15 8 | 21926,4,tcp,pop_3,SF,26,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,140,123,0.87,0.02,0.01,0.02,0.0,0.0,0.07,0.0,guess_passwd,9 9 | 21927,0,tcp,http,SF,219,18353,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,8,255,1.0,0.0,0.12,0.03,0.0,0.0,0.0,0.0,normal,21 10 | 
21928,0,tcp,http,SF,239,1905,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,4,4,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 11 | 21929,0,tcp,ftp_data,SF,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,6,21,0.83,0.33,0.83,0.14,0.0,0.0,0.0,0.0,warezmaster,5 12 | 21930,280,tcp,ftp_data,SF,283618,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,4,4,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,warezmaster,19 13 | 21931,0,tcp,http,SF,229,2499,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,21,30,0.0,0.0,0.0,0.0,1.0,0.0,0.1,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 14 | -------------------------------------------------------------------------------- /Data/kdd_history_selected_fn_rows_122.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121 2 | 0,0.0,2.3551340931901254e-07,3.786440478921786e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.9294117647058824,1.0,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 3 | 
1,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.807843137254902,0.81,0.13,0.0,0.0,0.0,0.0,0.18,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 4 | 2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.22700587084148727,0.025440313111545987,1.0,1.0,0.0,0.0,0.11,0.06,0.0,1.0,0.050980392156862744,0.05,0.05,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 5 | 3,0.0,1.6159843162504553e-07,4.571974199246487e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.7058823529411764,1.0,1.0,0.0,0.01,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 6 | 
4,9.322270905192505e-05,2.3189012609872004e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.5647058823529412,0.56,0.02,0.0,0.0,0.0,0.0,0.15,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 7 | 5,0.0,1.4493132881170003e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.12720156555772993,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.4666666666666667,1.0,0.0,1.0,0.27,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 8 | 6,9.322270905192505e-05,1.8841072745521005e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.5490196078431373,0.4823529411764706,0.87,0.02,0.01,0.02,0.0,0.0,0.07,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 9 | 
7,0.0,1.5869980504881154e-07,1.4010593167268455e-05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.03137254901960784,1.0,1.0,0.0,0.12,0.03,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 10 | 8,0.0,1.7319293792998152e-07,1.4542679661987908e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.007827788649706457,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 11 | 9,0.0,8.695879728702001e-09,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.023529411764705882,0.08235294117647059,0.83,0.33,0.83,0.14,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 12 | 
10,0.006525589633634754,0.00020552566807458369,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.01568627450980392,0.01568627450980392,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 13 | -------------------------------------------------------------------------------- /Data/kdd_history_selected_fp_rows.csv: -------------------------------------------------------------------------------- 1 | ,duration,protocol_type,service,flag,src_bytes,dst_bytes,land,wrong_fragment,urgent,hot,num_failed_logins,logged_in,num_compromised,root_shell,su_attempted,num_root,num_file_creations,num_shells,num_access_files,num_outbound_cmds,is_host_login,is_guest_login,count,srv_count,serror_rate,srv_serror_rate,rerror_rate,srv_rerror_rate,same_srv_rate,diff_srv_rate,srv_diff_host_rate,dst_host_count,dst_host_srv_count,dst_host_same_srv_rate,dst_host_diff_srv_rate,dst_host_same_src_port_rate,dst_host_srv_diff_host_rate,dst_host_serror_rate,dst_host_srv_serror_rate,dst_host_rerror_rate,dst_host_srv_rerror_rate,outcome,difficulty 2 | 19000,0,tcp,http,SF,142,6103,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,19,255,1.0,0.0,0.05,0.04,0.0,0.0,0.0,0.0,normal,21 3 | 19001,0,udp,private,SF,47,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,98,98,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.57,0.0,0.0,0.0,0.0,0.0,normal,14 4 | 19002,0,tcp,http,SF,159,601,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,55,255,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,normal,21 5 | 
19003,0,tcp,http,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,14,1.0,1.0,0.0,0.0,1.0,0.0,0.0,255,245,0.96,0.01,0.0,0.0,0.2,0.21,0.56,0.59,apache2,15 6 | 19004,0,tcp,other,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,511,1,0.08,0.0,0.91,1.0,0.0,1.0,0.0,255,1,0.0,1.0,0.0,0.0,0.13,0.0,0.87,1.0,saint,20 7 | 19005,0,udp,private,SF,105,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,253,0.99,0.01,0.01,0.0,0.0,0.0,0.0,0.0,normal,16 8 | 19006,0,tcp,netstat,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,220,7,1.0,1.0,0.0,0.0,0.03,0.06,0.0,255,7,0.03,0.07,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 9 | 19007,0,udp,private,SF,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,3,0.0,0.0,0.0,0.0,0.13,0.43,0.0,255,3,0.01,0.73,0.99,0.0,0.0,0.0,0.01,0.0,normal,4 10 | 19008,5,tcp,pop_3,SF,28,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,guess_passwd,18 11 | 19009,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,2,0.0,0.0,1.0,1.0,0.01,0.07,0.0,255,2,0.01,0.08,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 12 | 19010,0,tcp,http,SF,185,3427,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7,7,0.0,0.0,0.0,0.0,1.0,0.0,0.0,7,229,1.0,0.0,0.14,0.04,0.0,0.0,0.0,0.0,normal,21 13 | 19011,0,tcp,supdup,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,298,19,0.0,0.0,1.0,1.0,0.06,0.06,0.0,255,19,0.07,0.06,0.0,0.0,0.0,0.0,1.0,1.0,neptune,20 14 | 19012,0,udp,domain_u,SF,36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,17,0.0,0.0,0.0,0.0,1.0,0.0,0.12,4,52,1.0,0.0,1.0,0.04,0.0,0.0,0.0,0.0,normal,21 15 | 19013,0,tcp,http,SF,320,2771,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,20,20,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.01,0.01,0.0,0.0,normal,21 16 | 19014,0,tcp,daytime,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,197,1,1.0,1.0,0.0,0.0,0.01,0.07,0.0,255,1,0.0,0.06,0.0,0.0,1.0,1.0,0.0,0.0,neptune,19 17 | 19015,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,247,3,0.0,0.0,1.0,1.0,0.01,0.07,0.0,255,3,0.01,0.08,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 18 | 
19016,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,162,12,1.0,1.0,0.0,0.0,0.07,0.09,0.0,255,12,0.05,0.09,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 19 | 19017,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,9,0.0,0.0,1.0,1.0,0.04,0.07,0.0,255,9,0.04,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 20 | 19018,0,tcp,http,SF,326,2076,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,48,48,1.0,0.0,0.02,0.0,0.0,0.0,0.0,0.0,normal,21 21 | 19019,0,tcp,telnet,RSTO,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,71,18,0.0,0.0,1.0,1.0,0.25,0.06,0.0,255,58,0.23,0.11,0.01,0.0,0.0,0.0,0.99,1.0,neptune,19 22 | 19020,0,tcp,http,SF,259,608,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 23 | 19021,0,tcp,http,SF,210,2393,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,9,12,0.0,0.0,0.0,0.0,1.0,0.0,0.17,183,255,1.0,0.0,0.01,0.03,0.0,0.0,0.0,0.0,normal,21 24 | 19022,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,19,0.0,0.0,1.0,1.0,0.14,0.06,0.0,255,19,0.07,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 25 | 19023,0,udp,private,SF,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,34,17,0.0,0.0,0.0,0.0,0.5,0.15,0.0,255,17,0.07,0.65,1.0,0.0,0.0,0.0,0.0,0.0,normal,3 26 | 19024,0,tcp,ftp_data,SF,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,23,3,0.13,0.09,0.13,0.0,0.0,0.0,0.0,0.0,warezmaster,6 27 | 19025,0,tcp,bgp,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,222,2,0.0,0.0,1.0,1.0,0.01,0.06,0.0,255,2,0.01,0.06,0.0,0.0,0.0,0.0,1.0,1.0,neptune,20 28 | 19026,0,udp,domain_u,SF,45,108,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,0.0,0.0,0.0,0.0,0.67,0.67,0.0,126,123,0.98,0.02,0.01,0.0,0.0,0.0,0.0,0.0,normal,21 29 | 19027,7,tcp,imap4,RSTO,0,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,8,0.0,0.0,1.0,1.0,0.5,1.0,1.0,255,20,0.08,0.03,0.0,0.0,0.0,0.0,0.25,0.95,mscan,13 30 | 19028,903,tcp,http,RSTR,78404,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,52,52,0.0,0.0,1.0,1.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.45,0.45,apache2,15 31 | 
19029,0,tcp,private,SH,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1.0,1.0,0.0,0.0,1.0,0.0,0.0,137,1,0.01,1.0,1.0,0.0,1.0,1.0,0.0,0.0,nmap,18 32 | 19030,7771,tcp,telnet,RSTR,0,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,1.0,1.0,1.0,0.0,0.0,255,116,0.45,0.49,0.0,0.0,0.35,0.76,0.48,0.01,processtable,17 33 | 19031,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,264,10,0.0,0.0,1.0,1.0,0.04,0.06,0.0,255,10,0.04,0.06,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 34 | 19032,5,tcp,imap4,SF,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0.0,0.0,0.0,0.0,0.5,1.0,0.0,137,51,0.2,0.04,0.01,0.04,0.0,0.0,0.16,0.47,mscan,6 35 | 19033,0,tcp,http,SF,214,6356,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 36 | 19034,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,99,2,0.0,0.0,1.0,1.0,0.02,0.07,0.0,255,2,0.01,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 37 | 19035,0,tcp,http,SF,242,1310,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,10,11,0.0,0.0,0.0,0.0,1.0,0.0,0.18,226,255,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 38 | 19036,0,tcp,http,SF,160,36527,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 39 | 19037,0,tcp,ftp_data,SF,12983,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,2,0.0,0.0,0.0,0.0,0.5,0.75,0.0,255,14,0.05,0.03,0.05,0.0,0.0,0.0,0.02,0.0,normal,18 40 | 19038,0,tcp,auth,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,201,4,0.0,0.0,1.0,1.0,0.02,0.08,0.0,255,4,0.02,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,20 41 | 19039,0,tcp,other,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,436,1,0.05,0.0,0.95,1.0,0.0,1.0,0.0,255,1,0.0,1.0,0.0,0.0,0.06,0.0,0.94,1.0,saint,18 42 | 19040,0,tcp,smtp,SF,0,83,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,2,0.0,0.0,0.0,0.0,1.0,0.0,1.0,145,109,0.65,0.08,0.01,0.02,0.01,0.0,0.0,0.0,normal,21 43 | 19041,0,tcp,http,SF,234,1075,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,4,5,0.0,0.0,0.0,0.0,1.0,0.0,0.4,255,226,0.89,0.01,0.0,0.0,0.0,0.0,0.03,0.0,normal,18 44 | 
19042,0,tcp,http,SF,221,506,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,14,0.0,0.0,0.0,0.0,1.0,0.0,0.36,17,255,1.0,0.0,0.06,0.03,0.0,0.0,0.0,0.0,normal,21 45 | 19043,0,tcp,imap4,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,17,0.0,0.0,1.0,1.0,0.5,1.0,1.0,255,32,0.13,0.18,0.0,0.0,0.0,0.0,0.26,1.0,mscan,12 46 | 19044,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,161,0.63,0.13,0.0,0.0,0.0,0.0,0.36,0.0,mailbomb,11 47 | 19045,0,tcp,telnet,SF,125,174,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,254,1.0,0.01,0.0,0.0,0.01,0.01,0.01,0.01,guess_passwd,14 48 | 19046,0,tcp,ftp_data,SF,7012,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,15,0.0,0.0,0.0,0.0,1.0,0.0,0.0,192,56,0.29,0.02,0.29,0.0,0.0,0.0,0.0,0.0,normal,20 49 | 19047,0,udp,domain_u,SF,44,132,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,157,237,0.0,0.0,0.0,0.0,1.0,0.0,0.01,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,18 50 | 19048,0,tcp,http,SF,306,2239,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,253,0.99,0.01,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 51 | 19049,0,tcp,systat,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,173,18,1.0,1.0,0.0,0.0,0.1,0.09,0.0,255,18,0.07,0.09,0.0,0.0,1.0,1.0,0.0,0.0,neptune,18 52 | 19050,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,235,8,0.0,0.0,1.0,1.0,0.03,0.07,0.0,255,8,0.03,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 53 | 19051,0,tcp,ctf,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,114,3,0.67,1.0,0.33,0.0,0.03,0.07,0.0,255,3,0.01,0.06,0.0,0.0,0.3,1.0,0.7,0.0,neptune,18 54 | 19052,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,148,2,1.0,1.0,0.0,0.0,0.01,0.06,0.0,255,2,0.01,0.05,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 55 | 19053,0,tcp,http,SF,249,622,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7,7,0.0,0.0,0.0,0.0,1.0,0.0,0.0,55,253,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,normal,21 56 | 19054,6,tcp,ftp_data,SF,3131464,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,13,0.05,0.02,0.05,0.0,0.0,0.0,0.0,0.0,normal,18 57 | 
19055,0,tcp,http,SF,223,514,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,4,4,0.0,0.0,0.0,0.0,1.0,0.0,0.0,135,255,1.0,0.0,0.01,0.01,0.0,0.0,0.0,0.0,normal,21 58 | 19056,0,tcp,http,SF,353,4580,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,15,15,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 59 | 19057,0,tcp,netbios_ns,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,129,18,0.0,0.0,1.0,1.0,0.14,0.07,0.0,255,18,0.07,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,20 60 | 19058,0,tcp,sunrpc,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,0.0,0.0,1.0,1.0,1.0,0.0,1.0,255,24,0.09,0.03,0.0,0.0,0.0,0.0,0.42,1.0,mscan,13 61 | 19059,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,274,12,0.09,0.75,0.91,0.25,0.04,0.06,0.0,255,12,0.05,0.06,0.0,0.0,0.1,0.75,0.9,0.25,neptune,21 62 | 19060,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,146,1,1.0,1.0,0.0,0.0,0.01,0.1,0.0,255,1,0.0,0.1,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 63 | 19061,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,247,9,1.0,1.0,0.0,0.0,0.04,0.06,0.0,255,9,0.04,0.06,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 64 | 19062,4,tcp,pop_3,SF,32,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,155,0.61,0.02,0.0,0.0,0.0,0.0,0.13,0.0,guess_passwd,15 65 | 19063,0,tcp,sunrpc,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,3,0.0,0.0,1.0,1.0,0.25,1.0,1.0,183,52,0.19,0.03,0.01,0.04,0.01,0.0,0.89,1.0,mscan,15 66 | 19064,0,tcp,http,SF,361,1164,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,15,15,0.0,0.0,0.0,0.0,1.0,0.0,0.0,247,255,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 67 | 19065,0,tcp,http,SF,354,2650,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,12,12,0.0,0.0,0.0,0.0,1.0,0.0,0.0,216,255,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 68 | 19066,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,6,0.0,0.0,1.0,1.0,0.05,0.06,0.0,255,6,0.02,0.05,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 69 | 19067,0,tcp,other,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,444,1,0.04,0.0,0.93,1.0,0.0,1.0,0.0,255,1,0.0,1.0,0.0,0.0,0.03,0.0,0.96,1.0,saint,18 70 | 
19068,0,tcp,http,SF,251,1543,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,8,255,1.0,0.0,0.12,0.01,0.0,0.0,0.0,0.0,normal,21 71 | 19069,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,214,0.84,0.13,0.0,0.0,0.0,0.0,0.15,0.0,mailbomb,14 72 | 19070,0,tcp,telnet,SF,120,174,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,75,0.29,0.02,0.0,0.0,0.0,0.0,0.01,0.04,guess_passwd,11 73 | 19071,0,tcp,http,SF,234,932,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,9,9,0.0,0.0,0.0,0.0,1.0,0.0,0.0,59,255,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,normal,21 74 | 19072,0,udp,private,SF,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,111,15,0.0,0.0,0.0,0.0,0.14,0.77,0.0,255,31,0.12,0.78,1.0,0.0,0.0,0.0,0.0,0.0,normal,7 75 | 19073,0,udp,domain_u,SF,44,78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,211,211,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,normal,18 76 | 19074,0,tcp,finger,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,54,14,1.0,1.0,0.0,0.0,0.26,0.07,0.0,255,61,0.24,0.02,0.02,0.0,0.97,1.0,0.0,0.0,neptune,19 77 | 19075,0,tcp,telnet,RSTO,0,48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,11,0.0,0.18,1.0,0.73,1.0,0.0,1.0,255,128,0.5,0.01,0.0,0.0,0.0,0.0,0.66,0.33,mscan,12 78 | 19076,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,205,3,0.0,0.0,1.0,1.0,0.01,0.08,0.0,255,3,0.01,0.07,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 79 | 19077,35,tcp,ftp,SF,96,533,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,221,3,0.01,0.03,0.0,0.0,0.0,0.0,0.0,0.0,rootkit,4 80 | 19078,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,4,3,0.0,0.0,0.0,0.0,0.75,0.5,0.0,255,245,0.96,0.04,0.0,0.0,0.0,0.0,0.04,0.0,mailbomb,18 81 | 19079,0,udp,domain_u,SF,43,43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,0.0,0.0,0.0,0.0,1.0,0.0,0.67,255,253,0.99,0.01,0.01,0.0,0.0,0.0,0.0,0.0,normal,21 82 | 19080,0,tcp,http,SF,228,1462,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7,7,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 83 | 
19081,0,tcp,daytime,RSTR,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,1.0,1.0,1.0,0.0,0.0,25,1,0.04,0.92,0.8,0.0,0.0,0.0,0.8,1.0,portsweep,15 84 | 19082,0,tcp,telnet,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,19,0.0,0.16,1.0,0.21,1.0,0.0,0.58,58,84,0.33,0.09,0.02,0.02,0.0,0.0,0.9,0.7,mscan,13 85 | 19083,0,tcp,http,SF,219,6854,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,17,17,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 86 | 19084,0,tcp,http,SF,328,277,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,11,0.04,0.01,0.0,0.0,0.01,0.0,0.02,0.0,normal,18 87 | 19085,0,tcp,other,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,486,1,0.07,0.0,0.93,1.0,0.0,1.0,0.0,255,1,0.0,1.0,0.0,0.0,0.03,0.0,0.97,1.0,satan,20 88 | 19086,0,udp,private,SF,52,52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,506,506,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,254,1.0,0.01,0.76,0.0,0.0,0.0,0.0,0.0,normal,17 89 | 19087,0,tcp,http,SF,144,3547,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,3,0.0,0.0,0.0,0.0,1.0,0.0,0.67,8,255,1.0,0.0,0.12,0.04,0.0,0.0,0.0,0.0,normal,21 90 | 19088,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,146,20,0.0,0.0,1.0,1.0,0.14,0.06,0.0,255,20,0.08,0.06,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 91 | 19089,3,tcp,pop_3,SF,30,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,254,1.0,0.01,0.0,0.0,0.0,0.0,0.0,0.0,guess_passwd,17 92 | 19090,0,tcp,pop_3,RSTO,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,230,12,0.0,0.0,1.0,1.0,0.05,0.07,0.0,255,12,0.05,0.09,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 93 | 19091,3,tcp,imap4,RSTO,0,44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,6,0.0,0.0,1.0,1.0,0.5,1.0,1.0,106,59,0.27,0.05,0.01,0.03,0.0,0.0,0.96,0.49,mscan,13 94 | 19092,0,tcp,http,SF,236,8117,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,25,30,0.0,0.0,0.0,0.0,1.0,0.0,0.07,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 95 | 19093,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,39,0.15,0.16,0.0,0.0,0.0,0.0,0.83,0.0,mailbomb,13 96 | 
19094,0,tcp,imap4,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,4,1.0,1.0,0.0,0.0,1.0,0.0,1.0,52,86,0.35,0.06,0.02,0.03,1.0,1.0,0.0,0.0,mscan,18 97 | 19095,2092,tcp,http,RSTR,74024,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7,7,0.0,0.0,1.0,1.0,1.0,0.0,0.0,255,244,0.96,0.01,0.0,0.0,0.02,0.02,0.48,0.5,apache2,15 98 | 19096,0,tcp,http,SF,306,608,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,9,9,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 99 | 19097,1,tcp,smtp,SF,2005,328,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,234,141,0.58,0.03,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 100 | 19098,0,tcp,http,SF,245,663,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,43,43,0.0,0.0,0.0,0.0,1.0,0.0,0.0,43,255,1.0,0.0,0.02,0.04,0.0,0.0,0.0,0.0,normal,21 101 | 19099,0,tcp,gopher,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,186,4,1.0,1.0,0.0,0.0,0.02,0.09,0.0,255,4,0.02,0.09,0.0,0.0,1.0,1.0,0.0,0.0,neptune,19 102 | 19100,0,tcp,private,REJ,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,132,15,0.0,0.0,1.0,1.0,0.11,0.05,0.0,255,15,0.06,0.06,0.0,0.0,0.0,0.0,1.0,1.0,neptune,21 103 | 19101,0,tcp,smtp,SF,1215,335,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,164,0.64,0.03,0.0,0.0,0.0,0.0,0.19,0.0,normal,18 104 | 19102,282,tcp,ftp,SF,156,593,0,0,0,2,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,6,6,1.0,0.0,0.17,0.0,0.0,0.0,0.0,0.0,warezmaster,12 105 | 19103,4,tcp,pop_3,SF,30,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,223,0.87,0.04,0.0,0.0,0.0,0.0,0.0,0.0,guess_passwd,18 106 | 19104,0,tcp,http,SF,280,13254,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,12,12,0.0,0.0,0.0,0.0,1.0,0.0,0.0,12,254,1.0,0.0,0.08,0.04,0.0,0.0,0.0,0.0,normal,21 107 | 19105,0,tcp,http,SF,203,15007,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,3,0.0,0.0,0.0,0.0,1.0,0.0,0.0,5,255,1.0,0.0,0.2,0.02,0.0,0.0,0.0,0.0,normal,21 108 | 19106,0,udp,other,SF,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,440,1,0.0,0.0,0.0,0.0,0.0,1.0,0.0,255,1,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,satan,17 109 | 
19107,0,tcp,imap4,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1.0,1.0,0.0,0.0,1.0,0.0,0.0,92,67,0.34,0.03,0.01,0.03,1.0,0.46,0.0,0.54,mscan,17 110 | 19108,8140,tcp,telnet,SF,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,57,0.22,0.04,0.0,0.0,0.0,0.02,0.11,0.23,processtable,8 111 | 19109,0,icmp,eco_i,SF,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,70,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1,199,1.0,0.0,1.0,0.26,0.0,0.0,0.0,0.0,saint,15 112 | 19110,0,tcp,http,SF,170,603,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1,255,1.0,0.0,1.0,0.02,0.0,0.0,0.0,0.0,normal,21 113 | 19111,0,tcp,http,SF,324,1227,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,6,7,0.0,0.0,0.0,0.0,1.0,0.0,0.29,162,162,1.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,normal,21 114 | 19112,0,tcp,http,SF,308,825,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,12,12,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 115 | 19113,0,tcp,http,SF,243,5333,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,2,0.0,0.0,0.0,0.0,1.0,0.0,1.0,108,255,1.0,0.0,0.01,0.05,0.0,0.0,0.0,0.0,normal,21 116 | 19114,0,udp,private,SF,54,51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,260,260,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.86,0.0,0.0,0.0,0.0,0.0,normal,17 117 | -------------------------------------------------------------------------------- /Data/kdd_history_selected_fp_rows_122.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121 2 | 
0,0.0,1.0290124345630702e-07,4.659001258641061e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.07450980392156863,1.0,1.0,0.0,0.05,0.04,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 3 | 1,0.0,3.4058862270749505e-08,3.358939134527391e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.1917808219178082,0.1917808219178082,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.57,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 4 | 2,0.0,1.1522040640530152e-07,4.588005499661277e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.21568627450980393,1.0,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 5 | 
3,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0273972602739726,0.0273972602739726,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.9607843137254902,0.96,0.01,0.0,0.0,0.2,0.21,0.56,0.59,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 6 | 4,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0019569471624266144,0.08,0.0,0.91,1.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.0,1.0,0.0,0.0,0.13,0.0,0.87,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 7 | 5,0.0,7.608894762614251e-08,8.01565020739491e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.9921568627450981,0.99,0.01,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 8 | 
6,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.43052837573385516,0.0136986301369863,1.0,1.0,0.0,0.0,0.03,0.06,0.0,1.0,0.027450980392156862,0.03,0.07,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 9 | 7,0.0,7.246566440585001e-10,7.633952578471343e-10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04500978473581213,0.005870841487279843,0.0,0.0,0.0,0.0,0.13,0.43,0.0,1.0,0.011764705882352941,0.01,0.73,0.99,0.0,0.0,0.0,0.01,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 10 | 8,0.00011652838631490631,2.0290386033638003e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 11 | 
9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.49510763209393344,0.003913894324853229,0.0,0.0,1.0,1.0,0.01,0.07,0.0,1.0,0.00784313725490196,0.01,0.08,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 12 | 10,0.0,1.3406147915082252e-07,2.616155548642129e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0136986301369863,0.0136986301369863,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.027450980392156862,0.8980392156862745,1.0,0.0,0.14,0.04,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 13 | 11,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5831702544031311,0.03718199608610567,0.0,0.0,1.0,1.0,0.06,0.06,0.0,1.0,0.07450980392156863,0.07,0.06,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 14 | 
12,0.0,2.6087639186106005e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.033268101761252444,0.0,0.0,0.0,0.0,1.0,0.0,0.12,0.01568627450980392,0.20392156862745098,1.0,0.0,1.0,0.04,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 15 | 13,0.0,2.3189012609872005e-07,2.115368259494409e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.03913894324853229,0.03913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.01,0.01,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 16 | 14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.38551859099804303,0.0019569471624266144,1.0,1.0,0.0,0.0,0.01,0.07,0.0,1.0,0.00392156862745098,0.0,0.06,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 17 | 
15,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.48336594911937375,0.005870841487279843,0.0,0.0,1.0,1.0,0.01,0.07,0.0,1.0,0.011764705882352941,0.01,0.08,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 18 | 16,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.31702544031311153,0.023483365949119372,1.0,1.0,0.0,0.0,0.07,0.09,0.0,1.0,0.047058823529411764,0.05,0.09,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 19 | 17,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4892367906066536,0.01761252446183953,0.0,0.0,1.0,1.0,0.04,0.07,0.0,1.0,0.03529411764705882,0.04,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 20 | 
18,0.0,2.3623806596307103e-07,1.5848085552906507e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.18823529411764706,0.18823529411764706,1.0,0.0,0.02,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 21 | 19,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.13894324853228962,0.03522504892367906,0.0,0.0,1.0,1.0,0.25,0.06,0.0,1.0,0.22745098039215686,0.23,0.11,0.01,0.0,0.0,0.0,0.99,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 22 | 20,0.0,1.8768607081115154e-07,4.6414431677105765e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 23 | 
21,0.0,1.5217789525228502e-07,1.8268048520281923e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.01761252446183953,0.023483365949119372,0.0,0.0,0.0,0.0,1.0,0.0,0.17,0.7176470588235294,1.0,1.0,0.0,0.01,0.03,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 24 | 22,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2583170254403131,0.03718199608610567,0.0,0.0,1.0,1.0,0.14,0.06,0.0,1.0,0.07450980392156863,0.07,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 25 | 23,0.0,7.246566440585001e-10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06653620352250489,0.033268101761252444,0.0,0.0,0.0,0.0,0.5,0.15,0.0,1.0,0.06666666666666667,0.07,0.65,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 26 | 
24,0.0,8.695879728702001e-09,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.09019607843137255,0.011764705882352941,0.13,0.09,0.13,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 27 | 25,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4344422700587084,0.003913894324853229,0.0,0.0,1.0,1.0,0.01,0.06,0.0,1.0,0.00784313725490196,0.01,0.06,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 28 | 26,0.0,3.2609548982632507e-08,8.24466878474905e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.003913894324853229,0.0,0.0,0.0,0.0,0.67,0.67,0.0,0.49411764705882355,0.4823529411764706,0.98,0.02,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 29 | 
27,0.00016313974084086885,0.0,3.358939134527391e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.015655577299412915,0.0,0.0,1.0,1.0,0.5,1.0,1.0,1.0,0.0784313725490196,0.08,0.03,0.0,0.0,0.0,0.0,0.25,0.95,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 30 | 28,0.02104502656847208,5.681597952076265e-05,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10176125244618395,0.10176125244618395,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.45,0.45,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 31 | 29,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.5372549019607843,0.00392156862745098,0.01,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0 32 | 
30,0.1811084180106274,0.0,3.358939134527391e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.4549019607843137,0.45,0.49,0.0,0.0,0.35,0.76,0.48,0.01,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 33 | 31,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5166340508806262,0.019569471624266144,0.0,0.0,1.0,1.0,0.04,0.06,0.0,1.0,0.0392156862745098,0.04,0.06,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 34 | 32,0.00011652838631490631,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.0019569471624266144,0.0,0.0,0.0,0.0,0.5,1.0,0.0,0.5372549019607843,0.2,0.2,0.04,0.01,0.04,0.0,0.0,0.16,0.47,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 35 | 
33,0.0,1.5507652182851902e-07,4.852140258876386e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 36 | 34,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.19373776908023482,0.003913894324853229,0.0,0.0,1.0,1.0,0.02,0.07,0.0,1.0,0.00784313725490196,0.01,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 37 | 35,0.0,1.7536690786215703e-07,1.000047787779746e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.019569471624266144,0.021526418786692758,0.0,0.0,0.0,0.0,1.0,0.0,0.18,0.8862745098039215,1.0,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 38 | 
36,0.0,1.1594506304936002e-07,2.7884538583382275e-05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 39 | 37,0.0,9.408217209811507e-06,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.003913894324853229,0.0,0.0,0.0,0.0,0.5,0.75,0.0,1.0,0.054901960784313725,0.05,0.03,0.05,0.0,0.0,0.0,0.02,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 40 | 38,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.3933463796477495,0.007827788649706457,0.0,0.0,1.0,1.0,0.02,0.08,0.0,1.0,0.01568627450980392,0.02,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 41 | 
39,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8532289628180039,0.0019569471624266144,0.05,0.0,0.95,1.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.0,1.0,0.0,0.0,0.06,0.0,0.94,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 42 | 40,0.0,0.0,6.336180640131215e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.5686274509803921,0.42745098039215684,0.65,0.08,0.01,0.02,0.01,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 43 | 41,0.0,1.6956965470968903e-07,8.206499021856694e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.009784735812133072,0.0,0.0,0.0,0.0,1.0,0.0,0.4,1.0,0.8862745098039215,0.89,0.01,0.0,0.0,0.0,0.0,0.03,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 44 | 
42,0.0,1.6014911833692852e-07,3.8627800047065e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0273972602739726,0.0,0.0,0.0,0.0,1.0,0.0,0.36,0.06666666666666667,1.0,1.0,0.0,0.06,0.03,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 45 | 43,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.033268101761252444,0.0,0.0,1.0,1.0,0.5,1.0,1.0,1.0,0.12549019607843137,0.13,0.18,0.0,0.0,0.0,0.0,0.26,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 46 | 44,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.6313725490196078,0.63,0.13,0.0,0.0,0.0,0.0,0.36,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 47 | 
45,0.0,9.058208050731251e-08,1.3283077486540137e-07,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.996078431372549,1.0,0.01,0.0,0.0,0.01,0.01,0.01,0.01,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 48 | 46,0.0,5.081292388138203e-06,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.7529411764705882,0.2196078431372549,0.29,0.02,0.29,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 49 | 47,0.0,3.188489233857401e-08,1.0076817403582173e-07,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.30724070450097846,0.4637964774951076,0.0,0.0,0.0,0.0,1.0,0.0,0.01,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 50 | 
48,0.0,2.2174493308190104e-07,1.7092419823197337e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.9921568627450981,0.99,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 51 | 49,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.3385518590998043,0.03522504892367906,1.0,1.0,0.0,0.0,0.1,0.09,0.0,1.0,0.07058823529411765,0.07,0.09,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 52 | 50,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4598825831702544,0.015655577299412915,0.0,0.0,1.0,1.0,0.03,0.07,0.0,1.0,0.03137254901960784,0.03,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 53 | 
51,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.22309197651663404,0.005870841487279843,0.67,1.0,0.33,0.0,0.03,0.07,0.0,1.0,0.011764705882352941,0.01,0.06,0.0,0.0,0.3,1.0,0.7,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 54 | 52,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2896281800391389,0.003913894324853229,1.0,1.0,0.0,0.0,0.01,0.06,0.0,1.0,0.00784313725490196,0.01,0.05,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 55 | 53,0.0,1.8043950437056653e-07,4.748318503809175e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0136986301369863,0.0136986301369863,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.21568627450980393,0.9921568627450981,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 56 | 
54,0.0001398340635778876,0.002269236193230007,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.050980392156862744,0.05,0.02,0.05,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 57 | 55,0.0,1.6159843162504553e-07,3.9238516253342705e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.007827788649706457,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.5294117647058824,1.0,1.0,0.0,0.01,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 58 | 56,0.0,2.558037953526505e-07,3.496350280939875e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 59 | 
57,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.25244618395303325,0.03522504892367906,0.0,0.0,1.0,1.0,0.14,0.07,0.0,1.0,0.07058823529411765,0.07,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 60 | 58,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.005870841487279843,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.09411764705882353,0.09,0.03,0.0,0.0,0.0,0.0,0.42,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 61 | 59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5362035225048923,0.023483365949119372,0.09,0.75,0.91,0.25,0.04,0.06,0.0,1.0,0.047058823529411764,0.05,0.06,0.0,0.0,0.1,0.75,0.9,0.25,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 62 | 
60,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2857142857142857,0.0019569471624266144,1.0,1.0,0.0,0.0,0.01,0.1,0.0,1.0,0.00392156862745098,0.0,0.1,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 63 | 61,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.48336594911937375,0.01761252446183953,1.0,1.0,0.0,0.0,0.04,0.06,0.0,1.0,0.03529411764705882,0.04,0.06,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 64 | 62,9.322270905192505e-05,2.3189012609872004e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.6078431372549019,0.61,0.02,0.0,0.0,0.0,0.0,0.13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 65 | 
63,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.005870841487279843,0.0,0.0,1.0,1.0,0.25,1.0,1.0,0.7176470588235294,0.20392156862745098,0.19,0.03,0.01,0.04,0.01,0.0,0.89,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 66 | 64,0.0,2.6160104850511857e-07,8.885920801340643e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.9686274509803922,1.0,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 67 | 65,0.0,2.5652845199670904e-07,2.022997433294906e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.023483365949119372,0.023483365949119372,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.8470588235294118,1.0,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 68 | 
66,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2583170254403131,0.011741682974559686,0.0,0.0,1.0,1.0,0.05,0.06,0.0,1.0,0.023529411764705882,0.02,0.05,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 69 | 67,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8688845401174168,0.0019569471624266144,0.04,0.0,0.93,1.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.0,1.0,0.0,0.0,0.03,0.0,0.96,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 70 | 68,0.0,1.8188881765868354e-07,1.1779188828581283e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.03137254901960784,1.0,1.0,0.0,0.12,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 71 | 
69,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.8392156862745098,0.84,0.13,0.0,0.0,0.0,0.0,0.15,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 72 | 70,0.0,8.695879728702002e-08,1.3283077486540137e-07,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.29411764705882354,0.29,0.02,0.0,0.0,0.0,0.0,0.01,0.04,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 73 | 71,0.0,1.6956965470968903e-07,7.114843803135292e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.01761252446183953,0.01761252446183953,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.23137254901960785,1.0,1.0,0.0,0.02,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 74 | 
72,0.0,7.246566440585001e-10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2172211350293542,0.029354207436399216,0.0,0.0,0.0,0.0,0.14,0.77,0.0,1.0,0.12156862745098039,0.12,0.78,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 75 | 73,0.0,3.188489233857401e-08,5.9544830112076475e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.41291585127201563,0.41291585127201563,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 76 | 74,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10567514677103718,0.0273972602739726,1.0,1.0,0.0,0.0,0.26,0.07,0.0,1.0,0.2392156862745098,0.24,0.02,0.02,0.0,0.97,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 77 | 
75,0.0,0.0,3.6642972376662446e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.021526418786692758,0.0,0.18,1.0,0.73,1.0,0.0,1.0,1.0,0.5019607843137255,0.5,0.01,0.0,0.0,0.0,0.0,0.66,0.33,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 78 | 76,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.40117416829745595,0.005870841487279843,0.0,0.0,1.0,1.0,0.01,0.08,0.0,1.0,0.011764705882352941,0.01,0.07,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 79 | 77,0.0008156987042043443,6.956703782961601e-08,4.068896724325226e-07,0.0,0.0,0.0,0.012987012987012988,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.8666666666666667,0.011764705882352941,0.01,0.03,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 80 | 
78,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.005870841487279843,0.0,0.0,0.0,0.0,0.75,0.5,0.0,1.0,0.9607843137254902,0.96,0.04,0.0,0.0,0.0,0.0,0.04,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 81 | 79,0.0,3.116023569451551e-08,3.2825996087426775e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.67,1.0,0.9921568627450981,0.99,0.01,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 82 | 80,0.0,1.6522171484533802e-07,1.1160838669725104e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0136986301369863,0.0136986301369863,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 83 | 
81,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.09803921568627451,0.00392156862745098,0.04,0.92,0.8,0.0,0.0,0.0,0.8,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 84 | 82,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.03718199608610567,0.0,0.16,1.0,0.21,1.0,0.0,0.58,0.22745098039215686,0.32941176470588235,0.33,0.09,0.02,0.02,0.0,0.0,0.9,0.7,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 85 | 83,0.0,1.5869980504881154e-07,5.232311097284258e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.033268101761252444,0.033268101761252444,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 86 | 
84,0.0,2.3768737925118805e-07,2.114604864236562e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.043137254901960784,0.04,0.01,0.0,0.0,0.01,0.0,0.02,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 87 | 85,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9510763209393346,0.0019569471624266144,0.07,0.0,0.93,1.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.0,1.0,0.0,0.0,0.03,0.0,0.97,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 88 | 86,0.0,3.768214549104201e-08,3.969655340805098e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9902152641878669,0.9902152641878669,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.996078431372549,1.0,0.01,0.76,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 89 | 
87,0.0,1.0435055674442402e-07,2.7077629795837855e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.67,0.03137254901960784,1.0,1.0,0.0,0.12,0.04,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 90 | 88,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2857142857142857,0.03913894324853229,0.0,0.0,1.0,1.0,0.14,0.06,0.0,1.0,0.0784313725490196,0.08,0.06,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 91 | 89,6.99170317889438e-05,2.1739699321755006e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.996078431372549,1.0,0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 92 | 
90,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4500978473581213,0.023483365949119372,0.0,0.0,1.0,1.0,0.05,0.07,0.0,1.0,0.047058823529411764,0.05,0.09,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 93 | 91,6.99170317889438e-05,0.0,3.358939134527391e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.011741682974559686,0.0,0.0,1.0,1.0,0.5,1.0,1.0,0.4156862745098039,0.23137254901960785,0.27,0.05,0.01,0.03,0.0,0.0,0.96,0.49,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 94 | 92,0.0,1.7101896799780602e-07,6.196479307945189e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04892367906066536,0.05870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.07,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 95 | 
93,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.15294117647058825,0.15,0.16,0.0,0.0,0.0,0.0,0.83,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 96 | 94,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.007827788649706457,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.20392156862745098,0.33725490196078434,0.35,0.06,0.02,0.03,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 97 | 95,0.048755476834156805,5.364198341978641e-05,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0136986301369863,0.0136986301369863,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.9568627450980391,0.96,0.01,0.0,0.0,0.02,0.02,0.48,0.5,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0 98 | 
96,0.0,2.2174493308190104e-07,4.6414431677105765e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.01761252446183953,0.01761252446183953,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 99 | 97,2.3305677262981263e-05,1.4529365713372928e-06,2.5039364457386005e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.9176470588235294,0.5529411764705883,0.58,0.03,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 100 | 98,0.0,1.7754087779433253e-07,5.0613105595265e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08414872798434442,0.08414872798434442,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.16862745098039217,1.0,1.0,0.0,0.02,0.04,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 101 | 
99,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.3639921722113503,0.007827788649706457,1.0,1.0,0.0,0.0,0.02,0.09,0.0,1.0,0.01568627450980392,0.02,0.09,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 102 | 100,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2583170254403131,0.029354207436399216,0.0,0.0,1.0,1.0,0.11,0.05,0.0,1.0,0.058823529411764705,0.06,0.06,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 103 | 101,0.0,8.804578225310777e-07,2.5573741137879e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.6431372549019607,0.64,0.03,0.0,0.0,0.0,0.0,0.19,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 104 | 
102,0.006572200988160716,1.1304643647312603e-07,4.5269338790335065e-07,0.0,0.0,0.0,0.025974025974025976,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.023529411764705882,0.023529411764705882,1.0,0.0,0.17,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 105 | 103,9.322270905192505e-05,2.1739699321755006e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.8745098039215686,0.87,0.04,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 106 | 104,0.0,2.0290386033638005e-07,1.0118040747505918e-05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.023483365949119372,0.023483365949119372,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.047058823529411764,0.996078431372549,1.0,0.0,0.08,0.04,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 107 | 
105,0.0,1.4710529874387552e-07,1.1456272634511945e-05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.005870841487279843,0.005870841487279843,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0196078431372549,1.0,1.0,0.0,0.2,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 108 | 106,0.0,7.246566440585001e-10,7.633952578471343e-10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8610567514677103,0.0019569471624266144,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 109 | 107,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.3607843137254902,0.2627450980392157,0.34,0.03,0.01,0.03,1.0,0.46,0.0,0.54,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 110 | 
108,0.18970821292066747,0.0,1.1450928867707014e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.22352941176470587,0.22,0.04,0.0,0.0,0.0,0.02,0.11,0.23,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 111 | 109,0.0,1.4493132881170003e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.136986301369863,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.7803921568627451,1.0,0.0,1.0,0.26,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 112 | 110,0.0,1.2319162948994502e-07,4.6032734048182197e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.00392156862745098,1.0,1.0,0.0,1.0,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 113 | 
111,0.0,2.3478875267495405e-07,9.366859813784338e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.011741682974559686,0.0136986301369863,0.0,0.0,0.0,0.0,1.0,0.0,0.29,0.6352941176470588,0.6352941176470588,1.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 114 | 112,0.0,2.2319424637001803e-07,6.298010877238858e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.023483365949119372,0.023483365949119372,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 115 | 113,0.0,1.7609156450621552e-07,4.071186910098767e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.4235294117647059,1.0,1.0,0.0,0.01,0.05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 116 | 
114,0.0,3.913145877915901e-08,3.893315815020385e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5088062622309197,0.5088062622309197,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.86,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 117 | -------------------------------------------------------------------------------- /Data/kdd_selected_fn_rows.csv: -------------------------------------------------------------------------------- 1 | ,duration,protocol_type,service,flag,src_bytes,dst_bytes,land,wrong_fragment,urgent,hot,num_failed_logins,logged_in,num_compromised,root_shell,su_attempted,num_root,num_file_creations,num_shells,num_access_files,num_outbound_cmds,is_host_login,is_guest_login,count,srv_count,serror_rate,srv_serror_rate,rerror_rate,srv_rerror_rate,same_srv_rate,diff_srv_rate,srv_diff_host_rate,dst_host_count,dst_host_srv_count,dst_host_same_srv_rate,dst_host_diff_srv_rate,dst_host_same_src_port_rate,dst_host_srv_diff_host_rate,dst_host_serror_rate,dst_host_srv_serror_rate,dst_host_rerror_rate,dst_host_srv_rerror_rate,outcome,difficulty 2 | 21920,0,tcp,http,SF,325,4960,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,15,15,0.0,0.0,0.0,0.0,1.0,0.0,0.0,237,255,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,normal,21 3 | 21921,1,tcp,smtp,SF,2599,293,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,206,0.81,0.13,0.0,0.0,0.0,0.0,0.18,0.0,mailbomb,11 4 | 21922,0,tcp,private,S0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,116,13,1.0,1.0,0.0,0.0,0.11,0.06,0.0,255,13,0.05,0.05,0.0,0.0,1.0,1.0,0.0,0.0,neptune,21 5 | 21923,0,tcp,http,SF,223,5989,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,180,255,1.0,0.0,0.01,0.02,0.0,0.0,0.0,0.0,normal,21 6 | 
21924,4,tcp,pop_3,SF,32,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,144,0.56,0.02,0.0,0.0,0.0,0.0,0.15,0.0,guess_passwd,15 7 | 21925,0,icmp,eco_i,SF,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,65,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1,119,1.0,0.0,1.0,0.27,0.0,0.0,0.0,0.0,saint,15 8 | 21926,4,tcp,pop_3,SF,26,93,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,140,123,0.87,0.02,0.01,0.02,0.0,0.0,0.07,0.0,guess_passwd,9 9 | 21927,0,tcp,http,SF,219,18353,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8,8,0.0,0.0,0.0,0.0,1.0,0.0,0.0,8,255,1.0,0.0,0.12,0.03,0.0,0.0,0.0,0.0,normal,21 10 | 21928,0,tcp,http,SF,239,1905,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,4,4,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 11 | 21929,0,tcp,ftp_data,SF,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0.0,0.0,0.0,0.0,1.0,0.0,0.0,6,21,0.83,0.33,0.83,0.14,0.0,0.0,0.0,0.0,warezmaster,5 12 | 21930,280,tcp,ftp_data,SF,283618,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0.0,0.0,0.0,0.0,1.0,0.0,0.0,4,4,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,warezmaster,19 13 | -------------------------------------------------------------------------------- /Data/kdd_selected_fn_rows_122.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121 2 | 
0,0.0,2.3551340931901254e-07,3.786440478921786e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.9294117647058824,1.0,1.0,0.0,0.0,0.01,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 3 | 1,2.3305677262981263e-05,1.8833826179080418e-06,2.2367481054921036e-07,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.807843137254902,0.81,0.13,0.0,0.0,0.0,0.0,0.18,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 4 | 2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.22700587084148727,0.025440313111545987,1.0,1.0,0.0,0.0,0.11,0.06,0.0,1.0,0.050980392156862744,0.05,0.05,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0 5 | 
3,0.0,1.6159843162504553e-07,4.571974199246487e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.7058823529411764,1.0,1.0,0.0,0.01,0.02,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 6 | 4,9.322270905192505e-05,2.3189012609872004e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.5647058823529412,0.56,0.02,0.0,0.0,0.0,0.0,0.15,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 7 | 5,0.0,1.4493132881170003e-08,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.12720156555772993,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.00392156862745098,0.4666666666666667,1.0,0.0,1.0,0.27,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 8 | 
6,9.322270905192505e-05,1.8841072745521005e-08,7.099575897978349e-08,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.5490196078431373,0.4823529411764706,0.87,0.02,0.01,0.02,0.0,0.0,0.07,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 9 | 7,0.0,1.5869980504881154e-07,1.4010593167268455e-05,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.015655577299412915,0.015655577299412915,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.03137254901960784,1.0,1.0,0.0,0.12,0.03,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 10 | 8,0.0,1.7319293792998152e-07,1.4542679661987908e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.007827788649706457,0.007827788649706457,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 11 | 
9,0.0,8.695879728702001e-09,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0019569471624266144,0.0019569471624266144,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.023529411764705882,0.08235294117647059,0.83,0.33,0.83,0.14,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 12 | 10,0.006525589633634754,0.00020552566807458369,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.003913894324853229,0.003913894324853229,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.01568627450980392,0.01568627450980392,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 13 | -------------------------------------------------------------------------------- /Data/kdd_selected_fp_rows.csv: -------------------------------------------------------------------------------- 1 | 
,duration,protocol_type,service,flag,src_bytes,dst_bytes,land,wrong_fragment,urgent,hot,num_failed_logins,logged_in,num_compromised,root_shell,su_attempted,num_root,num_file_creations,num_shells,num_access_files,num_outbound_cmds,is_host_login,is_guest_login,count,srv_count,serror_rate,srv_serror_rate,rerror_rate,srv_rerror_rate,same_srv_rate,diff_srv_rate,srv_diff_host_rate,dst_host_count,dst_host_srv_count,dst_host_same_srv_rate,dst_host_diff_srv_rate,dst_host_same_src_port_rate,dst_host_srv_diff_host_rate,dst_host_serror_rate,dst_host_srv_serror_rate,dst_host_rerror_rate,dst_host_srv_rerror_rate,outcome,difficulty 2 | 570,0,tcp,http,SF,257,5320,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,15,15,0.07,0.07,0.0,0.0,1.0,0.0,0.0,255,255,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,normal,21 3 | 571,0,tcp,http,SF,239,3222,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,6,6,0.0,0.0,0.0,0.0,1.0,0.0,0.0,255,7,0.03,0.01,0.0,0.0,0.02,0.0,0.02,0.0,normal,18 4 | -------------------------------------------------------------------------------- /Data/kdd_selected_fp_rows_122.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121 2 | 
0,0.0,1.8623675752303453e-07,4.0612627717467545e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.029354207436399216,0.029354207436399216,0.07,0.07,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 3 | 1,0.0,1.7319293792998152e-07,2.4596595207834666e-06,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.011741682974559686,0.011741682974559686,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.027450980392156862,0.03,0.01,0.0,0.0,0.02,0.0,0.02,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0 4 | -------------------------------------------------------------------------------- /Data/kitsune_selected_fn_rows.csv: -------------------------------------------------------------------------------- 1 | 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114 2 | 
0.280254660522236,2.220446049250313e-16,0.0,0.37040338218178065,1.1102230246251565e-16,2.7755575615628914e-17,0.47819725268080876,7.771561172376096e-16,6.938893903907228e-17,0.914858663555604,5.551115123125783e-16,1.3877787807814457e-16,0.9063202588020085,1.4432899320127035e-15,2.0816681711721685e-16,0.280254660522236,2.220446049250313e-16,0.0,0.37040338218178065,1.1102230246251565e-16,2.7755575615628914e-17,0.47819725268080876,7.771561172376096e-16,6.938893903907228e-17,0.914858663555604,5.551115123125783e-16,1.3877787807814457e-16,0.9063202588020085,1.4432899320127035e-15,2.0816681711721685e-16,0.2802546605222346,3.3306690738754696e-16,2.7343496911136356e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3704033821817849,5.551115123125783e-16,8.830111086766124e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318816,0.4781972525167385,5.551115123125783e-16,0.0,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997688604,0.9148586635566355,1.4432899320127035e-15,4.002673914538235e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9063202588020607,1.5543122344752192e-15,1.0986106246946292e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555104354783e-07,0.280254660522236,2.0497976838808533e-13,4.080875319164382e-24,0.37040338218178076,1.7823163284156343e-13,2.1585021863806234e-23,0.4781972526808088,4.810759630458732e-11,2.027123339725902e-10,0.914858663555604,7.721683241929707e-06,3.0886505759545906e-05,0.9063202588020086,1.579894951902205e-05,6.319481123563892e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252
e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 3 | 0.2799947351127922,2.220446049250313e-16,0.0,0.3701969326826342,1.1102230246251565e-16,2.7755575615628914e-17,0.478106189112748,3.3306690738754696e-16,4.163336342344337e-17,0.9148200886919201,8.881784197001252e-16,5.551115123125783e-17,0.9062757647883759,1.1102230246251565e-16,2.3592239273284576e-16,0.2799947351127922,2.220446049250313e-16,0.0,0.3701969326826342,1.1102230246251565e-16,2.7755575615628914e-17,0.478106189112748,3.3306690738754696e-16,4.163336342344337e-17,0.9148200886919201,8.881784197001252e-16,5.551115123125783e-17,0.9062757647883759,1.1102230246251565e-16,2.3592239273284576e-16,0.2799947351527995,3.3306690738754696e-16,2.7343496911136356e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3701969327839002,3.3306690738754696e-16,3.100458945137774e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.47810618902664564,3.3306690738754696e-16,3.427685246748524e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997688604,0.914820088714274,8.881784197001252e-16,6.6977544621593665e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9062757647907227,1.2212453270876722e-15,7.870794405118176e-09,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555105929883e-07,0.27999473511279227,2.0523177992939761e-13,4.066468318844383e-24,0.3701969326826343,1.7838300085031017e-13,2.1590266501365014e-23,0.47810618911274805,4.812626177942776e-11,2.0278678770346577e-10,0.9148200886919201,7.722145374803156e-06,3.0888354262455694e-05,0.9062757647883761,1.5799753099600397e-05,6.319802545635527e-05,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.278935756
51596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 4 | 0.2805987069599485,2.220446049250313e-16,0.0,0.3706757925117492,1.1102230246251565e-16,2.7755575615628914e-17,0.478315538749441,7.771561172376096e-16,6.938893903907228e-17,0.9148878227921842,5.551115123125783e-16,1.3877787807814457e-16,0.9063365573340505,1.4432899320127035e-15,2.0816681711721685e-16,0.2805987069599485,2.220446049250313e-16,0.0,0.3706757925117492,1.1102230246251565e-16,2.7755575615628914e-17,0.478315538749441,7.771561172376096e-16,6.938893903907228e-17,0.9148878227921842,5.551115123125783e-16,1.3877787807814457e-16,0.9063365573340505,1.4432899320127035e-15,2.0816681711721685e-16,0.2805987069599471,3.3306690738754696e-16,2.7343496911136356e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3706757925117534,5.551115123125783e-16,8.830111086766124e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.4783155385853293,5.551115123125783e-16,1.4615699206466104e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.3771015699768817,0.9148878227932152,1.4432899320127035e-15,3.8669503155119855e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9063365573341025,1.5543122344752192e-15,1.0937387162623935e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555106758343e-07,0.2805987069599485,2.0463099192733304e-13,4.075234411270268e-24,0.37067579251174926,1.7802370565490212e-13,2.1566268832570212e-23,0.478315538749441,4.8092569117490164e-11,2.0265250619961854e-10,0.9148878227921842,7.721400140906308e-06,3.088537337297416e-05,0.9063365573340507,1.5798657842228736e-05,6.319364456534632e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.2204460
49250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 5 | 0.2803379172581358,2.220446049250313e-16,1.3877787807814457e-17,0.37046863801205787,1.1102230246251565e-16,2.7755575615628914e-17,0.47822416342170154,3.3306690738754696e-16,4.163336342344337e-17,0.9148491876384832,8.881784197001252e-16,5.551115123125783e-17,0.9062920573508189,1.1102230246251565e-16,2.3592239273284576e-16,0.2803379172581358,2.220446049250313e-16,1.3877787807814457e-17,0.37046863801205787,1.1102230246251565e-16,2.7755575615628914e-17,0.47822416342170154,3.3306690738754696e-16,4.163336342344337e-17,0.9148491876384832,8.881784197001252e-16,5.551115123125783e-17,0.9062920573508189,1.1102230246251565e-16,2.3592239273284576e-16,0.28033791729813334,3.3306690738754696e-16,3.1004610545615208e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.3703331894947096,0.3704686381133091,3.3306690738754696e-16,2.9231407849827917e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.47822416333555423,3.3306690738754696e-16,3.427685246748524e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.3771015699768817,0.9148491876608361,8.881784197001252e-16,6.777020555848168e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9062920573531656,1.2212453270876722e-15,7.93835613910332e-09,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555105193237e-07,0.28033791725813584,2.0488324039808714e-13,4.060842209773768e-24,0.37046863801205804,1.78175382227851e-13,2.1571497582989e-23,0.4782241634217016,4.8111226399403796e-11,2.027269265242486e-10,0.9148491876384831,7.721862244880589e-06,3.088722176029001e-05,0.9062920573508191,1.5799461393649428
e-05,6.319685866943758e-05,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 6 | 0.28096147507675523,2.220446049250313e-16,0.0,0.3709630605056613,1.1102230246251565e-16,0.0,0.47844022350367976,7.771561172376096e-16,6.938893903907228e-17,0.9149182065970182,5.551115123125783e-16,1.3877787807814457e-16,0.906352977182614,1.4432899320127035e-15,2.0816681711721685e-16,0.28096147507675523,2.220446049250313e-16,0.0,0.3709630605056613,1.1102230246251565e-16,0.0,0.47844022350367976,7.771561172376096e-16,6.938893903907228e-17,0.9149182065970182,5.551115123125783e-16,1.3877787807814457e-16,0.906352977182614,1.4432899320127035e-15,2.0816681711721685e-16,0.28096147507675384,3.3306690738754696e-16,2.3109472668814135e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3709630605056655,5.551115123125783e-16,8.769422299437224e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.47844022333952463,5.551115123125783e-16,1.0334860189864514e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997687994,0.9149182065980493,1.4432899320127035e-15,3.8669503155119855e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9063529771826664,1.5543122344752192e-15,1.088844997498839e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555106025746e-07,0.2809614750767553,2.0426438211005143e-13,4.069644738900715e-24,0.3709630605056614,1.7780484353683426e-13,2.1547568327682168e-23,0.4784402235036799,4.80775464
85902463e-11,2.0259271166631698e-10,0.9149182065970182,7.721117059662701e-06,3.08842410655216e-05,0.9063529771826142,1.5798366176103996e-05,6.319247793772693e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 7 | 0.2806684923435774,2.220446049250313e-16,0.0,0.3707303784200399,1.1102230246251565e-16,2.7755575615628914e-17,0.4783378632951442,3.3306690738754696e-16,4.163336342344337e-17,0.9148774700071082,8.881784197001252e-16,5.551115123125783e-17,0.9063082690264382,1.1102230246251565e-16,2.498001805406602e-16,0.2806684923435774,2.220446049250313e-16,0.0,0.3707303784200399,1.1102230246251565e-16,2.7755575615628914e-17,0.4783378632951442,3.3306690738754696e-16,4.163336342344337e-17,0.9148774700071082,8.881784197001252e-16,5.551115123125783e-17,0.9063082690264382,1.1102230246251565e-16,2.498001805406602e-16,0.28066849238356334,3.3306690738754696e-16,3.2681729544847826e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3707303785212737,3.3306690738754696e-16,2.7343478037344937e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.37478562723188147,0.47833786320895266,5.551115123125783e-16,3.5801004938385006e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.3771015699768802,0.9148774700294611,8.881784197001252e-16,6.617538850228755e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9063082690287845,1.2212453270876722e-15,7.870794405118176e-09,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555107615384e-07,0.28066849234357744,2.045481624084771e-13,4.0552066106107545e-24,0.3707
303784200401,1.7797561582274686e-13,2.1552744091886798e-23,0.47833786329514427,4.80962015589796e-11,2.026670972360887e-10,0.9148774700071082,7.721579134091185e-06,3.08860893346539e-05,0.9063082690264384,1.5799169698302027e-05,6.319569192493149e-05,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 8 | 0.281028229631663,2.220446049250313e-16,1.3877787807814457e-17,0.3710152495173278,1.1102230246251565e-16,2.7755575615628914e-17,0.47846151936525005,3.3306690738754696e-16,4.163336342344337e-17,0.9149076572917396,8.881784197001252e-16,5.551115123125783e-17,0.9063246694135725,1.1102230246251565e-16,2.3592239273284576e-16,0.281028229631663,2.220446049250313e-16,1.3877787807814457e-17,0.3710152495173278,1.1102230246251565e-16,2.7755575615628914e-17,0.47846151936525005,3.3306690738754696e-16,4.163336342344337e-17,0.9149076572917396,8.881784197001252e-16,5.551115123125783e-17,0.9063246694135725,1.1102230246251565e-16,2.3592239273284576e-16,0.2810282296716417,3.3306690738754696e-16,3.2681729544847826e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470967,0.3710152496185505,3.3306690738754696e-16,2.9231407849827917e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.4784615192790126,3.3306690738754696e-16,3.427685246748524e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997687994,0.9149076573140923,8.881784197001252e-16,6.617538850228755e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.906324669415918
8,1.2212453270876722e-15,7.93835613910332e-09,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555106043216e-07,0.2810282296316631,2.0418477601404626e-13,4.04964127560242e-24,0.3710152495173279,1.777586364724921e-13,2.1534054400373545e-23,0.47846151936525017,4.8081179132894823e-11,2.0260730105836757e-10,0.9149076572917396,7.721296043021414e-06,3.088495698789739e-05,0.9063246694135726,1.579887801361781e-05,6.31945252230776e-05,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 9 | 0.27797343622751647,2.220446049250313e-16,0.0,0.3685908514838584,1.1102230246251565e-16,0.0,0.4774194329946105,7.771561172376096e-16,6.938893903907228e-17,0.9147293561672178,5.551115123125783e-16,1.3877787807814457e-16,0.9063476768395735,1.4432899320127035e-15,2.0816681711721685e-16,0.27797343622751647,2.220446049250313e-16,0.0,0.3685908514838584,1.1102230246251565e-16,0.0,0.4774194329946105,7.771561172376096e-16,6.938893903907228e-17,0.9147293561672178,5.551115123125783e-16,1.3877787807814457e-16,0.9063476768395735,1.4432899320127035e-15,2.0816681711721685e-16,0.277973436227515,3.3306690738754696e-16,1.7900520510316653e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470984,0.3685908514838625,5.551115123125783e-16,8.769422299437224e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.47741943283080485,5.551115123125783e-16,1.790050274674826e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997688254,0.9147293561682492,1.443289932012703
5e-15,3.8669503155119855e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.9063476768396257,1.5543122344752192e-15,1.0839291963993247e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.331555510688719e-07,0.27797343622751647,2.072948740484636e-13,4.088459923812038e-24,0.3685908514838585,1.7961682795401765e-13,2.1543406712240317e-23,0.47741943299461054,4.8063176755452206e-11,2.0253280700227286e-10,0.914729356167218,7.720833930442544e-06,3.088310856582757e-05,0.9063476768395735,1.5798074513700715e-05,6.319131132482226e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 10 | 
0.2783723081664282,2.220446049250313e-16,0.0,0.368906683236743,1.1102230246251565e-16,0.0,0.4775563812306209,7.771561172376096e-16,6.938893903907228e-17,0.914762083695074,5.551115123125783e-16,1.3877787807814457e-16,0.9063643288510368,1.4432899320127035e-15,2.0816681711721685e-16,0.2783723081664282,2.220446049250313e-16,0.0,0.368906683236743,1.1102230246251565e-16,0.0,0.4775563812306209,7.771561172376096e-16,6.938893903907228e-17,0.914762083695074,5.551115123125783e-16,1.3877787807814457e-16,0.9063643288510368,1.4432899320127035e-15,2.0816681711721685e-16,0.2783723081664268,3.3306690738754696e-16,1.7900520510316653e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470984,0.3689066832367472,5.551115123125783e-16,8.646767024611535e-09,0.018865528650366503,5.551115123125783e-17,0.007925561546165463,0.37478562723188147,0.4775563810667677,5.551115123125783e-16,2.3109449354130618e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.3771015699768833,0.9147620836961052,1.4432899320127035e-15,3.726286612604213e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.3390947586503122,0.906364328851089,1.5543122344752192e-15,1.0937387162623935e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.3315555105236492e-07,0.2783723081664282,2.0688463371974126e-13,4.082883792971333e-24,0.36890668323674314,1.7937363964087868e-13,2.1524670722883074e-23,0.47755638123062094,4.804812774486383e-11,2.024729372200746e-10,0.914762083695074,7.72055082164808e-06,3.0881976148175615e-05,0.9063643288510368,1.5797782861997343e-05,6.319014475488699e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.224
7763382619056e-13,4.879286697625974e-07 11 | 0.2786850256725189,2.220446049250313e-16,0.0,0.36915405135431806,1.1102230246251565e-16,0.0,0.47766380439974315,7.771561172376096e-16,6.938893903907228e-17,0.91478915714549,5.551115123125783e-16,1.3877787807814457e-16,0.9063804206549466,1.4432899320127035e-15,2.0816681711721685e-16,0.2786850256725189,2.220446049250313e-16,0.0,0.36915405135431806,1.1102230246251565e-16,0.0,0.47766380439974315,7.771561172376096e-16,6.938893903907228e-17,0.91478915714549,5.551115123125783e-16,1.3877787807814457e-16,0.9063804206549466,1.4432899320127035e-15,2.0816681711721685e-16,0.27868502567251746,3.3306690738754696e-16,1.7900520510316653e-09,0.018693502231513226,1.3877787807814457e-17,0.008387074228685273,0.37033318949470984,0.36915405135432217,5.551115123125783e-16,8.769422299437224e-09,0.018865528650366503,6.938893903907228e-17,0.007925561546165463,0.3747856272318815,0.47766380423585225,5.551115123125783e-16,1.790050274674826e-09,0.0189843321895119,2.7755575615628914e-17,0.007421369060361796,0.37710156997688254,0.9147891571465209,1.4432899320127035e-15,3.4276850802150705e-09,0.018985929229117127,5.551115123125783e-17,0.007052146934873102,0.33909475865031224,0.9063804206549988,1.5543122344752192e-15,1.088844997498839e-08,0.01898608900178045,1.3877787807814457e-16,6.344456383978329e-12,1.331555510606942e-07,0.2786850256725189,2.0656293726950535e-13,4.077147497213059e-24,0.36915405135431817,1.7918315816914066e-13,2.150586052765102e-23,0.4776638043997432,4.803310410908542e-11,2.0241309859203172e-10,0.91478915714549,7.72026773161494e-06,3.088084380555845e-05,0.9063804206549467,1.579749122085709e-05,6.318897822719776e-05,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832
e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 12 | -------------------------------------------------------------------------------- /Data/kitsune_selected_fp_rows.csv: -------------------------------------------------------------------------------- 1 | 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114 2 | 0.01354294524065905,2.220446049250313e-16,0.0,0.017234868500703125,1.1102230246251565e-16,0.0,0.02076907587465317,5.551115123125783e-16,0.0,0.046191928466426196,6.661338147750939e-16,1.3877787807814457e-17,0.22772968017893835,8.881784197001252e-16,1.3877787807814457e-17,0.01354294524065905,2.220446049250313e-16,0.0,0.017234868500703125,1.1102230246251565e-16,0.0,0.02076907587465317,5.551115123125783e-16,0.0,0.046191928466426196,6.661338147750939e-16,1.3877787807814457e-17,0.22772968017893835,8.881784197001252e-16,1.3877787807814457e-17,0.013542945240659006,3.3306690738754696e-16,3.1004610545615208e-09,0.8844442203517303,0.10984870723675216,0.00838707422868507,0.3703331881329597,0.01723486850071999,3.3306690738754696e-16,1.4615703647358202e-09,0.8939410181474639,0.10609876992048106,0.00792556154616539,0.3747856262702262,0.020769075870188863,5.551115123125783e-16,2.5315133322934003e-09,0.9027349228778084,0.0963629643423438,0.007421369060361715,0.3771015694256938,0.046191909071128565,8.881784197001252e-16,3.4276850802150705e-09,0.902489313051176,0.09794161150805812,0.007052146934872531,0.33909475589672655,0.22772174087400956,9.992007221626409e-16,1.2990716780070244e-08,0.9019669165750824,0.09911494521525589,9.009777147434773e-12,1.379327795373708e-07,0.013542945240659052,6.165457871902446e-12,8.090618310728096e-22,0.0172348685007031
25,6.207321863716613e-12,8.4418927553020715e-22,0.020769075874653174,3.7754651526688884e-12,9.493254426306788e-22,0.04619190906976472,3.8846331159041915e-12,1.000389729199492e-21,0.2277217408738933,4.6429093619613e-12,2.9488289090842585e-12,0.33503621042067216,1.1102230246251565e-16,3.100461110072672e-09,0.8826068513034514,0.10984870723675218,1.0982643872898905e-13,1.0443921649686949e-07,0.38084750269311934,2.220446049250313e-16,1.4615703647358202e-09,0.8925163894061073,0.10609876992048106,9.79036601432234e-14,1.1910506764766707e-07,0.4291894670163571,2.220446049250313e-16,2.5315133322934003e-09,0.9027349228778083,0.0963629643423438,1.2465459543191555e-13,1.102629432182675e-07,0.4437169989057574,6.661338147750939e-16,3.4276850802150705e-09,0.9024893130511761,0.09794161150805812,2.241524744687994e-13,3.5007761166922293e-07,0.44410923258102475,9.992007221626409e-16,1.2990716835581395e-08,0.9019669165750824,0.09911494521525592,4.579512317466954e-13,5.054341184520875e-07 3 | 0.518534406370792,3.3306690738754696e-16,1.3877787807814457e-17,0.7444845376327693,0.0,6.938893903907228e-17,0.9394544373252108,3.3306690738754696e-16,8.326672684688674e-17,0.6351116897396527,6.661338147750939e-16,1.3877787807814457e-17,0.3644724695211713,8.881784197001252e-16,0.0,0.518534406370792,3.3306690738754696e-16,1.3877787807814457e-17,0.7444845376327693,0.0,6.938893903907228e-17,0.9394544373252108,3.3306690738754696e-16,8.326672684688674e-17,0.6351116897396527,6.661338147750939e-16,1.3877787807814457e-17,0.3644724695211713,8.881784197001252e-16,0.0,0.5185344063707992,3.3306690738754696e-16,5.46869932671612e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7444845376333921,3.3306690738754696e-16,6.200917945786699e-09,0.018865528650366392,5.551115123125783e-17,0.007925561546165463,0.3747856272318822,0.9394544370129385,3.3306690738754696e-16,3.427685246748524e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689126,0.635111
6897403769,6.661338147750939e-16,5.0630264425421956e-09,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031014,0.36447246952118906,1.3322676295501878e-15,1.088844997498839e-08,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555201510217e-07,0.5185344063707918,6.311241603213779e-11,2.5215493012790347e-10,0.7444845376327696,2.4050853329537157e-08,9.620326790821982e-08,0.939454437325211,5.715309072473481e-06,2.28611164934692e-05,0.6351116897396526,3.808478744008832e-05,0.00015233335979071224,0.36447246952117135,4.443115004120724e-05,0.00017771671569797177,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 4 | 
0.5188089295276193,3.3306690738754696e-16,1.3877787807814457e-17,0.744692461205224,0.0,6.938893903907228e-17,0.9395461152996408,3.3306690738754696e-16,8.326672684688674e-17,0.6351418852719892,6.661338147750939e-16,1.3877787807814457e-17,0.3644890102663669,8.881784197001252e-16,0.0,0.5188089295276193,3.3306690738754696e-16,1.3877787807814457e-17,0.744692461205224,0.0,6.938893903907228e-17,0.9395461152996408,3.3306690738754696e-16,8.326672684688674e-17,0.6351418852719892,6.661338147750939e-16,1.3877787807814457e-17,0.3644890102663669,8.881784197001252e-16,0.0,0.5188089295276266,3.3306690738754696e-16,5.66064156659607e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7446924612058465,3.3306690738754696e-16,6.200917945786699e-09,0.018865528650366392,5.551115123125783e-17,0.007925561546165463,0.3747856272318822,0.9395461149873359,3.3306690738754696e-16,3.726286723626515e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689087,0.6351418852727135,6.661338147750939e-16,5.269765013249383e-09,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031025,0.3644890102663846,1.3322676295501878e-15,1.0937387162623935e-08,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555200249186e-07,0.5188089295276191,6.306239965186044e-11,2.51955189056821e-10,0.7446924612052241,2.4040338714938064e-08,9.616120960884882e-08,0.939546115299641,5.714450213390575e-06,2.2857681096456827e-05,0.6351418852719892,3.8082776167490745e-05,0.00015232531531311611,0.36448901026636693,4.442911039752304e-05,0.00017770855784822483,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2
247763382619056e-13,4.879286697625974e-07 5 | 0.5185673909859693,3.3306690738754696e-16,6.938893903907228e-17,0.744514503678267,0.0,6.938893903907228e-17,0.9394566080134366,3.3306690738754696e-16,1.3877787807814457e-17,0.6350640673782983,8.881784197001252e-16,6.938893903907228e-17,0.36444012260450265,7.771561172376096e-16,4.163336342344337e-17,0.5185673909859693,3.3306690738754696e-16,6.938893903907228e-17,0.744514503678267,0.0,6.938893903907228e-17,0.9394566080134366,3.3306690738754696e-16,1.3877787807814457e-17,0.6350640673782983,8.881784197001252e-16,6.938893903907228e-17,0.36444012260450265,7.771561172376096e-16,4.163336342344337e-17,0.518567391904309,5.551115123125783e-16,6.200922220145344e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7445145045193263,2.220446049250313e-16,8.203043466714632e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.939456607977772,3.3306690738754696e-16,7.59453994136905e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.3771015699768908,0.6350640673907428,3.3306690738754696e-16,1.218461953556016e-08,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031025,0.36444012260515307,8.881784197001252e-16,8.005347662543016e-09,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555193197147e-07,0.5185673909859692,6.314116277346735e-11,2.5226998472633017e-10,0.7445145036782671,2.4057386691214506e-08,9.622940139238222e-08,0.9394566080134368,5.715889993604202e-06,2.2863440151441452e-05,0.6350640673782983,3.808803911772363e-05,0.00015234636551049495,0.3644401226045027,4.443513966259005e-05,0.0001777326727653266,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-0
7,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 6 | 0.5190404904616491,3.3306690738754696e-16,1.3877787807814457e-17,0.7448634223287383,0.0,6.938893903907228e-17,0.9396222601937421,3.3306690738754696e-16,8.326672684688674e-17,0.6351710309659276,6.661338147750939e-16,1.3877787807814457e-17,0.3645054907649642,8.881784197001252e-16,0.0,0.5190404904616491,3.3306690738754696e-16,1.3877787807814457e-17,0.7448634223287383,0.0,6.938893903907228e-17,0.9396222601937421,3.3306690738754696e-16,8.326672684688674e-17,0.6351710309659276,6.661338147750939e-16,1.3877787807814457e-17,0.3645054907649642,8.881784197001252e-16,0.0,0.5190404904616565,3.3306690738754696e-16,5.7542122733345025e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7448634223293606,3.3306690738754696e-16,6.200917945786699e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.9396222598814097,3.3306690738754696e-16,3.5801004938385006e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.377101569976891,0.6351710309666516,6.661338147750939e-16,5.1674297063541985e-09,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.3390947586503102,0.36450549076498195,1.3322676295501878e-15,1.1034610225557628e-08,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555190803397e-07,0.5190404904616491,6.301245773735533e-11,2.517556951778236e-10,0.7448634223287383,2.4029831115278593e-08,9.611917934137956e-08,0.9396222601937422,5.713591552952802e-06,2.285424649400044e-05,0.6351710309659276,3.808076509339034e-05,0.00015231727162941263,0.3645054907649642,4.4427070939681416e-05,0.000177700400741746,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-0
7,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 7 | 0.5188637704723204,3.3306690738754696e-16,6.938893903907228e-17,0.7447412527975079,0.0,6.938893903907228e-17,0.9395562046240746,3.3306690738754696e-16,1.3877787807814457e-17,0.63509479843317,8.881784197001252e-16,6.938893903907228e-17,0.3644566940839522,7.771561172376096e-16,4.163336342344337e-17,0.5188637704723204,3.3306690738754696e-16,6.938893903907228e-17,0.7447412527975079,0.0,6.938893903907228e-17,0.9395562046240746,3.3306690738754696e-16,1.3877787807814457e-17,0.63509479843317,8.881784197001252e-16,6.938893903907228e-17,0.3644566940839522,7.771561172376096e-16,4.163336342344337e-17,0.5188637713904567,5.551115123125783e-16,6.200922220145344e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7447412536384558,2.220446049250313e-16,8.203043466714632e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.9395562045883625,3.3306690738754696e-16,7.523891565330842e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689104,0.635094798445614,3.3306690738754696e-16,1.2228370593891924e-08,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031025,0.3644566940846025,8.881784197001252e-16,7.870794405118176e-09,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555195398189e-07,0.5188637704723202,6.309112249497773e-11,2.52070173620722e-10,0.744741252797508,2.404686990556013e-08,9.61873344229186e-08,0.9395562046240749,5.715031056445433e-06,2.2860004442138048e-05,0.63509479843317,3.808602752427978e-05,0.00015233831974966556,0.36445669408395226,4.443309965489701e-05,0.00017772451345974034,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882
e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 8 | 0.5194191638832706,3.3306690738754696e-16,1.3877787807814457e-17,0.7451610530587566,0.0,6.938893903907228e-17,0.9397516670895597,3.3306690738754696e-16,8.326672684688674e-17,0.6352037770301969,6.661338147750939e-16,1.3877787807814457e-17,0.3645221778774559,8.881784197001252e-16,0.0,0.5194191638832706,3.3306690738754696e-16,1.3877787807814457e-17,0.7451610530587566,0.0,6.938893903907228e-17,0.9397516670895597,3.3306690738754696e-16,8.326672684688674e-17,0.6352037770301969,6.661338147750939e-16,1.3877787807814457e-17,0.3645221778774559,8.881784197001252e-16,0.0,0.5194191638832778,3.3306690738754696e-16,5.7542122733345025e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7451610530593794,3.3306690738754696e-16,6.200917945786699e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.9397516667771819,3.3306690738754696e-16,3.2681696238157087e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689154,0.6352037770309207,6.661338147750939e-16,5.468693553556392e-09,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.3390947586503103,0.36452217787747354,1.3322676295501878e-15,1.1034610225557628e-08,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.331555519539539e-07,0.5194191638832705,6.296254864300579e-11,2.5155650446209763e-10,0.7451610530587567,2.401933227776576e-08,9.607718421753021e-08,0.9397516670895598,5.712733139735013e-06,2.285081288043951e-05,0.6352037770301969,3.8078754229151875e-05,0.000152309228785072,0.364522177877456,4.4425031668
80965e-05,0.00017769224438305686,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 9 | 0.5197097720955041,3.3306690738754696e-16,1.3877787807814457e-17,0.7453829121892855,0.0,6.938893903907228e-17,0.9398492314872111,3.3306690738754696e-16,8.326672684688674e-17,0.6352343709944956,6.661338147750939e-16,1.3877787807814457e-17,0.3645387414889256,8.881784197001252e-16,0.0,0.5197097720955041,3.3306690738754696e-16,1.3877787807814457e-17,0.7453829121892855,0.0,6.938893903907228e-17,0.9398492314872111,3.3306690738754696e-16,8.326672684688674e-17,0.6352343709944956,6.661338147750939e-16,1.3877787807814457e-17,0.3645387414889256,8.881784197001252e-16,0.0,0.5197097720955112,3.3306690738754696e-16,5.46869932671612e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7453829121899082,2.220446049250313e-16,6.286451914583324e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.939849231174799,3.3306690738754696e-16,2.9231397857820696e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689215,0.6352343709952195,6.661338147750939e-16,5.468693553556392e-09,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.3390947586503103,0.3645387414889433,1.3322676295501878e-15,1.0986106246946292e-08,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555196600112e-07,0.519709772095504,6.29127326307439e-11,2.513575825534826e-10,0.7453829121892858,2.4008841163670227e-08,9.603521993033558e-08,0.9398492314872113,5.711874944574405e-06,2.2847380139071876e-05,0.6352343709944956,3.807674356793571e-05,0.00015230
11867527194,0.3645387414889257,4.442299258419524e-05,0.00017768408876929172,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 10 | 0.5191257670827654,3.3306690738754696e-16,6.938893903907228e-17,0.7449384253083543,0.0,6.938893903907228e-17,0.9396433739671934,3.3306690738754696e-16,1.3877787807814457e-17,0.6351246896699686,8.881784197001252e-16,5.551115123125783e-17,0.36447321737002725,7.771561172376096e-16,4.163336342344337e-17,0.5191257670827654,3.3306690738754696e-16,6.938893903907228e-17,0.7449384253083543,0.0,6.938893903907228e-17,0.9396433739671934,3.3306690738754696e-16,1.3877787807814457e-17,0.6351246896699686,8.881784197001252e-16,5.551115123125783e-17,0.36447321737002725,7.771561172376096e-16,4.163336342344337e-17,0.5191257680006377,3.3306690738754696e-16,6.1141917639950805e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.7449384261491572,2.220446049250313e-16,8.203043466714632e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.939643373931434,3.3306690738754696e-16,7.523891565330842e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.37710156997689215,0.6351246896824129,3.3306690738754696e-16,1.2358694401193304e-08,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031036,0.36447321737067756,8.881784197001252e-16,7.802647750132508e-09,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555198996353e-07,0.5191257670827653,6.304115711802156e-11,2.518706214586772e-10,0.7449384253083544,2.4036360504080573e-08,9.614529696783424e-08,0.9396433739671936,5.714172328
020952e-06,2.285656956774871e-05,0.6351246896699687,3.808401613174027e-05,0.0001523302747923573,0.36447321737002725,4.443105983333552e-05,0.00017771635489857837,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 11 | 0.5193730968636884,3.3306690738754696e-16,6.938893903907228e-17,0.7451230035805713,0.0,6.938893903907228e-17,0.9397252581432776,3.3306690738754696e-16,1.3877787807814457e-17,0.6351542238535683,8.881784197001252e-16,5.551115123125783e-17,0.3644897201666083,7.771561172376096e-16,4.163336342344337e-17,0.5193730968636884,3.3306690738754696e-16,6.938893903907228e-17,0.7451230035805713,0.0,6.938893903907228e-17,0.9397252581432776,3.3306690738754696e-16,1.3877787807814457e-17,0.6351542238535683,8.881784197001252e-16,5.551115123125783e-17,0.3644897201666083,7.771561172376096e-16,4.163336342344337e-17,0.5193730977812713,3.3306690738754696e-16,6.200922220145344e-09,0.018693502231513337,5.551115123125783e-17,0.008387074228685273,0.37033318949470917,0.745123004421215,2.220446049250313e-16,8.203043466714632e-09,0.018865528650366392,6.938893903907228e-17,0.007925561546165463,0.3747856272318822,0.9397252581074713,3.3306690738754696e-16,7.59453994136905e-09,0.018984332189511566,5.551115123125783e-17,0.007421369060361796,0.3771015699768921,0.6351542238660122,3.3306690738754696e-16,1.2358694401193304e-08,0.01898592922911624,1.6653345369377348e-16,0.007052146934873102,0.33909475865031036,0.3644897201672586,8.881784197001252e-16,7.664536838536407e-09,0.018986089001780226,1.3877787807814457e-16,6.344456383978339e-12,1.3315555203986475e
-07,0.5193730968636883,6.299125932276588e-11,2.516713221507746e-10,0.7451230035805714,2.4025858299023185e-08,9.61032882889027e-08,0.9397252581432776,5.7133138034318566e-06,2.2853135508682434e-05,0.6351542238535683,3.8082004938944685e-05,0.00015232223063393028,0.36448972016660836,4.442902019776516e-05,0.00017770819708128063,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 12 | -------------------------------------------------------------------------------- /Data/kitsune_selected_tp_rows.csv: -------------------------------------------------------------------------------- 1 | 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114 2 | 
0.4426662511319454,3.3306690738754696e-16,4.163336342344337e-17,0.6636736609039069,1.1102230246251565e-16,6.938893903907228e-17,0.903741973310143,3.3306690738754696e-16,1.3877787807814457e-17,0.6416921052801715,8.881784197001252e-16,2.7755575615628914e-17,0.370490840640081,5.551115123125783e-16,4.163336342344337e-17,0.4426662511319454,3.3306690738754696e-16,4.163336342344337e-17,0.6636736609039069,1.1102230246251565e-16,6.938893903907228e-17,0.903741973310143,3.3306690738754696e-16,1.3877787807814457e-17,0.6416921052801715,8.881784197001252e-16,2.7755575615628914e-17,0.370490840640081,5.551115123125783e-16,4.163336342344337e-17,0.44266625169624124,3.3306690738754696e-16,0.0,0.018693502231513337,0.0,0.008387074228685273,0.37033318949470917,0.6636736615318652,2.220446049250313e-16,6.454120071719416e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9037419732598784,3.3306690738754696e-16,7.664537227114465e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.3771015699768888,0.6416921052924383,3.3306690738754696e-16,1.113098424543324e-08,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.3390947586503096,0.37049084064072724,8.881784197001252e-16,6.5363390255868126e-09,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555271160863e-07,0.44266625113194547,4.549209492466287e-11,1.8156805323061072e-10,0.6636736609039071,2.0148691063049342e-08,8.059455089021545e-08,0.9037419733101432,5.39031764206437e-06,2.1561165184628483e-05,0.6416921052801715,3.732929402995559e-05,0.0001493116140413809,0.3704908406400811,4.366692308335997e-05,0.000174660077167214,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929
229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 3 | 0.4440357424640918,2.220446049250313e-16,1.3877787807814457e-17,0.6647331792760505,0.0,6.938893903907228e-17,0.9042081789219588,3.3306690738754696e-16,9.71445146547012e-17,0.6418666684239951,8.881784197001252e-16,0.0,0.3705896969625475,8.881784197001252e-16,0.0,0.4440357424640918,2.220446049250313e-16,1.3877787807814457e-17,0.6647331792760505,0.0,6.938893903907228e-17,0.9042081789219588,3.3306690738754696e-16,9.71445146547012e-17,0.6418666684239951,8.881784197001252e-16,0.0,0.3705896969625475,8.881784197001252e-16,0.0,0.44403574246409544,3.3306690738754696e-16,1.7900520510316653e-09,0.018693502231513337,0.0,0.008387074228685273,0.37033318949470917,0.6647331792765169,3.3306690738754696e-16,4.736029346474879e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9042081786205918,5.551115123125783e-16,6.1141855467461426e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.37710156997688876,0.6418666684246559,8.881784197001252e-16,3.8669503155119855e-09,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.3390947586503096,0.3705896969625623,1.2212453270876722e-15,1.023098883390361e-08,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555272933861e-07,0.44403574246409183,4.5304485671580216e-11,1.8081977664918045e-10,0.6647331792760507,2.0103999257454624e-08,8.041578465509068e-08,0.9042081789219588,5.386414468510003e-06,2.1545552658950245e-05,0.6418666684239951,3.731836349599224e-05,0.00014926789516938314,0.37058969696254757,4.365518415929493e-05,0.0001746131255713157,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.66133814
7750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 4 | 0.44295454737761386,3.3306690738754696e-16,4.163336342344337e-17,0.6638887685276342,1.1102230246251565e-16,6.938893903907228e-17,0.9038329946757389,3.3306690738754696e-16,1.3877787807814457e-17,0.6417220858363781,8.881784197001252e-16,2.7755575615628914e-17,0.37050736780347643,5.551115123125783e-16,4.163336342344337e-17,0.44295454737761386,3.3306690738754696e-16,4.163336342344337e-17,0.6638887685276342,1.1102230246251565e-16,6.938893903907228e-17,0.9038329946757389,3.3306690738754696e-16,1.3877787807814457e-17,0.6417220858363781,8.881784197001252e-16,2.7755575615628914e-17,0.37050736780347643,5.551115123125783e-16,4.163336342344337e-17,0.4429545479417532,3.3306690738754696e-16,1.0334870181871736e-09,0.018693502231513337,0.0,0.008387074228685273,0.37033318949471844,0.6638887691554879,2.220446049250313e-16,6.536341468077467e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.903832994625428,3.3306690738754696e-16,7.664537227114465e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.37710156997688876,0.6417220858486448,3.3306690738754696e-16,1.12265309271109e-08,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.33909475865030964,0.37050736780412263,8.881784197001252e-16,6.617538850228755e-09,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555268621548e-07,0.44295454737761386,4.544986955075199e-11,1.8139961963600548e-10,0.6638887685276343,2.013881080323616e-08,8.055503005702457e-08,0.9038329946757391,5.389475619182141e-06,2.1557797129464118e-05,0.6417220858363781,3.7327342868624135e-05,0.00014930380997874262,0.3705073678034765,4.366495108179315e-05,0.00017465218984983745,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,
1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 5 | 0.4433398303764975,3.3306690738754696e-16,4.163336342344337e-17,0.6641910967043859,1.1102230246251565e-16,6.938893903907228e-17,0.9039635980120929,3.3306690738754696e-16,1.3877787807814457e-17,0.641754876700662,8.881784197001252e-16,2.7755575615628914e-17,0.37052405722377546,5.551115123125783e-16,4.163336342344337e-17,0.4433398303764975,3.3306690738754696e-16,4.163336342344337e-17,0.6641910967043859,1.1102230246251565e-16,6.938893903907228e-17,0.9039635980120929,3.3306690738754696e-16,1.3877787807814457e-17,0.641754876700662,8.881784197001252e-16,2.7755575615628914e-17,0.37052405722377546,5.551115123125783e-16,4.163336342344337e-17,0.4433398309406037,3.3306690738754696e-16,1.0334870181871736e-09,0.018693502231513337,0.0,0.008387074228685273,0.37033318949471844,0.6641910973322176,2.220446049250313e-16,6.536341468077467e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9039635979617335,2.220446049250313e-16,7.802648083199415e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.3771015699768887,0.6417548767129287,3.3306690738754696e-16,1.113098424543324e-08,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.3390947586503096,0.3705240572244216,8.881784197001252e-16,6.5363390255868126e-09,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555272924361e-07,0.4433398303764975,4.540768099060705e-11,1.8123148839599107e-10,0.6641910967043863,2.012893986066411e-08,8.051554657506717e-08,0.903963598012093,5.388633849442114e-06,2.155443008687823e-05,0.641754876700662,3.732539190896092e-05,0.00014929600672269536,0.3705240572237755,4.3662979258096503e-
05,0.00017464430324385595,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 6 | 0.4435604962691923,3.3306690738754696e-16,4.163336342344337e-17,0.6643455117456902,1.1102230246251565e-16,6.938893903907228e-17,0.9040271172807174,3.3306690738754696e-16,1.3877787807814457e-17,0.6417829053920107,8.881784197001252e-16,2.7755575615628914e-17,0.3705404716959244,5.551115123125783e-16,4.163336342344337e-17,0.4435604962691923,3.3306690738754696e-16,4.163336342344337e-17,0.6643455117456902,1.1102230246251565e-16,6.938893903907228e-17,0.9040271172807174,3.3306690738754696e-16,1.3877787807814457e-17,0.6417829053920107,8.881784197001252e-16,2.7755575615628914e-17,0.3705404716959244,5.551115123125783e-16,4.163336342344337e-17,0.443560496833056,3.3306690738754696e-16,1.0334870181871736e-09,0.018693502231513337,0.0,0.008387074228685273,0.37033318949471844,0.6643455123733601,2.220446049250313e-16,6.536341468077467e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9040271172303129,3.3306690738754696e-16,7.664537227114465e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.37710156997688876,0.641782905404277,3.3306690738754696e-16,1.1082902040548959e-08,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.3390947586503096,0.3705404716965706,8.881784197001252e-16,6.5363390255868126e-09,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555272919462e-07,0.44356049626919225,4.536561850334243e-11,1.8106359647763014e-10,0.6643455117456906,2.0119076083260818e-08,8
.047609161388539e-08,0.9040271172807174,5.387792270313649e-06,2.155106380668017e-05,0.6417829053920107,3.7323441136466415e-05,0.00014928820421522177,0.3705404716959245,4.3661007610788154e-05,0.0001746364173433221,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 7 | 0.44394721104440366,3.3306690738754696e-16,4.163336342344337e-17,0.664649141916318,1.1102230246251565e-16,4.163336342344337e-17,0.9041583160209654,3.3306690738754696e-16,1.3877787807814457e-17,0.6418157386113046,8.881784197001252e-16,2.7755575615628914e-17,0.37055716356185797,5.551115123125783e-16,2.7755575615628914e-17,0.44394721104440366,3.3306690738754696e-16,4.163336342344337e-17,0.664649141916318,1.1102230246251565e-16,4.163336342344337e-17,0.9041583160209654,3.3306690738754696e-16,1.3877787807814457e-17,0.6418157386113046,8.881784197001252e-16,2.7755575615628914e-17,0.37055716356185797,5.551115123125783e-16,2.7755575615628914e-17,0.44394721160823625,3.3306690738754696e-16,1.7900520510316653e-09,0.018693502231513337,0.0,0.008387074228685273,0.3703331894947145,0.6646491425439669,2.220446049250313e-16,6.536341468077467e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9041583159705124,3.3306690738754696e-16,7.45257344725303e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.3771015699768888,0.6418157386235709,3.3306690738754696e-16,1.1082902040548959e-08,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.3390947586503096,0.3705571635625042,8.881784197001252e-16,6.286449527603821e-09,0.01898
6089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.331555528686864e-07,0.44394721104440366,4.5323565003674344e-11,1.808960060705163e-10,0.6646491419163182,2.0109221605811145e-08,8.043667399324728e-08,0.9041583160209655,5.38695094468894e-06,2.1547698540525454e-05,0.6418157386113046,3.732149056570802e-05,0.00014928040251461686,0.370557163561858,4.36590361413138e-05,0.00017462853215404403,0.0,1.1102230246251565e-16,0.0,0.01865466782950831,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.018835463608276926,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0189843321895119,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.018985929229116794,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.018986089001779893,0.0,3.2247763382619056e-13,4.879286697625974e-07 8 | 0.4385380377066354,2.220446049250313e-16,1.3877787807814457e-17,0.6597394507569534,0.0,6.938893903907228e-17,0.9019318021994494,3.3306690738754696e-16,9.71445146547012e-17,0.6417284070696884,8.881784197001252e-16,0.0,0.37059650934588906,8.881784197001252e-16,0.0,0.4385380377066354,2.220446049250313e-16,1.3877787807814457e-17,0.6597394507569534,0.0,6.938893903907228e-17,0.9019318021994494,3.3306690738754696e-16,9.71445146547012e-17,0.6417284070696884,8.881784197001252e-16,0.0,0.37059650934588906,8.881784197001252e-16,0.0,0.43853803770663907,3.3306690738754696e-16,0.0,0.018693502231513337,0.0,0.008387074228685273,0.37033318949470917,0.6597394507574161,3.3306690738754696e-16,4.621891369627207e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9019318018988391,5.551115123125783e-16,6.286449805159577e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.37710156997688876,0.641728407070349,8.881784197001252e-16,4.002673914538235e-09,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.33909475865030964,0.3705965093459037,1
.2212453270876722e-15,1.0334859357197246e-08,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555283194842e-07,0.4385380377066354,4.5264370262303046e-11,1.8065034948307739e-10,0.6597394507569536,2.0094080183553413e-08,8.037610359702041e-08,0.9019318021994495,5.385571282390397e-06,2.1542179949283815e-05,0.6417284070696884,3.7316412925332314e-05,0.00014926009346855762,0.3705965093458891,4.3653213162159504e-05,0.0001746052422706741,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 9 | 0.4389217664014375,2.220446049250313e-16,1.3877787807814457e-17,0.6600402888203178,0.0,6.938893903907228e-17,0.9020616824730117,3.3306690738754696e-16,9.71445146547012e-17,0.6417611448980974,8.881784197001252e-16,0.0,0.37061319569336726,8.881784197001252e-16,0.0,0.4389217664014375,2.220446049250313e-16,1.3877787807814457e-17,0.6600402888203178,0.0,6.938893903907228e-17,0.9020616824730117,3.3306690738754696e-16,9.71445146547012e-17,0.6417611448980974,8.881784197001252e-16,0.0,0.37061319569336726,8.881784197001252e-16,0.0,0.43892176640144115,3.3306690738754696e-16,1.0334870181871736e-09,0.018693502231513337,0.0,0.008387074228685273,0.37033318949471844,0.6600402888207806,3.3306690738754696e-16,4.621891369627207e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9020616821723562,5.551115123125783e-16,6.286449805159577e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.37710156997688876,0.6417611448987579,8.881784197001252e-16,4.1339437983900496e-09,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.33909475865030975,0.370613195693382,1.2212
453270876722e-15,1.0334859357197246e-08,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.331555528318952e-07,0.4389217664014375,4.5221931844434595e-11,1.8048122891421513e-10,0.6600402888203181,2.0084169258613916e-08,8.033646018779995e-08,0.9020616824730118,5.384728349227395e-06,2.1538808253029734e-05,0.6417611448980974,3.7314462555984884e-05,0.00014925229257342656,0.3706131956933673,4.3651242342630934e-05,0.00017459735968082895,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 10 | 0.4393192650745621,2.220446049250313e-16,1.3877787807814457e-17,0.660353556191883,0.0,6.938893903907228e-17,0.9021972267592623,3.3306690738754696e-16,9.71445146547012e-17,0.641794285722338,8.881784197001252e-16,0.0,0.3706299053137449,8.881784197001252e-16,0.0,0.4393192650745621,2.220446049250313e-16,1.3877787807814457e-17,0.660353556191883,0.0,6.938893903907228e-17,0.9021972267592623,3.3306690738754696e-16,9.71445146547012e-17,0.641794285722338,8.881784197001252e-16,0.0,0.3706299053137449,8.881784197001252e-16,0.0,0.4393192650745658,3.3306690738754696e-16,1.4615714194476936e-09,0.018693502231513337,0.0,0.008387074228685273,0.3703331894947157,0.6603535561923456,3.3306690738754696e-16,4.621891369627207e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9021972264585592,5.551115123125783e-16,6.370835470281833e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.3771015699768887,0.6417942857229983,8.881784197001252e-16,4.002673914538235e-09,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.33909475865030964,0.3706299053137595,1.2212453270876722e
-15,1.0334859357197246e-08,0.018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555283184196e-07,0.43931926507456215,4.517956584171224e-11,1.803124194070608e-10,0.6603535561918832,2.0074267916047088e-08,8.029685511968217e-08,0.9021972267592623,5.38388567459764e-06,2.1535437590895576e-05,0.641794285722338,3.731251238927678e-05,0.00014924449248876316,0.37062990531374496,4.364927170092418e-05,0.00017458947780217896,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 11 | 0.4397214746616252,2.220446049250313e-16,1.3877787807814457e-17,0.6606710757440524,0.0,6.938893903907228e-17,0.9023347087402926,3.3306690738754696e-16,9.71445146547012e-17,0.6418275644140062,8.881784197001252e-16,0.0,0.37064662289593076,8.881784197001252e-16,0.0,0.4397214746616252,2.220446049250313e-16,1.3877787807814457e-17,0.6606710757440524,0.0,6.938893903907228e-17,0.9023347087402926,3.3306690738754696e-16,9.71445146547012e-17,0.6418275644140062,8.881784197001252e-16,0.0,0.37064662289593076,8.881784197001252e-16,0.0,0.4397214746616288,3.3306690738754696e-16,0.0,0.018693502231513337,0.0,0.008387074228685273,0.37033318949470917,0.660671075744515,3.3306690738754696e-16,4.736029346474879e-09,0.018865528650366392,2.7755575615628914e-17,0.007925561546165463,0.3747856272318823,0.9023347084395419,5.551115123125783e-16,6.370835470281833e-09,0.018984332189511788,6.938893903907228e-17,0.007421369060361796,0.3771015699768887,0.6418275644146663,8.881784197001252e-16,4.1339437983900496e-09,0.01898592922911646,1.249000902703301e-16,0.007052146934873102,0.33909475865030975,0.3706466228959454,1.2212453270876722e-15,1.023098883390361e-08,0.
018986089001780226,1.1102230246251565e-16,6.344456383978339e-12,1.3315555286847132e-07,0.43972147466162514,4.513727621640249e-11,1.801439219125796e-10,0.6606710757440525,2.006437620763719e-08,8.025728859196421e-08,0.9023347087402926,5.38304326019238e-06,2.153206796964379e-05,0.6418275644140062,3.7310562425595544e-05,0.00014923669321611666,0.3706466228959308,4.36473012370577e-05,0.00017458159663479727,0.0,1.1102230246251565e-16,0.0,0.0,0.0,1.1916178523745036e-13,1.0923088629683882e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.0114845242524606e-13,1.2270447098278781e-07,0.0,2.220446049250313e-16,0.0,0.0,0.0,1.27893575651596e-13,1.1244095778984321e-07,0.0,6.661338147750939e-16,0.0,0.0,0.0,2.454142742559712e-13,3.605521883101832e-07,0.0,8.881784197001252e-16,0.0,0.0,0.0,3.2247763382619056e-13,4.879286697625974e-07 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Feng Wei 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Models/autoencoder_model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CactiLab/code-xNIDS/26162bdebff2151f4fcc8874d7521a7766169a20/Models/autoencoder_model.h5 -------------------------------------------------------------------------------- /Models/kitsune.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CactiLab/code-xNIDS/26162bdebff2151f4fcc8874d7521a7766169a20/Models/kitsune.h5 -------------------------------------------------------------------------------- /Models/lstm_history_model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CactiLab/code-xNIDS/26162bdebff2151f4fcc8874d7521a7766169a20/Models/lstm_history_model.h5 -------------------------------------------------------------------------------- /Models/lstm_model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CactiLab/code-xNIDS/26162bdebff2151f4fcc8874d7521a7766169a20/Models/lstm_model.h5 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | ![](https://img.shields.io/badge/license-MIT-green.svg) 3 | ![](https://img.shields.io/badge/language-python-blue.svg) 4 | ![](https://img.shields.io/badge/framework-keras-orange.svg) 5 | 6 | 7 | 8 | ## xNIDS: Explaining Deep Learning-based Network Intrusion Detection Systems for Active Intrusion 
Responses 9 | 10 | This repo includes the source code for the "Explaining Deep Learning-based Network Intrusion Detection Systems for Active Intrusion Responses" project. The code is hardware-independent and can be optimized for execution on Google Colab. 11 | 12 | 13 | ## Implementation Notes 14 | 15 | Testing and running the code is straightforward, giving users the flexibility to utilize either Google Colab or a local machine for their testing purposes. 16 | 17 | 1. **Google Colab**: 18 | > git clone https://github.com/CactiLab/code-xNIDS.git 19 | - Rename the folder 20 | > mv code-xNIDS xNIDS 21 | - Upload the folder to the **Colab Notebooks** folder under Google Drive 22 | - Run the Demo **explanation.ipynb** notebook 23 | - Although a GPU is not required, it can significantly accelerate the execution 24 | 25 | 2. **Local Machine**: 26 | 27 | > pip install -r requirements.txt 28 | 29 | > python explanation.py 30 | 31 | 3. **Retrain the DL-NIDS** 32 | - Please carefully update the test cases accordingly in **explanation.py** if the users want to retrain the models. 33 | - **kitsune.ipynb** contains the reimplementation of KitNET. Please download the dataset and put it under the **Data** folder. 34 | - **kdd.ipynb** includes one *Autoencoder* and one **stateless** *RNN* based DL-NIDS 35 | - **kdd_history.ipynb** contains one **stateful** LSTM based DL-NIDS. 36 | 37 | ## Contribute 38 | 39 | Contributions are always welcome! 40 | 41 | ## Citation & Paper 42 | 43 | 44 | The results of this project were published in the paper entitled "xNIDS: Explaining Deep Learning-based Network Intrusion Detection Systems for Active Intrusion Responses" at USENIX Security 2023. If you want to cite our paper in your work, please use the following BibTeX entry.
class Explanation:
    """Explain a DL-NIDS prediction via history search, weighted perturbation
    sampling, and sparse-group-lasso feature attribution.

    NOTE(review): relies on the file-header imports (numpy as np, pandas as pd,
    tensorflow.keras as keras, matplotlib.pyplot as plt, asgl).
    """

    def __init__(self, current_sample, history_samples, model_path, original_score, feature_names, group_sizes, target):
        # Sample under explanation and the history that preceded it.
        self.current_sample = current_sample
        self.history_samples = history_samples
        self.model_path = model_path          # path to the Keras .h5 model
        self.original_score = original_score  # score recorded at detection time
        self.feature_names = feature_names
        self.group_size = group_sizes         # per-group feature counts for sparse group lasso
        self.target = target                  # 'lstm', 'kitsune', or other (batch_size=1 path)
        self.relevant_history = []            # reserved: relevant history samples
        self.delta = 0.001                    # tolerance when matching original_score
        self.step = 10                        # max attempts when searching the history window
        self.new_input = []                   # inputs selected for explanation
        self.weighted_samples = []            # perturbed samples for the surrogate model
        self.coef = []                        # fitted sparse-group-lasso coefficients

    def search_proper_input(self):
        """Find the shortest history suffix that reproduces the original score.

        Grows the window geometrically (1, 2, 4, ...) for at most ``self.step``
        attempts.  Returns the matching suffix, or ``self.current_sample`` when
        no suffix gets within ``self.delta`` of ``self.original_score``.
        """
        # Load the model once; the original reloaded it on every iteration.
        model = keras.models.load_model(self.model_path)
        window = 1
        for _ in range(self.step + 1):
            self.new_input = self.history_samples[-window:]
            print(len(self.new_input))
            scores = model.predict(self.new_input, batch_size=1)
            current_score = scores[-1]
            print("searched current score", current_score)
            if abs(current_score - self.original_score) <= self.delta:
                return self.new_input  # found a sufficient history window
            if window >= len(self.history_samples):
                break  # whole history already tried; stop early
            # Double the window size.  The original wrote `i *= 2` inside a
            # `for i in range(...)` loop, which has no effect in Python; this
            # implements the documented "double the value" intent.
            window *= 2
        print("Cannot find the proper input within the max steps")
        # Fall back to the current sample (the original could also return None
        # here, which would crash downstream consumers).
        return self.current_sample

    def capture_relevant_history(self):
        """
        Capture the relevant history samples by running the model on the
        current sample; sets ``self.new_input`` to either the current sample
        or an approximated history suffix.
        """
        model = keras.models.load_model(self.model_path)  # Load the Keras model
        model.summary()

        if self.target == 'lstm':
            current_score = model.predict(self.current_sample)
        elif self.target == 'kitsune':
            # Kitsune is an autoencoder: its score is the reconstruction error.
            reconstruction = model.predict(self.current_sample)
            current_score = np.mean(np.square(self.current_sample - reconstruction), axis=1)
        else:
            current_score = model.predict(self.current_sample, batch_size=1)
        print("Current prediction score:", current_score)

        # If the stateless re-prediction differs from the recorded score, the
        # detector's internal state (history) mattered: approximate it.
        difference = abs(current_score - self.original_score)
        print("original score:", self.original_score)
        print("difference:", difference)
        if difference > self.delta:
            print("The output is determined by current input and relevant history.")
            self.new_input = self.search_proper_input()
        else:
            print("The output is determined by current input.")
            self.new_input = self.current_sample

    def weighted_sampling(self, num_samples):
        """Generate ``num_samples`` perturbed copies of every input in
        ``self.new_input``; store them flattened in ``self.weighted_samples``.

        Non-kitsune inputs are assumed to be (1, 122) KDD rows — TODO confirm:
        the first 38 (continuous) features are randomly masked, the remaining
        3/70/11 one-hot groups are copied verbatim.  Kitsune inputs are 1-D
        vectors in which 50 random positions are zeroed.
        """
        # Accumulate in a Python list and concatenate once; the original
        # np.append-per-sample pattern was O(n^2).  Result is the same
        # flat 1-D float array.
        collected = [np.asarray(self.weighted_samples, dtype=float).ravel()]
        for idx, input_value in enumerate(self.new_input):
            distance = np.abs(idx + 1)  # distance from the current input position
            new_weight = distance / (distance + 1)  # fraction of features to keep
            print("weight:", new_weight)

            for _ in range(num_samples):
                if self.target != 'kitsune':
                    # Mask the 38 continuous features; keep the categorical groups.
                    random_sample38 = np.zeros((1, 38))
                    num_selected = int(new_weight * 38 / 4)
                    np.random.seed()  # reseed so every sample differs
                    selected_indices = np.random.choice(38, size=num_selected, replace=False)
                    input_value38 = input_value[:, :38]
                    random_sample38[:, selected_indices] = input_value38[:, selected_indices]
                    input_value_right3 = input_value[:, 38:41].reshape(1, 3)
                    input_value_right70 = input_value[:, 41:111].reshape(1, 70)
                    input_value_right11 = input_value[:, 111:].reshape(1, 11)
                    random_sample = np.concatenate(
                        (random_sample38, input_value_right3, input_value_right70, input_value_right11),
                        axis=1)
                else:
                    # Kitsune: zero out 50 random feature positions of a copy.
                    random_sample = np.copy(input_value)
                    num_selected = 50
                    np.random.seed()  # reseed so every sample differs
                    selected_indices = np.random.choice(len(input_value), size=num_selected, replace=False)
                    random_sample[selected_indices] = 0

                collected.append(np.ravel(random_sample))

        self.weighted_samples = np.concatenate(collected)

    def sparse_group_lasso(self):
        """Fit a sparse-group-lasso surrogate on the weighted samples and
        store the learned coefficients in ``self.coef``."""
        # Map each feature to its 1-based group id, repeated group_size times.
        group_index = []
        for index, value in enumerate(self.group_size):
            group_index.extend([index + 1] * value)

        model = keras.models.load_model(self.model_path)  # reload the Keras model
        if self.target == 'lstm':
            y_scores = model.predict(self.weighted_samples.reshape(-1, 1, len(self.feature_names)))
        elif self.target == 'kitsune':
            x2d = self.weighted_samples.reshape(-1, len(self.feature_names))
            y_scores = model.predict(x2d)
            y_scores = np.mean(np.square(x2d - y_scores), axis=1)  # reconstruction error
        else:
            y_scores = model.predict(self.weighted_samples.reshape(-1, 1, len(self.feature_names)), batch_size=1)

        x = self.weighted_samples.reshape(len(y_scores), -1)
        y = y_scores.reshape(len(y_scores))
        del model  # free the Keras model before the CV grid search

        # Parameter grids for cross-validation.
        lambda1 = (10.0 ** np.arange(-3, 1.01, 0.6)).tolist()
        alpha = np.arange(0, 1, 0.2).tolist()
        power_weight = [0, 0.2, 1]

        # Model parameters: linear model with sparse-group-lasso penalty.
        model = 'lm'
        penalization = 'sgl'
        tau = 0.5

        cross_validation_class = asgl.CV(model=model, penalization=penalization, lambda1=lambda1, alpha=alpha,
                                         tau=0.5, parallel=True, weight_technique='pca_pct',
                                         lasso_power_weight=power_weight, gl_power_weight=power_weight,
                                         variability_pct=0.85, nfolds=5, error_type='QRE', random_state=42)

        # k-fold CV error, averaged across folds; pick the minimum-error setup.
        error = cross_validation_class.cross_validation(x, y, group_index)
        error = np.mean(error, axis=1)
        minimum_error_idx = np.argmin(error)
        optimal_parameters = cross_validation_class.retrieve_parameters_value(minimum_error_idx)

        # Refit an ASGL model with the optimal hyper-parameters.
        asgl_model = asgl.ASGL(model=model, penalization=penalization, tau=tau,
                               intercept=cross_validation_class.intercept,
                               lambda1=optimal_parameters.get('lambda1'),
                               alpha=optimal_parameters.get('alpha'),
                               lasso_weights=optimal_parameters.get('lasso_weights'),
                               gl_weights=optimal_parameters.get('gl_weights'))

        # Split data into train / test and solve on the training part.
        train_idx, test_idx = asgl.train_test_split(nrows=x.shape[0], train_pct=0.7, random_state=1)
        asgl_model.fit(x=x[train_idx, :], y=y[train_idx], group_index=group_index)

        self.coef = asgl_model.coef_

    def visualization(self, group_sizes, group_names, feature_names):
        """Render a per-feature importance bar chart, grouped and colored by
        the normalized sparse-group-lasso weights."""
        weights = self.coef[0]
        # Normalize weights to [0, 1]; guard the degenerate all-equal case
        # (the original divided by zero there).
        w_range = np.max(weights) - np.min(weights)
        if w_range == 0:
            weights = np.zeros_like(weights)
        else:
            weights = (weights - np.min(weights)) / w_range

        plt.figure(figsize=(50, 3))
        cmap = plt.colormaps.get_cmap("coolwarm")
        colors = cmap(weights)
        start_index = 0
        total_features = sum(group_sizes)
        feature_index = 0

        # Iterate over groups, drawing one bar per feature.
        for group_size, group_name in zip(group_sizes, group_names):
            group_weights = weights[start_index:start_index + group_size]
            group_colors = colors[start_index:start_index + group_size]
            group_labels = feature_names[feature_index:feature_index + group_size]

            for i, (weight, color, label) in enumerate(zip(group_weights, group_colors, group_labels)):
                plt.bar(feature_index + i, weight, color=color)
                # Connect the feature to its group label with a grey line.
                plt.plot([feature_index + i, feature_index + group_size // 2],
                         [weight, 1.3 * max(weights)], color='grey', linestyle='-')

            # Group name above the bars, centered on the group.
            plt.text(feature_index + group_size // 2, 1.5, group_name, ha='center', va='top')

            feature_index += group_size
            start_index += group_size

        plt.xticks(range(total_features), feature_names, rotation=45, ha='right')

        # De-clutter: drop spines and y ticks; the colorbar carries the scale.
        plt.gca().spines['top'].set_visible(False)
        plt.gca().spines['left'].set_visible(False)
        plt.gca().spines['right'].set_visible(False)
        plt.yticks([])

        plt.colorbar(plt.cm.ScalarMappable(cmap=cmap), label="Importance Score",
                     shrink=0.5, location='right', pad=0.00, anchor=(0, 0))
        plt.ylim(0, top=2)
        plt.show()


num_samples = 100


# explain kitsune
#
# We use the false positives and true positives from kitsune to demonstrate
# the explanation.
#
# Location of the instances:
#   373907 true positive (1000000 for training)
#     reconstruction_error 0.26677650458466096
#   456016 false negative
#     reconstruction_error 0.14151900999030606
#   373081 false positive
#     reconstruction_error 0.3029833795420598
#   kitsune_selected_fp_rows = pd.DataFrame(X_test[kfp-10:kfp])
#   threshold 0.20776055640832747

kitsune_model = "../Models/kitsune.h5"
# Fixed: the original list was missing a comma between "Group 1/Feature 2" and
# "Group 2/Feature 3", silently concatenating them into a single string.
kitsune_group_features = ["Group 1/Feature 1", "Group 1/Feature 2", "Group 2/Feature 3",
                          "Group 2/Feature 4", "Group 2/Feature 5"]
kitsune_group_sizes = [15, 15, 15, 35, 35]
kitsune_group_names = ["MAC-IP", "IP", "Jitter", "IPtoIP", "Socket"]
kitsune_feature_names = [f'{i}' for i in range(1, 116)]
# 5 types of streams:
#   packet sizes from a MAC-IP       3
#   packet size from an IP           3
#   jitter from an IP                3
#   packet size between two IPs      7
#   packet size between two sockets  7
# Total: 23 stats * 5 time windows
def run_explanation(current_sample, history_samples, model_path, prediction_score,
                    feature_names, group_sizes, model_type, group_names):
    """Run the four-step explanation pipeline for one sample and plot it.

    The steps (history capture -> weighted sampling -> sparse group lasso ->
    visualization) were previously repeated verbatim for every demo below.
    Returns the Explanation instance so callers can inspect it afterwards.
    """
    exp = Explanation(current_sample, history_samples, model_path,
                      prediction_score, feature_names, group_sizes, model_type)
    exp.capture_relevant_history()
    exp.weighted_sampling(num_samples)
    exp.sparse_group_lasso()
    exp.visualization(group_sizes, group_names, feature_names)
    return exp


# --- Kitsune demos -------------------------------------------------------
# True positive at index 373907 (reconstruction error 0.2668; detection
# threshold 0.2078). Last CSV row is the explained sample, earlier rows
# are its history.
kitsune_tp_data = pd.read_csv("../Data/kitsune_selected_tp_rows.csv")
kitsune_tp_current_sample = kitsune_tp_data.iloc[-1]
kitsune_tp_history_samples = kitsune_tp_data.iloc[:-1]
kitsune_tp_prediction_score = 0.26677650458466096
kitsune_tp_explanation = run_explanation(
    kitsune_tp_current_sample.values.reshape(1, -1), kitsune_tp_history_samples,
    kitsune_model, kitsune_tp_prediction_score, kitsune_feature_names,
    kitsune_group_sizes, 'kitsune', kitsune_group_names)

# False negative at index 456016 (reconstruction error 0.1415, below threshold).
kitsune_fn_data = pd.read_csv("../Data/kitsune_selected_fn_rows.csv")
kitsune_fn_current_sample = kitsune_fn_data.iloc[-1]
kitsune_fn_history_samples = kitsune_fn_data  # history includes the sample itself
kitsune_fn_prediction_score = 0.14151900999030606
kitsune_fn_explanation = run_explanation(
    kitsune_fn_current_sample.values.reshape(1, -1), kitsune_fn_history_samples,
    kitsune_model, kitsune_fn_prediction_score, kitsune_feature_names,
    kitsune_group_sizes, 'kitsune', kitsune_group_names)

# False positive at index 373081 (reconstruction error 0.3030).
kitsune_fp_data = pd.read_csv("../Data/kitsune_selected_fp_rows.csv")
kitsune_fp_current_sample = kitsune_fp_data.iloc[-1]
kitsune_fp_history_samples = kitsune_fp_data
kitsune_fp_prediction_score = 0.3029833795420598
kitsune_fp_explanation = run_explanation(
    kitsune_fp_current_sample.values.reshape(1, -1), kitsune_fp_history_samples,
    kitsune_model, kitsune_fp_prediction_score, kitsune_feature_names,
    kitsune_group_sizes, 'kitsune', kitsune_group_names)

# --- RNN-IDS (stateless LSTM) demos --------------------------------------
import pandas as pd

kdd_model = "../Models/lstm_model.h5"
# Post-encoding feature names saved by Scripts/kdd.py; drop the label column.
kdd_feature_names = np.load('../Data/kdd_after_features.npy')
kdd_feature_names = kdd_feature_names[kdd_feature_names != 'Class']
kdd_group_sizes = [1, 2, 1, 1, 1, 1, 2, 1, 3, 3, 1, 2, 1, 1, 4, 2, 2, 5, 4, 3, 70, 11]
kdd_group_names = [f'g{i}' for i in range(1, len(kdd_group_sizes) + 1)]

# False negative at index 21930 (warezmaster, predicted probability 0.4436).
kdd_fn_data = pd.read_csv("../Data/kdd_selected_fn_rows_122.csv")
kdd_fn_feature_data = kdd_fn_data.iloc[:, 1:]  # first column is the CSV index
kdd_fn_current_sample = kdd_fn_feature_data.iloc[-1].values.reshape(1, 1, -1)
kdd_fn_history_samples = kdd_fn_feature_data.values.reshape(len(kdd_fn_feature_data), 1, -1)
kdd_fn_prediction_score = 0.44356757
kdd_fn_explanation = run_explanation(
    kdd_fn_current_sample, kdd_fn_history_samples, kdd_model,
    kdd_fn_prediction_score, kdd_feature_names, kdd_group_sizes, 'lstm',
    kdd_group_names)

# False positive at index 571 (tcp/http/SF, predicted probability 0.7117).
kdd_fp_data = pd.read_csv("../Data/kdd_selected_fp_rows_122.csv")
kdd_fp_feature_data = kdd_fp_data.iloc[:, 1:]
kdd_fp_current_sample = kdd_fp_feature_data.iloc[-1].values.reshape(1, 1, -1)
kdd_fp_history_samples = kdd_fp_feature_data.values.reshape(len(kdd_fp_feature_data), 1, -1)
kdd_fp_prediction_score = 0.7116914
kdd_fp_explanation = run_explanation(
    kdd_fp_current_sample, kdd_fp_history_samples, kdd_model,
    kdd_fp_prediction_score, kdd_feature_names, kdd_group_sizes, 'lstm',
    kdd_group_names)

# --- Stateful LSTM demo --------------------------------------------------
# False positive at index 19114 (udp/private/SF, predicted probability 0.9264).
kdd_history_model = "../Models/lstm_history_model.h5"

kdd_history_fp_data = pd.read_csv("../Data/kdd_history_selected_fp_rows_122.csv")
kdd_history_fp_feature_data = kdd_history_fp_data.iloc[:, 1:]
kdd_history_fp_current_sample = kdd_history_fp_feature_data.iloc[-1].values.reshape(1, 1, -1)
kdd_history_fp_history_samples = kdd_history_fp_feature_data.values.reshape(
    len(kdd_history_fp_feature_data), 1, -1)
kdd_history_fp_prediction_score = 0.92638266
kdd_history_fp_explanation = run_explanation(
    kdd_history_fp_current_sample, kdd_history_fp_history_samples,
    kdd_history_model, kdd_history_fp_prediction_score, kdd_feature_names,
    kdd_group_sizes, 'lstmHistory', kdd_group_names)
kdd_history_fp_explanation.visualization(kdd_group_sizes, kdd_group_names, kdd_feature_names) 420 | -------------------------------------------------------------------------------- /Scripts/kdd.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """kdd.ipynb 3 | 4 | Automatically generated by Colaboratory. 5 | 6 | Original file is located at 7 | https://colab.research.google.com/drive/1CP2t-gIrSpZifU7VP457AR52dGk9dHf_ 8 | """ 9 | 10 | 11 | 12 | 13 | import warnings 14 | warnings.filterwarnings("ignore") 15 | import itertools 16 | from sklearn.preprocessing import MinMaxScaler 17 | import numpy as np 18 | import pandas as pd 19 | from keras.utils.data_utils import get_file 20 | 21 | # Downloading training and test sets to local drive 22 | try: 23 | training_set_path = get_file('KDDTrain%2B.csv', origin='https://raw.githubusercontent.com/defcom17/NSL_KDD/master/KDDTrain%2B.csv') 24 | except: 25 | print('Error downloading') 26 | raise 27 | 28 | 29 | try: 30 | test_set_path = get_file('KDDTest%2B.csv', origin='https://raw.githubusercontent.com/defcom17/NSL_KDD/master/KDDTest%2B.csv') 31 | except: 32 | print('Error downloading') 33 | raise 34 | training_df = pd.read_csv(training_set_path, header=None) 35 | testing_df = pd.read_csv(test_set_path, header=None) 36 | 37 | training_df.head() 38 | 39 | testing_df.head() 40 | 41 | columns = [ 42 | 'duration', 43 | 'protocol_type', 44 | 'service', 45 | 'flag', 46 | 'src_bytes', 47 | 'dst_bytes', 48 | 'land', 49 | 'wrong_fragment', 50 | 'urgent', 51 | 'hot', 52 | 'num_failed_logins', 53 | 'logged_in', 54 | 'num_compromised', 55 | 'root_shell', 56 | 'su_attempted', 57 | 'num_root', 58 | 'num_file_creations', 59 | 'num_shells', 60 | 'num_access_files', 61 | 'num_outbound_cmds', 62 | 'is_host_login', 63 | 'is_guest_login', 64 | 'count', 65 | 'srv_count', 66 | 'serror_rate', 67 | 'srv_serror_rate', 68 | 'rerror_rate', 69 | 'srv_rerror_rate', 70 | 'same_srv_rate', 71 | 
# A list of attack names that belong to each general attack type
dos_attacks = ["snmpgetattack", "back", "land", "neptune", "smurf", "teardrop",
               "pod", "apache2", "udpstorm", "processtable", "mailbomb"]
r2l_attacks = ["snmpguess", "worm", "httptunnel", "named", "xlock", "xsnoop",
               "sendmail", "ftp_write", "guess_passwd", "imap", "multihop",
               "phf", "spy", "warezclient", "warezmaster"]
u2r_attacks = ["sqlattack", "buffer_overflow", "loadmodule", "perl", "rootkit",
               "xterm", "ps"]
probe_attacks = ["ipsweep", "nmap", "portsweep", "satan", "saint", "mscan"]

# Our new labels
classes = ["Normal", "Dos", "R2L", "U2R", "Probe"]

# Flatten the four lists into a single outcome -> class lookup, built once.
_attack_to_class = {}
for _names, _label in ((dos_attacks, classes[1]), (r2l_attacks, classes[2]),
                       (u2r_attacks, classes[3]), (probe_attacks, classes[4])):
    for _name in _names:
        _attack_to_class[_name] = _label


def label_attack(row):
    """Map a row's raw 'outcome' to one of the five coarse classes.

    Any outcome not listed in the attack tables is treated as "Normal".
    """
    return _attack_to_class.get(row["outcome"], classes[0])
# Helper function for scaling continuous values
def minmax_scale_values(training_df, testing_df, col_name):
    """Min-max scale one column in place, fitting only on the training data.

    The same fitted scaler is applied to both frames so the test set is
    scaled on the training set's range (values outside it fall outside 0-1).
    """
    train_col = training_df[col_name].values.reshape(-1, 1)
    test_col = testing_df[col_name].values.reshape(-1, 1)
    scaler = MinMaxScaler().fit(train_col)
    training_df[col_name] = scaler.transform(train_col)
    testing_df[col_name] = scaler.transform(test_col)


# Helper function for one hot encoding
def encode_text(training_df, testing_df, name):
    """One-hot encode column `name` in place on both frames.

    Dummy columns follow the training-set categories: categories missing from
    the test set become all-zero columns, and test-only categories are dropped
    so both frames end up with identical dummy columns.
    """
    train_dummies = pd.get_dummies(training_df[name])
    test_dummies = pd.get_dummies(testing_df[name])
    for category in train_dummies.columns:
        dummy_name = "{}_{}".format(name, category)
        training_df[dummy_name] = train_dummies[category]
        if category in test_dummies.columns:
            testing_df[dummy_name] = test_dummies[category]
        else:
            testing_df[dummy_name] = np.zeros(len(testing_df))
    training_df.drop(name, axis=1, inplace=True)
    testing_df.drop(name, axis=1, inplace=True)
# --- Autoencoder decoder tail (mirrors the 256-128-64 encoder stack) ------
encoded4 = keras.layers.Dense(128, activation='relu')(dropout_encoded3)
dropout_encoded4 = keras.layers.Dropout(0.1)(encoded4)
encoded5 = keras.layers.Dense(256, activation='relu')(dropout_encoded4)
dropout_encoded5 = keras.layers.Dropout(0.1)(encoded5)
# Sigmoid output matches the min-max-scaled [0, 1] inputs.
decoded = keras.layers.Dense(input_dim, activation='sigmoid')(dropout_encoded5)

autoencoder = keras.Model(input_data, decoded)

from tensorflow.keras.optimizers import Adam

# Compile 'autoencoder' with an explicit learning rate.
learning_rate = 0.001  # Learning rate value

# Create an optimizer with the desired learning rate
optimizer = Adam(learning_rate=learning_rate)
autoencoder.compile(optimizer=optimizer, loss='mse')
# Train the autoencoder (the input is also the reconstruction target).
autoencoder.fit(x, x, epochs=1, batch_size=5000)

# Save the model
model_path = "../Models/autoencoder_model.h5"
autoencoder.save(model_path)
print("Model saved.")

# Load the model (save/load round-trip check).
loaded_model = keras.models.load_model(model_path)
print("Model loaded.")

# Use the loaded model for anomaly detection: the per-row reconstruction
# error on the test set is the anomaly score.
reconstructed_data = loaded_model.predict(x_test)
mse = np.mean(np.power(x_test - reconstructed_data, 2), axis=1)  # Compute mean squared error

# Compute ROC curve and AUC of the MSE score against the binary labels.
fpr, tpr, thresholds = roc_curve(y0_test, mse)
roc_auc = auc(fpr, tpr)

# Plot ROC curve
plt.figure()
plt.plot(fpr, tpr, color='darkorange', lw=2, label='ROC curve (AUC = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.show()

# Add the timestep axis the LSTM expects: (samples, 1, features).
x_train = np.reshape(x, (x.shape[0],1,x.shape[1]))
x_train.shape
x_test = np.reshape(x_test, (x_test.shape[0],1,x_test.shape[1]))
x_test.shape

# LSTM requirements
from tensorflow import keras
from keras.layers import LSTM
from keras.layers import Input
from keras.models import Model
from keras.layers import Dense  # importing dense layer
from keras.models import Sequential  # importing Sequential layer

lst = Sequential()
# input layer and LSTM layer with 50 neurons (122 = post-encoding feature count)
lst.add(LSTM(50,input_dim=122))

# output layer with sigmoid activation (binary normal/attack score)
lst.add(Dense(1,activation='sigmoid'))
lst.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
lst.summary()
history = lst.fit(x_train, y0, epochs=100, batch_size=5000,validation_split=0.2)
test_results = lst.evaluate(x_test, y0_test, verbose=1)
print(f'Test results - Loss: {test_results[0]} - Accuracy: {test_results[1]*100}%')
# Save the model
model_path = "../Models/lstm_model.h5"
lst.save(model_path)
print("Model saved.")

# Load the model
new_model = keras.models.load_model(model_path)

# Check its architecture
new_model.summary()
print("Model loaded.")

import matplotlib.pyplot as plt
# Plot of accuracy vs epoch of train and validation split
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title("Plot of accuracy vs epoch for train and test dataset")
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='best')
plt.show()

# Prepare data for explanations.
# Collect misclassified test indices and export the rows that the
# explanation demos (Scripts/explanation.py) read back from ../Data.

# Make predictions
predicted_probabilities = lst.predict(x_test)
predicted_labels = (predicted_probabilities >= 0.5).astype(int)

rnn_false_positives = []  # indices predicted attack (1) but labelled normal
rnn_false_negatives = []  # indices predicted normal (0) but labelled attack
# enumerate() replaces the original range(len(...)) index loop.
for i, label in enumerate(predicted_labels):
    if label[0] != y0_test[i]:
        if label[0] == 1:  # False positive
            rnn_false_positives.append(i)
        else:
            rnn_false_negatives.append(i)

#rnn_false_positives

#rnn_false_negatives

# Index 571 is a known false positive (udp/private predicted 0.69);
# export it with one preceding row, both raw and as 122-feature vectors.
kdd_selected_fp_rows = type_testing_df.loc[570:571]
kdd_selected_fp_rows_122 = pd.DataFrame(x_test[570:572].reshape(2, 122))
kdd_selected_fp_rows.to_csv('../Data/kdd_selected_fp_rows.csv', index=True)
kdd_selected_fp_rows_122.to_csv('../Data/kdd_selected_fp_rows_122.csv', index=True)

# Notebook-style inspection of the selected false positive.
predicted_probabilities[571]
y0_test[571]
predicted_labels[571][0]
kdd_selected_fp_rows

y0_test[21930]
predicted_labels[21930][0]

# Index 21930 is a known false negative (warezmaster); export it with ten
# rows of preceding history.
kdd_selected_fn_rows = type_testing_df.loc[21920:21930]
kdd_selected_fn_rows_122 = pd.DataFrame(x_test[21920:21931].reshape(11, 122))
kdd_selected_fn_rows.to_csv('../Data/kdd_selected_fn_rows.csv', index=True)
kdd_selected_fn_rows_122.to_csv('../Data/kdd_selected_fn_rows_122.csv', index=True)

predicted_probabilities[21930]
kdd_selected_fn_rows
kdd_selected_fn_rows_122
5 | 6 | Original file is located at 7 | https://colab.research.google.com/drive/1CBVgNAk2A9VSOtotJQNLxAAbrAcBfcvX 8 | """ 9 | 10 | 11 | 12 | 13 | 14 | import warnings 15 | warnings.filterwarnings("ignore") 16 | import itertools 17 | from sklearn.preprocessing import MinMaxScaler 18 | import numpy as np 19 | import pandas as pd 20 | from keras.utils.data_utils import get_file 21 | 22 | # Downloading training and test sets to local drive 23 | try: 24 | training_set_path = get_file('KDDTrain%2B.csv', origin='https://raw.githubusercontent.com/defcom17/NSL_KDD/master/KDDTrain%2B.csv') 25 | except: 26 | print('Error downloading') 27 | raise 28 | 29 | 30 | try: 31 | test_set_path = get_file('KDDTest%2B.csv', origin='https://raw.githubusercontent.com/defcom17/NSL_KDD/master/KDDTest%2B.csv') 32 | except: 33 | print('Error downloading') 34 | raise 35 | training_df = pd.read_csv(training_set_path, header=None) 36 | testing_df = pd.read_csv(test_set_path, header=None) 37 | 38 | training_df.head() 39 | 40 | testing_df.head() 41 | 42 | columns = [ 43 | 'duration', 44 | 'protocol_type', 45 | 'service', 46 | 'flag', 47 | 'src_bytes', 48 | 'dst_bytes', 49 | 'land', 50 | 'wrong_fragment', 51 | 'urgent', 52 | 'hot', 53 | 'num_failed_logins', 54 | 'logged_in', 55 | 'num_compromised', 56 | 'root_shell', 57 | 'su_attempted', 58 | 'num_root', 59 | 'num_file_creations', 60 | 'num_shells', 61 | 'num_access_files', 62 | 'num_outbound_cmds', 63 | 'is_host_login', 64 | 'is_guest_login', 65 | 'count', 66 | 'srv_count', 67 | 'serror_rate', 68 | 'srv_serror_rate', 69 | 'rerror_rate', 70 | 'srv_rerror_rate', 71 | 'same_srv_rate', 72 | 'diff_srv_rate', 73 | 'srv_diff_host_rate', 74 | 'dst_host_count', 75 | 'dst_host_srv_count', 76 | 'dst_host_same_srv_rate', 77 | 'dst_host_diff_srv_rate', 78 | 'dst_host_same_src_port_rate', 79 | 'dst_host_srv_diff_host_rate', 80 | 'dst_host_serror_rate', 81 | 'dst_host_srv_serror_rate', 82 | 'dst_host_rerror_rate', 83 | 'dst_host_srv_rerror_rate', 84 | 'outcome', 
# A list of attack names that belong to each general attack type
dos_attacks = ["snmpgetattack", "back", "land", "neptune", "smurf", "teardrop",
               "pod", "apache2", "udpstorm", "processtable", "mailbomb"]
r2l_attacks = ["snmpguess", "worm", "httptunnel", "named", "xlock", "xsnoop",
               "sendmail", "ftp_write", "guess_passwd", "imap", "multihop",
               "phf", "spy", "warezclient", "warezmaster"]
u2r_attacks = ["sqlattack", "buffer_overflow", "loadmodule", "perl", "rootkit",
               "xterm", "ps"]
probe_attacks = ["ipsweep", "nmap", "portsweep", "satan", "saint", "mscan"]

# Our new labels
classes = ["Normal", "Dos", "R2L", "U2R", "Probe"]

# Precomputed outcome -> class lookup; anything unknown maps to Normal.
_outcome_to_class = {
    **{name: classes[1] for name in dos_attacks},
    **{name: classes[2] for name in r2l_attacks},
    **{name: classes[3] for name in u2r_attacks},
    **{name: classes[4] for name in probe_attacks},
}


def label_attack(row):
    """Map a row's raw 'outcome' to one of the five coarse classes."""
    return _outcome_to_class.get(row["outcome"], classes[0])
# Helper function for scaling continuous values
def minmax_scale_values(training_df, testing_df, col_name):
    """Fit a MinMaxScaler on the training column, rescale both frames in place."""
    train_col = training_df[col_name].values.reshape(-1, 1)
    test_col = testing_df[col_name].values.reshape(-1, 1)
    scaler = MinMaxScaler()
    scaler = scaler.fit(train_col)
    training_df[col_name] = scaler.transform(train_col)
    testing_df[col_name] = scaler.transform(test_col)


# Helper function for one hot encoding
def encode_text(training_df, testing_df, name):
    """One-hot encode column `name` in place on both frames.

    The dummy-column set is defined by the training categories: categories
    absent from the test set become all-zero columns and test-only categories
    are discarded, keeping the two frames' columns aligned.
    """
    train_dummies = pd.get_dummies(training_df[name])
    test_dummies = pd.get_dummies(testing_df[name])
    for category in train_dummies.columns:
        dummy_name = "{}_{}".format(name, category)
        training_df[dummy_name] = train_dummies[category]
        testing_df[dummy_name] = (test_dummies[category]
                                  if category in test_dummies.columns
                                  else np.zeros(len(testing_df)))
    training_df.drop(name, axis=1, inplace=True)
    testing_df.drop(name, axis=1, inplace=True)
training_df.head(5)

training_df.columns

# Collect the post-encoding column names.
column_names = training_df.columns.tolist()

# Display all column names
for name in column_names:
    print(name)
# Persist the feature names for the explanation scripts.
np.save('../Data/kdd_after_features.npy', column_names)

# Split features/labels; pop() removes "Class" from the frames in place.
x,y=training_df,training_df.pop("Class").values
x=x.values
x_test,y_test=testing_df,testing_df.pop("Class").values
x_test=x_test.values
# Binary labels: 0 = Normal, 1 = any attack class.
y0=np.ones(len(y),np.int8)
y0[np.where(y==classes[0])]=0
y0_test=np.ones(len(y_test),np.int8)
y0_test[np.where(y_test==classes[0])]=0
input_shape = x.shape[1]

# Notebook-style shape inspection.
x_test.shape[0]

x_test.shape[-1]

from tensorflow import keras
from keras.layers import LSTM, Input, Dense, Dropout
from keras.models import Model, Sequential

# Reshape the training and test data to (samples, timesteps=1, features).
x_train = np.reshape(x, (x.shape[0], 1, x.shape[-1]))
x_test = np.reshape(x_test, (x_test.shape[0], 1, x_test.shape[-1]))

# LSTM requirements
lst = Sequential()

# Input layer and LSTM layer with 50 neurons.
# stateful=True with batch size 1 carries the hidden state across samples,
# so the model sees the data as one continuous stream (the "history" model).
lst.add(LSTM(50, batch_input_shape=(1, 1, x.shape[-1]), stateful=True, return_sequences=True))
lst.add(Dropout(0.2))  # Dropout layer with 20% dropout rate

# Additional LSTM layer with 10 neurons
lst.add(LSTM(10))
lst.add(Dropout(0.2))  # Dropout layer with 20% dropout rate
# Output layer with sigmoid activation
lst.add(Dense(1, activation='sigmoid'))

lst.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
lst.summary()

# Training the model with stateful LSTM: shuffle stays False to preserve the
# stream order, and the hidden state is reset manually between epochs.
for epoch in range(2):
    history = lst.fit(x_train, y0, epochs=1, batch_size=1, validation_split=0.2, shuffle=False)
    lst.reset_states()

test_results = lst.evaluate(x_test, y0_test, batch_size=1, verbose=1)
print(f'Test results - Loss: {test_results[0]} - Accuracy: {test_results[1] * 100}%')

# Save the model
model_path = "../Models/lstm_history_model.h5"
lst.save(model_path)
print("Model saved.")

# Load the model (round-trip sanity check)
new_model = keras.models.load_model(model_path)
new_model.summary()
print("Model loaded.")

import matplotlib.pyplot as plt

# Plot accuracy vs epoch of train and test dataset
#plt.plot(history.history['accuracy'])
#plt.plot(history.history['val_accuracy'])
#plt.title("Plot of accuracy vs epoch for train and test dataset")
#plt.ylabel('accuracy')
#plt.xlabel('epoch')
#plt.legend(['train', 'test'], loc='best')
#plt.show()

# Prepare data for explanations.
# Make predictions (batch_size=1 to match the stateful batch_input_shape).
predicted_probabilities = lst.predict(x_test,batch_size=1)
predicted_labels = (predicted_probabilities >= 0.5).astype(int)

rnn_false_positives = []  # Store indices of false positives
rnn_false_negatives = []  # Store indices of false negatives
for i in range(len(predicted_labels)):
    if predicted_labels[i][0] != y0_test[i]:
        if predicted_labels[i][0] == 1:  # False positive
            rnn_false_positives.append(i)
        else:
            rnn_false_negatives.append(i)

rnn_false_positives

#rnn_false_negatives

# Export index 19114 (a known false positive, predicted 0.9264) plus 114
# preceding rows of history for the explanation demo.
kdd_selected_fp_rows = type_testing_df.loc[19000:19114]
kdd_selected_fp_rows_122 = pd.DataFrame(x_test[19000:19115].reshape(115,122))
kdd_selected_fp_rows.to_csv('../Data/kdd_history_selected_fp_rows.csv', index=True)
kdd_selected_fp_rows_122.to_csv('../Data/kdd_history_selected_fp_rows_122.csv', index=True)
# 19114

kdd_selected_fp_rows

predicted_probabilities[19114]

idx = 19114
y0_test[idx] 287 | 288 | lst.predict(x_test[idx].reshape(1,1,-1)) 289 | 290 | lst.predict(x_test[idx-1:idx+1],batch_size=1) 291 | 292 | lst.predict(x_test[idx-3:idx+1],batch_size=1) 293 | 294 | lst.predict(x_test[idx-7:idx+1],batch_size=1) 295 | 296 | lst.predict(x_test[idx-15:idx+1],batch_size=1) 297 | 298 | #kdd_selected_fp_rows 299 | -------------------------------------------------------------------------------- /Scripts/kitsune.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """kitsune.ipynb 3 | 4 | Automatically generated by Colaboratory. 5 | 6 | Original file is located at 7 | https://colab.research.google.com/drive/1K6N1iTHM-_j2Deffzj5T3XJZrOXd4sZc 8 | """ 9 | 10 | 11 | 12 | import pandas as pd 13 | OS_Scan_data_total = pd.read_csv("../Data/OS_Scan_dataset.csv") 14 | OS_Scan_data_total 15 | 16 | OS_Scan_label_total = pd.read_csv("../Data/OS_Scan_labels.csv") 17 | OS_Scan_label_total 18 | 19 | # Drop the last row 20 | OS_Scan_label_total = OS_Scan_label_total.drop(OS_Scan_label_total.index[-1]) 21 | OS_Scan_label_total 22 | 23 | # Add labels as a new column to the train dataset 24 | OS_Scan_data_total['labels'] = OS_Scan_label_total.iloc[:, 1] 25 | 26 | # Save the updated train dataset to a new CSV file 27 | #OS_Scan_data_total.to_csv('../Data/merged_OS_Scan.csv', index=False) 28 | 29 | import psutil 30 | 31 | # Get disk usage information 32 | disk_usage = psutil.disk_usage('/') 33 | 34 | # Print the used disk space in bytes 35 | print("Used disk space:", disk_usage.used) 36 | 37 | # Print the used disk space in a human-readable format 38 | print("Used disk space:", psutil.disk_usage('/').used / (1024**3), "GB") 39 | 40 | # Calculate the distribution of each label 41 | # Get the label column (assuming it is the last column) 42 | label_column = OS_Scan_data_total.iloc[:, -1] 43 | # Calculate the distribution of each label 44 | label_distribution = label_column.value_counts(normalize=True) 45 
| 46 | # Print the label distribution 47 | print(label_distribution) 48 | 49 | import numpy as np 50 | import matplotlib.pyplot as plt 51 | from tensorflow import keras 52 | from tensorflow.keras.models import Model 53 | from tensorflow.keras.layers import Input, Dense 54 | from sklearn.model_selection import train_test_split 55 | 56 | # Assuming you have your data loaded into the data_array 57 | # Remove the first column from the data 58 | feature_data = OS_Scan_data_total.iloc[:, :-1] 59 | label_data = OS_Scan_data_total.iloc[:, -1] 60 | #del OS_Scan_data_total 61 | #del OS_Scan_label_total 62 | 63 | 64 | # Calculate the index for splitting the data 65 | split_index = 1000000 #1M for training 66 | 67 | 68 | 69 | from sklearn.preprocessing import RobustScaler 70 | 71 | # Create a RobustScaler object 72 | scaler = RobustScaler() 73 | 74 | # Normalize each column using RobustScaler 75 | normalized_data = scaler.fit_transform(feature_data) 76 | 77 | # Create a new DataFrame with the normalized values 78 | df = pd.DataFrame(normalized_data, columns=feature_data.columns) 79 | 80 | 81 | from sklearn.preprocessing import MinMaxScaler 82 | 83 | # Assuming your DataFrame is called 'df' 84 | 85 | # Create an instance of MinMaxScaler 86 | scaler = MinMaxScaler() 87 | 88 | # Fit the scaler on the entire DataFrame 89 | scaler.fit(df) 90 | 91 | # Transform the entire DataFrame with the scaler 92 | normalized_df = pd.DataFrame(scaler.transform(df), columns=df.columns) 93 | 94 | 95 | 96 | # Split the data into training and testing sets 97 | X_train = normalized_df[:split_index] 98 | X_test = normalized_df[split_index:] 99 | Y_test = label_data[split_index:] 100 | 101 | X_train = X_train.to_numpy() 102 | X_test = X_test.to_numpy() 103 | 104 | del normalized_data 105 | del OS_Scan_data_total 106 | del feature_data 107 | 108 | # Define the autoencoder architecture with additional layers 109 | input_dim = X_train.shape[1] 110 | encoding_dim = 100 # Adjust the size of the encoding 
# Symmetric dense autoencoder: 256 -> 128 -> 64 (bottleneck) -> 128 -> 256,
# with 10% dropout after every hidden layer; sigmoid output matches the
# [0, 1]-scaled inputs.
input_data = keras.Input(shape=(input_dim,))
encoded = keras.layers.Dense(256, activation='relu')(input_data)
dropout_encoded = keras.layers.Dropout(0.1)(encoded)
encoded2 = keras.layers.Dense(128, activation='relu')(dropout_encoded)
dropout_encoded2 = keras.layers.Dropout(0.1)(encoded2)
encoded3 = keras.layers.Dense(64, activation='relu')(dropout_encoded2)
dropout_encoded3 = keras.layers.Dropout(0.1)(encoded3)
encoded4 = keras.layers.Dense(128, activation='relu')(dropout_encoded3)
dropout_encoded4 = keras.layers.Dropout(0.1)(encoded4)
encoded5 = keras.layers.Dense(256, activation='relu')(dropout_encoded4)
dropout_encoded5 = keras.layers.Dropout(0.1)(encoded5)
decoded = keras.layers.Dense(input_dim, activation='sigmoid')(dropout_encoded5)

autoencoder = keras.Model(input_data, decoded)

from tensorflow.keras.optimizers import Adam

# Compile with Adam and mean-squared-error reconstruction loss.
learning_rate = 0.001  # Learning rate value
optimizer = Adam(learning_rate=learning_rate)
autoencoder.compile(optimizer=optimizer, loss='mse')

# Train the autoencoder to reproduce its own input (targets == inputs).
# Large batch size (5000) keeps the 1M-row epoch count manageable.
autoencoder.fit(X_train, X_train, epochs=10, batch_size=5000)


# Persist the trained model in HDF5 format.
model_path = "../Models/kitsune.h5"
autoencoder.save(model_path)
print("Model saved.")

# Round-trip load — sanity check that the saved file is readable.
# NOTE(review): loaded_model is never used afterwards; all predictions below
# use the in-memory `autoencoder`.
loaded_model = keras.models.load_model(model_path)
print("Model loaded.")



# Reconstruct the held-out segment and score each row by its per-row mean
# squared reconstruction error.
reconstructed_data = autoencoder.predict(X_test)

reconstruction_error = np.mean(np.square(X_test - reconstructed_data), axis=1)

# Anomaly threshold: mean + 2 standard deviations of the TEST-set errors.
# NOTE(review): deriving the threshold from the test distribution (which
# contains attacks) rather than from training errors — confirm intended.
threshold = np.mean(reconstruction_error) + 2 * np.std(reconstruction_error)
# Classify anomalies: rows whose reconstruction error exceeds the threshold
# are predicted attacks (1), the rest benign (0).
predicted_labels = (reconstruction_error > threshold).astype(int)

# Evaluate performance as plain accuracy against the ground-truth labels.
accuracy = np.mean(predicted_labels == Y_test)

print(f"Accuracy: {accuracy}")

# Reconstruct the FULL (train + test) normalized dataset for visualization.
reconstructed_data1 = autoencoder.predict(normalized_df)

reconstruction_error1 = np.mean(np.square(normalized_df - reconstructed_data1), axis=1)

# Scatter: row index vs. reconstruction error, colored by true label.
fig, ax = plt.subplots()
ax.scatter(OS_Scan_label_total.index, reconstruction_error1, c=label_data, cmap='coolwarm')

# Format x-axis as dates

# NOTE(review): the x values are row indices, not actual timestamps — the
# 'Timestamp' label assumes rows are in time order; confirm with the dataset.
ax.set_xlabel('Timestamp')
# Set y-axis to log scale with negative values
# Set y-axis to log scale
#ax.set_yscale('log')
ax.set_ylabel('Reconstruction Error')
ax.set_title('Reconstruction Error vs. Timestamp')
# Horizontal line: the anomaly threshold.
ax.axhline(y=threshold, color='red', linestyle='--')
# Vertical line: the train/test split boundary (split_index = 1,000,000).
ax.axvline(x =1000000, color='blue', linestyle='-.')
# Show the plot
plt.show()

import psutil

# Disk-usage housekeeping check (same as the one at the top of the script).
disk_usage = psutil.disk_usage('/')

# Print the used disk space in bytes
print("Used disk space:", disk_usage.used)

# Print the used disk space in a human-readable format
print("Used disk space:", psutil.disk_usage('/').used / (1024**3), "GB")

# Prepare data for explanations — the bare expressions are Colab display cells.
threshold

predicted_labels

# Re-derive Y_test (idempotent; identical to the earlier assignment).
Y_test = label_data[split_index:]

# Bucket every test row into FP / FN / TP by comparing prediction vs. truth.
kitsune_false_positives = []  # Store indices of false positives
kitsune_false_negatives = []  # Store indices of false negatives
kitsune_true_positives = []   # Store indices of true positives
for i in range(len(reconstruction_error)):
    if predicted_labels[i] != Y_test.values[i]:
        if predicted_labels[i] == 1:  # False positive
            kitsune_false_positives.append(i)
        else:  # False negative
            kitsune_false_negatives.append(i)
    else:
        if predicted_labels[i] == 1:  # True positive
            kitsune_true_positives.append(i)

# Pick one false negative to export for the explanation notebooks.
# NOTE(review): the hard-coded positions 39000/20000 assume at least that
# many cases exist in each bucket — will IndexError otherwise.
kfn = kitsune_false_negatives[39000]
kfn

reconstruction_error[kfn]

Y_test.iloc[kfn]

# Cross-check against the full label series (test offset = split_index).
label_data.iloc[1000000+kfn]

# Export the 10 rows PRECEDING the selected event (the event row itself is
# excluded — presumably these serve as history context; confirm downstream).
kitsune_selected_fn_rows = pd.DataFrame(X_test[kfn-10:kfn])
kitsune_selected_fn_rows.to_csv('../Data/kitsune_selected_fn_rows.csv', index=False)
kitsune_selected_fn_rows

# Same export for a selected true positive.
ktp = kitsune_true_positives[20000]
ktp

reconstruction_error[ktp]

Y_test.iloc[ktp]

label_data.iloc[1000000+ktp]

kitsune_selected_tp_rows = pd.DataFrame(X_test[ktp-10:ktp])
kitsune_selected_tp_rows.to_csv('../Data/kitsune_selected_tp_rows.csv', index=False)
kitsune_selected_tp_rows

len(kitsune_false_positives)

# Same export for a selected false positive.
kfp = kitsune_false_positives[20000]
kfp

reconstruction_error[kfp]

kitsune_selected_fp_rows = pd.DataFrame(X_test[kfp-10:kfp])
# Save DataFrame as CSV
kitsune_selected_fp_rows.to_csv('../Data/kitsune_selected_fp_rows.csv', index=False)
kitsune_selected_fp_rows
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
tensorflow==2.12.1
# NOTE(review): keras 2.13.1 does not match tensorflow 2.12.1 (which pins
# keras~=2.12) — verify these two pins actually resolve together.
keras==2.13.1
matplotlib==3.7.1
more_itertools==9.1.0
numpy==1.22.4
asgl==1.0.5
psutil==5.9.5
scikit_learn==1.5.0
pandas==1.5.3
--------------------------------------------------------------------------------