├── data
│   ├── team_id.txt
│   └── test.csv
├── .gitignore
├── Makefile.png
├── imgs
│   ├── Makefile.png
│   ├── data_snippet.png
│   ├── fig-1_home-away.jpg
│   ├── fig-2_shots-diff.jpg
│   └── all-figs
│       ├── fig-10_win-y.jpg
│       ├── fig-6_win-x.jpg
│       ├── fig-9_save-x.jpg
│       ├── fig-11_shots-y.jpg
│       ├── fig-12_goals-y.jpg
│       ├── fig-13_save-y.jpg
│       ├── fig-1_home-away.jpg
│       ├── fig-2_win-diff.jpg
│       ├── fig-5_save-diff.jpg
│       ├── fig-7_shots-x.jpg
│       ├── fig-8_goals-x.jpg
│       ├── fig-3_shots-diff.jpg
│       └── fig-4_goals-diff.jpg
├── results
│   ├── dtree.pdf
│   ├── final_result.csv
│   ├── max_depth.png
│   ├── model_selection.csv
│   ├── dtree
│   └── feature_importance.csv
├── source
│   ├── get_data.py
│   ├── exploring_nhl_data.R
│   ├── building_model.py
│   ├── finding_best_model.py
│   └── cleaning_nhl_data.R
├── CONDUCT.md
├── Dockerfile
├── Makefile
├── doc
│   ├── results_report.md
│   └── results_report.Rmd
└── README.md
/data/team_id.txt:
--------------------------------------------------------------------------------
1 | team_id
2 | 23
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .ipynb_checkpoints/
2 | .DS_Store
3 |
--------------------------------------------------------------------------------
/Makefile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/Makefile.png
--------------------------------------------------------------------------------
/imgs/Makefile.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/Makefile.png
--------------------------------------------------------------------------------
/results/dtree.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/results/dtree.pdf
--------------------------------------------------------------------------------
/results/final_result.csv:
--------------------------------------------------------------------------------
1 | ,depth,algorithm,accuracy
2 | 0,1,RandomForest,0.6056338028169014
3 |
--------------------------------------------------------------------------------
/imgs/data_snippet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/data_snippet.png
--------------------------------------------------------------------------------
/results/max_depth.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/results/max_depth.png
--------------------------------------------------------------------------------
/imgs/fig-1_home-away.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/fig-1_home-away.jpg
--------------------------------------------------------------------------------
/imgs/fig-2_shots-diff.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/fig-2_shots-diff.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-10_win-y.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-10_win-y.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-6_win-x.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-6_win-x.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-9_save-x.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-9_save-x.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-11_shots-y.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-11_shots-y.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-12_goals-y.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-12_goals-y.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-13_save-y.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-13_save-y.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-1_home-away.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-1_home-away.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-2_win-diff.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-2_win-diff.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-5_save-diff.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-5_save-diff.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-7_shots-x.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-7_shots-x.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-8_goals-x.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-8_goals-x.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-3_shots-diff.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-3_shots-diff.jpg
--------------------------------------------------------------------------------
/imgs/all-figs/fig-4_goals-diff.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UBC-MDS/nhl-game-predictor/HEAD/imgs/all-figs/fig-4_goals-diff.jpg
--------------------------------------------------------------------------------
/results/model_selection.csv:
--------------------------------------------------------------------------------
1 | ,depth,scores_dt,scores_rf
2 | 0,1,0.5312372812372812,0.5883801383801383
3 | 1,2,0.5521164021164021,0.5743691493691495
4 | 2,3,0.5346662596662597,0.5745014245014245
5 | 3,4,0.516137566137566,0.570929995929996
6 | 4,5,0.5409849409849411,0.567908017908018
7 | 5,6,0.5238400488400489,0.5457977207977208
8 | 6,7,0.5371591371591372,0.5355006105006105
9 | 7,8,0.5344017094017094,0.531105006105006
10 |
--------------------------------------------------------------------------------
/source/get_data.py:
--------------------------------------------------------------------------------
1 | # importing the required library
2 | # !conda install pandas
3 | # !pip install pandas
4 | import pandas as pd
5 |
6 | # reading the data file from the data folder
7 | nhl_data = pd.read_csv("../data/game_teams_stats.csv")
8 |
9 | # subsetting the data for the Vancouver team (team_id = 23)
10 | nhl_data_van = nhl_data[nhl_data['team_id'] == 23]
11 |
12 | # looking at a snippet of the data
13 | print(nhl_data_van.head(10))
14 |
--------------------------------------------------------------------------------
/results/dtree:
--------------------------------------------------------------------------------
1 | digraph Tree {
2 | node [shape=box, style="filled, rounded", color="black", fontname=helvetica] ;
3 | edge [fontname=helvetica] ;
4 | 0 [label=<gini = 0.489<br/>samples = 277<br/>value = [159, 118]<br/>class = Loss>, fillcolor="#e5813942"] ;
5 | 1 [label=<gini = 0.5<br/>samples = 201<br/>value = [102, 99]<br/>class = Loss>, fillcolor="#e5813908"] ;
6 | 0 -> 1 [labeldistance=2.5, labelangle=45, headlabel="True"] ;
7 | 2 [label=<samples = 84<br/>value = [31, 53]<br/>class = Win>, fillcolor="#399de56a"] ;
8 | 1 -> 2 ;
9 | 3 [label=<samples = 117<br/>value = [71, 46]<br/>class = Loss>, fillcolor="#e581395a"] ;
10 | 1 -> 3 ;
11 | 4 [label=<gini = 0.375<br/>samples = 76<br/>value = [57, 19]<br/>class = Loss>, fillcolor="#e58139aa"] ;
12 | 0 -> 4 [labeldistance=2.5, labelangle=-45, headlabel="False"] ;
13 | 5 [label=<samples = 37<br/>value = [23, 14]<br/>class = Loss>, fillcolor="#e5813964"] ;
14 | 4 -> 5 ;
15 | 6 [label=<samples = 39<br/>value = [34, 5]<br/>class = Loss>, fillcolor="#e58139da"] ;
16 | 4 -> 6 ;
17 | }
18 |
--------------------------------------------------------------------------------
/CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | **Our Pledge**
4 | We pledge to not be [jerks](https://www.psychologytoday.com/us/blog/here-there-and-everywhere/201201/6-reasons-why-youre-jerk). We welcome any and all genuine contributions to improve the quality of our project.
5 |
6 | **Our Standards**
7 | Don't be a [jerk](https://www.psychologytoday.com/us/blog/here-there-and-everywhere/201201/6-reasons-why-youre-jerk).
8 |
9 | **Our Responsibilities**
10 | We will do our best to engage with all contributions, and to be open and transparent as we try to improve the project. But know that we are busy and if we don't respond right away it is not because we hate you.
11 |
12 | **Scope**
13 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
14 |
15 | **Enforcement**
16 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project’s leadership.
17 |
18 | **Attribution**
19 | This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
20 |
21 | For answers to common questions about this code of conduct, see
22 | https://www.contributor-covenant.org/faq
23 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Dockerfile for nhl-game-predictor
2 | # Aditya Sharma, Shayne Andrews (Dec 06, 2018)
3 |
4 | # Purpose:
5 | # This docker file installs all the required software to
6 | # run the analysis from top to bottom and produce the results.
7 |
8 | # Use rocker/tidyverse as the base image
9 | FROM rocker/tidyverse
10 |
11 | # Install the cowsay package
12 | RUN apt-get update -qq && apt-get -y --no-install-recommends install \
13 | && install2.r --error \
14 | --deps TRUE \
15 | cowsay
16 |
17 | # Install R packages
18 | RUN Rscript -e "install.packages('rmarkdown')"
19 | RUN Rscript -e "install.packages('knitr')"
20 | RUN Rscript -e "install.packages('zoo')"
21 | RUN Rscript -e "install.packages('here')"
22 | RUN Rscript -e "install.packages('tidyverse')"
23 | RUN Rscript -e "install.packages('gridExtra')"
24 |
25 | # Installing makefile2graph
26 | # get OS updates and install build tools
27 | RUN apt-get update
28 | RUN apt-get install -y build-essential
29 |
30 | # install wget, make, and git
31 | RUN apt-get install -y wget
32 | RUN apt-get install -y make git
33 |
34 | # clone, build makefile2graph,
35 | # then copy key makefile2graph files to usr/bin so they will be in $PATH
36 | RUN git clone https://github.com/lindenb/makefile2graph.git
37 |
38 | RUN make -C makefile2graph/.
39 |
40 | RUN cp makefile2graph/makefile2graph /usr/bin
41 | RUN cp makefile2graph/make2graph /usr/bin
42 |
43 |
44 | # Install python 3
45 | RUN apt-get update \
46 | && apt-get install -y python3-pip python3-dev \
47 | && cd /usr/local/bin \
48 | && ln -s /usr/bin/python3 python \
49 | && pip3 install --upgrade pip
50 |
51 | # Get python package dependencies
52 | RUN apt-get install -y python3-tk
53 |
54 | # Install python packages
55 | RUN pip3 install numpy
56 | RUN pip3 install pandas
57 | RUN pip3 install scikit-learn
58 | RUN pip3 install argparse
59 | RUN apt-get install -y graphviz && pip3 install graphviz
60 | RUN apt-get update && \
61 | pip3 install matplotlib && \
62 | rm -rf /var/lib/apt/lists/*
63 |
--------------------------------------------------------------------------------
/results/feature_importance.csv:
--------------------------------------------------------------------------------
1 | ,features,importance
2 | 0,home_game.x,0.0
3 | 1,shots_ratio_prev1.x,0.0
4 | 2,shots_ratio_prev10.x,0.0
5 | 3,goals_ratio_prev1.x,0.0
6 | 4,goals_ratio_prev3.x,0.0
7 | 5,won_prev1.x,0.0
8 | 6,won_prev3.x,0.0
9 | 7,won_prev5.x,0.0
10 | 8,save_ratio_prev1.x,0.0
11 | 9,save_ratio_prev3.x,0.0
12 | 10,home_game.y,0.0
13 | 11,goals_ratio_prev10.y,0.0
14 | 12,won_prev1.y,0.0
15 | 13,won_prev3.y,0.0
16 | 14,won_prev5.y,0.0
17 | 15,won_prev1.diff,0.0
18 | 16,won_prev3.diff,0.0
19 | 17,won_prev5.diff,0.0
20 | 18,won_prev10.diff,0.0
21 | 19,shots_ratio_prev3.diff,0.0
22 | 20,shots_ratio_prev10.diff,0.0
23 | 21,goals_ratio_prev5.diff,0.0
24 | 22,save_ratio_prev5.diff,0.0
25 | 23,save_ratio_prev5.y,0.006740034204196274
26 | 24,shots_ratio_prev5.x,0.007381942223643535
27 | 25,shots_ratio_prev5.diff,0.012303237039405892
28 | 26,won_prev10.y,0.012918398891376186
29 | 27,shots_ratio_prev1.diff,0.012945145058853153
30 | 28,save_ratio_prev5.x,0.013577896732677664
31 | 29,game_id,0.016308152820509035
32 | 30,season.x,0.019480125312392665
33 | 31,save_ratio_prev1.y,0.020697830311677448
34 | 32,save_ratio_prev10.x,0.020803655357540873
35 | 33,save_ratio_prev10.y,0.02128832833182049
36 | 34,shots_ratio_prev10.y,0.021970066141796238
37 | 35,won_prev10.x,0.023797050589377165
38 | 36,save_ratio_prev1.diff,0.024547260638515193
39 | 37,goals_ratio_prev5.y,0.024606474078811787
40 | 38,save_ratio_prev3.y,0.02467437814668221
41 | 39,goals_ratio_prev1.diff,0.025556936364929488
42 | 40,goals_ratio_prev1.y,0.03444906371033649
43 | 41,goals_ratio_prev3.diff,0.03496709474357462
44 | 42,team_id.y,0.03766575336336649
45 | 43,goals_ratio_prev10.diff,0.03774350449363548
46 | 44,goals_ratio_prev5.x,0.04330553181969519
47 | 45,shots_ratio_prev5.y,0.04789472051319979
48 | 46,save_ratio_prev3.diff,0.04917666211289904
49 | 47,shots_ratio_prev1.y,0.05531138333824215
50 | 48,shots_ratio_prev3.x,0.057994558803350624
51 | 49,shots_ratio_prev3.y,0.06431032694224251
52 | 50,save_ratio_prev10.diff,0.07152065148052206
53 | 51,goals_ratio_prev3.y,0.07717416650204013
54 | 52,goals_ratio_prev10.x,0.07888966993269009
55 |
--------------------------------------------------------------------------------
/source/exploring_nhl_data.R:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env Rscript
2 | # exploring_nhl_data.R
3 | # Aditya, Shayne, Nov 2018
4 | #
5 | # R script for exploring cleaned data from data/train.csv and generating exhibits
6 | # The script takes an input file and names of output files for figures as arguments
7 | # Usage: Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-1_home-away.jpg home_game.x "Canucks Home Game?" TRUE "Figure 1: Impact of game location - home or away"
8 |
9 | # loading the required libraries
10 | library(tidyverse)
11 | library(gridExtra)
12 |
13 | # getting cmd arguments into variables
14 | args <- commandArgs(trailingOnly = TRUE)
15 | input_file <- args[1]
16 | output_file <- args[2]
17 | x_var <- args[3]
18 | x_lab <- args[4]
19 | x_categorical <- args[5]
20 | fig_title <- args[6]
21 |
22 | # uncomment these lines to run interactively (keep them commented when running via Rscript)
23 | # input_file <- "data/train.csv"
24 | # output_file <- "imgs/fig-2_win-diff.jpg"
25 | # x_var <- "won_prev1.diff"
26 | # x_lab <- ""
27 | # x_categorical <- FALSE
28 | # fig_title <- "Figure 2: Difference of moving average wins ratio between Canucks and opponent \n Note: single feature density histogram, blue line is for games won by Canucks, red is for losses"
29 |
30 | main <- function() {
31 | # reading the input data
32 | nhl_data <- read_csv(input_file)
33 |
34 | # call the necessary plot functions
35 | if (as.logical(x_categorical)) {
36 | generate_plot_categorical(nhl_data)
37 | } else {
38 | generate_plot_row(nhl_data)
39 | }
40 | }
41 |
42 | # first figure: home or away
43 | generate_plot_categorical <- function(data) {
44 |
45 | # subset just the data we need for plot
46 | plot_data <- data[c("won.x", x_var)]
47 | colnames(plot_data)[1] <- "won"
48 | colnames(plot_data)[2] <- "x"
49 |
50 | plot_data %>%
51 | ggplot(aes(x = x, y = won)) +
52 | geom_bin2d() +
53 | stat_bin2d(geom = "text", aes(label = ..count..)) +
54 | scale_fill_gradient(low='gray', high='red') +
55 | labs(x=x_lab, y="Did Canucks win?",
56 | title = fig_title) +
57 | guides(fill = FALSE) +
58 | theme_minimal()
59 | ggsave(output_file)
60 | }
61 |
62 | # function to generate row of plots for all moving averages
63 | generate_plot_row <- function(data) {
64 | plot_data <- data[c("won.x", x_var)]
65 | colnames(plot_data)[1] <- "won"
66 | colnames(plot_data)[2] <- "x"
67 | plot1 <- generate_plot_numerical(plot_data, "previous 1-games")
68 |
69 | plot_data[2] <- data[,str_replace(x_var,"1","3")]
70 | plot3 <- generate_plot_numerical(plot_data, "previous 3-games")
71 |
72 | plot_data[2] <- data[,str_replace(x_var,"1","5")]
73 | plot5 <- generate_plot_numerical(plot_data, "previous 5-games")
74 |
75 | plot_data[2] <- data[,str_replace(x_var,"1","10")]
76 | plot10 <- generate_plot_numerical(plot_data, "previous 10-games")
77 |
78 | g <- arrangeGrob(nrow=1, plot1, plot3, plot5, plot10,
79 | top=paste(fig_title," \n Note: blue line is for games won by Canucks, red is for losses"))
80 | ggsave(file=output_file, g, width=200, height = 50, units = "mm")
81 | }
82 |
83 | # function to generate a single density plot for a given variable
84 | generate_plot_numerical <- function(data, title) {
85 |
86 | x_min = min(data[2])
87 | x_max = max(data[2])
88 |
89 | data %>%
90 | ggplot(aes(x = x)) +
91 | geom_density(bw=0.2, aes(group=won, colour=won), show.legend=F) +
92 | xlim(x_min, x_max) +
93 | labs(x=x_lab,y="", caption=title) +
94 | theme_minimal()
95 | }
96 |
97 | # call main function
98 | main()
99 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile
2 | # Objective : This script runs the NHL win/loss prediction analysis from top to bottom
3 | # Team members :
4 | # Aditya Sharma (ashrm21)
5 | # Shayne Andrews (shayne19)
6 |
7 |
8 | ###########################
9 | # Run the whole pipeline
10 | ###########################
11 | # The Master step :
12 | # The all command will run the whole analysis including
13 | # - cleaning the data
14 | # - EDA
15 | # - hyperparameter tuning and cross-validation and finding feature importance
16 | # - fitting the final model
17 | # - creating the report for the analysis
18 | all : doc/results_report.md Makefile.png
19 |
20 | #################################
21 | # Steps involved in the analysis
22 | #################################
23 |
24 | # Step 1: Cleans the data and creates train and test datasets
25 | data/train.csv data/test.csv : source/cleaning_nhl_data.R data/game_teams_stats.csv data/team_id.txt
26 | Rscript source/cleaning_nhl_data.R data/game_teams_stats.csv data/train.csv data/test.csv data/team_id.txt
27 |
28 | # Step 2: EDA along with creating relevant graph using data generated from step 1
29 | imgs/fig-1_home-away.jpg : source/exploring_nhl_data.R data/train.csv
30 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-1_home-away.jpg home_game.x "Canucks Home Game?" TRUE "Figure 1: Impact of game location - home or away"
31 |
32 | # Step 3: EDA along with creating relevant graph using data generated from step 1
33 | imgs/fig-2_shots-diff.jpg : source/exploring_nhl_data.R data/train.csv
34 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-2_shots-diff.jpg shots_ratio_prev1.diff "" FALSE "Figure 2: Difference of moving average shots ratio between Canucks and opponent"
35 |
36 | # Step 4: Uses data created from step 1 to generate model selection table and feature importances using cross validation
37 | results/model_selection.csv results/feature_importance.csv results/max_depth.png : source/finding_best_model.py data/train.csv data/test.csv
38 | python3 source/finding_best_model.py data/train.csv data/test.csv results/
39 |
40 | # Step 5: Uses the output from step 4 to build the final model using top 12 features and generate the final results
41 | results/final_result.csv results/dtree results/dtree.pdf : source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/feature_importance.csv
42 | python3 source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/ results/feature_importance.csv
43 |
44 | # Step 6: creating the markdown report file using the output from all the above steps
45 | doc/results_report.md : doc/results_report.Rmd data/train.csv data/test.csv imgs/fig-1_home-away.jpg imgs/fig-2_shots-diff.jpg results/model_selection.csv results/feature_importance.csv results/max_depth.png results/final_result.csv results/dtree results/dtree.pdf
46 | Rscript -e "rmarkdown::render('doc/results_report.Rmd')"
47 |
48 | Makefile.png : Makefile
49 | makefile2graph > Makefile.dot
50 | dot -Tpng Makefile.dot -o Makefile.png
51 |
52 | ###########################
53 | # Remove all files
54 | ###########################
55 | # Removes all the files generated during the analysis
56 | clean :
57 | rm -f results/model_selection.csv
58 | rm -f results/dtree
59 | rm -f results/dtree.pdf
60 | rm -f results/feature_importance.csv
61 | rm -f results/final_result.csv
62 | rm -f results/max_depth.png
63 | rm -f data/train.csv
64 | rm -f data/test.csv
65 | rm -f imgs/fig-1_home-away.jpg
66 | rm -f imgs/fig-2_shots-diff.jpg
67 | rm -f doc/results_report.md
68 | rm -f doc/results_report.html
69 | rm -f Rplots.pdf
70 | rm -f Makefile.dot
71 | rm -f Makefile.png
72 |
--------------------------------------------------------------------------------
/source/building_model.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # building_model.py
3 | # Shayne, Aditya, Nov 2018
4 | #
5 | # This script builds the final model on the data using
6 | # the exported information from finding_best_model.py
7 |
8 | # Dependencies: argparse, pandas, numpy, graphviz, sklearn, matplotlib
9 | #
10 | # Usage: python3 source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/ results/feature_importance.csv
11 |
12 | # importing the required libraries
13 | import pandas as pd
14 | import numpy as np
15 | import argparse
16 | import graphviz
17 | import matplotlib.pyplot as plt
18 | from sklearn import tree
19 | from sklearn.ensemble import RandomForestClassifier
20 |
21 | np.random.seed(1234)
22 |
23 | # read in command line arguments
24 | parser = argparse.ArgumentParser()
25 |
26 | parser.add_argument('model_select')
27 | parser.add_argument('training_data')
28 | parser.add_argument('test_data')
29 | parser.add_argument('output_folder')
30 | parser.add_argument('imp_features')
31 | args = parser.parse_args()
32 |
33 | # putting values from arguments into variables
34 | model_select = args.model_select
35 | train_data = args.training_data
36 | test_data = args.test_data
37 | output_folder = args.output_folder
38 | features = args.imp_features
39 |
40 | # reading required files
41 | select_model = pd.read_csv(model_select)
42 | feature_importance = pd.read_csv(features)
43 | train = pd.read_csv(train_data)
44 | test = pd.read_csv(test_data)
45 |
46 | # extracting the features to be used in the final model
47 | features = feature_importance.sort_values(by = ['importance'], ascending = False)
48 | top_n_features = [i for i in features['features'][:12]]
49 |
50 | # subsetting the train and test data with relevant features
51 | Xtrain = train.loc[:, top_n_features]
52 | Xtest = test.loc[:, top_n_features]
53 | ytrain = train['won.x']
54 | ytest = test['won.x']
55 |
56 |
57 | def build_final_model_dt(best_depth):
58 | '''
59 | function to build final model using a decision tree
60 | '''
61 | # fitting the final model using a decision tree with max_depth = best_depth
62 | final_model = tree.DecisionTreeClassifier(max_depth = best_depth, random_state = 1234)
63 | final_model.fit(Xtrain, ytrain)
64 |
65 | # predicting on the test dataset
66 | predictions = final_model.predict(Xtest)
67 | accuracy = final_model.score(Xtest, ytest)
68 | #accuracy_dt = final_model_dt.score(Xtest, ytest)
69 | graph = save_and_show_decision_tree(final_model, feature_names = top_n_features,
70 | class_names=['Loss', 'Win'],
71 | save_file_prefix = output_folder + 'dtree')
72 | #print("Final accuracy obtained with DT on the test dataset is: {0}".format(accuracy))
73 | return accuracy
74 |
75 |
76 | def build_final_model_rf(best_depth):
77 | '''
78 | function to build final model using a random forest
79 | '''
80 | # fitting the final model using a random forest with n_estimators = 500 and max_depth = best depth
81 | final_model = RandomForestClassifier(n_estimators = 500, max_depth = best_depth, random_state = 1234)
82 | final_model.fit(Xtrain, ytrain)
83 |
84 | # predicting on the test dataset
85 | predictions = final_model.predict(Xtest)
86 | accuracy = final_model.score(Xtest, ytest)
87 | #accuracy_dt = final_model_dt.score(Xtest, ytest)
88 | print("Final accuracy obtained with RF on the test dataset is: {0}".format(accuracy))
89 | return accuracy
90 |
91 | # function to save decision tree created and store it in results directory
92 | def save_and_show_decision_tree(model,
93 | feature_names,
94 | class_names,
95 | save_file_prefix, **kwargs):
96 | """
97 | Saves the decision tree model as a pdf
98 | """
99 | dot_data = tree.export_graphviz(model, out_file=None,
100 | feature_names=feature_names,
101 | class_names=class_names,
102 | filled=True, rounded=True,
103 | special_characters=True, **kwargs)
104 |
105 | graph = graphviz.Source(dot_data)
106 | graph.render(save_file_prefix)
107 | return graph
108 |
109 |
110 | def export_final_result(best_depth_rf, best_rf_accuracy):
111 | '''
112 | function to export final table of summary
113 | '''
114 | result = pd.DataFrame({'depth' : [best_depth_rf],
115 | 'algorithm' : ['RandomForest'],
116 | 'accuracy' : [best_rf_accuracy]})
117 | result.to_csv(output_folder + 'final_result.csv')
118 |
119 | def main():
120 |
121 | best_depth_dt = np.argmax(select_model['scores_dt']) + 1
122 | #print("The best depth for decision tree is: {0}".format(best_depth_dt))
123 | best_depth_rf = np.argmax(select_model['scores_rf']) + 1
124 | print("The best depth for random forest is: {0}".format(best_depth_rf))
125 |
126 | # building final models
127 | best_dt_accuracy = build_final_model_dt(best_depth_dt)
128 | best_rf_accuracy = build_final_model_rf(best_depth_rf)
129 |
130 | # exporting final model using random forest
131 | export_final_result(best_depth_rf, best_rf_accuracy)
132 |
133 |
134 | # call main function
135 | if __name__ == "__main__":
136 | main()
137 |
--------------------------------------------------------------------------------
/source/finding_best_model.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # finding_best_model.py
3 | # Shayne, Aditya, Nov 2018
4 | #
5 | # This script finds the best parameters for the models
6 | # using decison trees and random forest algorithm and
7 | # exports the features and model selection info
8 |
9 | # Dependencies: argparse, pandas, numpy, graphviz, sklearn, matplotlib
10 | #
11 | # Usage: python3 source/finding_best_model.py data/train.csv data/test.csv results/
12 |
13 | # importing the required libraries
14 | import pandas as pd
15 | import numpy as np
16 | import argparse
17 | import matplotlib.pyplot as plt
18 | from sklearn import tree
19 | from sklearn.ensemble import RandomForestClassifier
20 | from sklearn.model_selection import cross_val_score
21 |
22 | from sklearn.tree import export_graphviz
23 | # !pip install graphviz
24 | import graphviz
25 |
26 | np.random.seed(1234)
27 |
28 | # read in command line arguments
29 | parser = argparse.ArgumentParser()
30 | parser.add_argument('training_data')
31 | parser.add_argument('test_data')
32 | parser.add_argument('output_folder')
33 | args = parser.parse_args()
34 |
35 | # putting values from arguments into variables
36 | train_data = args.training_data
37 | test_data = args.test_data
38 | output_folder = args.output_folder
39 |
40 | # reading required files
41 | train = pd.read_csv(train_data)
42 | test = pd.read_csv(test_data)
43 |
44 | def get_features():
45 | ''' function to get the relevant features to put into the model
46 | '''
47 | features = [f for f in train.columns.values if f != 'won.x']
48 | return features
49 |
50 | def create_train_test_model(features):
51 | '''
52 | function to extract the data based on the selected features
53 | '''
54 | Xtrain = train.loc[:, features]
55 | ytrain = train['won.x']
56 |
57 | Xtest = test.loc[:, features]
58 | ytest = test['won.x']
59 |
60 | return Xtrain, ytrain, Xtest, ytest
61 |
62 |
63 | def get_feature_importance(Xtrain, ytrain, features):
64 | '''
65 | function to build a decision tree with all
66 | features to see feature importance
67 | '''
68 | model = tree.DecisionTreeClassifier(random_state = 1234)
69 | model.fit(Xtrain, ytrain)
70 | model.score(Xtrain, ytrain)
71 |
72 | imp = model.feature_importances_
73 | features = np.array(features)
74 |
75 | importance_map = zip(features, imp)
76 | sorted_features = sorted(importance_map, key=lambda x: x[1])
77 |
78 | top_n_features = [i[0] for i in sorted_features[:20]]
79 | return top_n_features, sorted_features
80 |
81 |
82 | def tuning_max_depth_dt(depth, Xtrain, ytrain):
83 | '''
84 | Function to tune max_depth for a decision tree
85 | '''
86 | # building a decision tree classifier
87 | print("Current Depth: {0}".format(depth))
88 | model = tree.DecisionTreeClassifier(max_depth = depth, random_state = 1234)
89 | model.fit(Xtrain, ytrain)
90 |
91 | curr_score = cross_val_score(model, Xtrain, ytrain, cv = 10)
92 | return np.mean(curr_score)
93 |
94 |
95 | def tuning_max_depth_rf(depth, Xtrain, ytrain):
96 | '''
97 | Function to tune max_depth for a random forest
98 | '''
99 | # building a random forest classifier
100 | print("Current Depth: {0}".format(depth))
101 | model = RandomForestClassifier(max_depth = depth, random_state = 1234,
102 | n_estimators = 300, verbose = 1)
103 | model.fit(Xtrain, ytrain)
104 |
105 | curr_score = cross_val_score(model, Xtrain, ytrain, cv = 10)
106 | return np.mean(curr_score)
107 |
108 | def export_cross_valid_plot(depth_vals, validation_dt):
109 | '''
110 | plotting and saving the variation of accuracy with max_depth
111 | '''
112 | plt.plot(depth_vals, validation_dt)
113 | plt.xlabel("max depth - hyperparameter")
114 | plt.ylabel("Accuracy")
115 | plt.title("Validation accuracy with max depth in a decision tree")
116 | plt.savefig(output_folder + "max_depth.png")
117 |
118 |
119 | def export_feature_importance(sorted_features):
120 | '''
121 | writing features with their importances in a data frame and exporting it
122 | '''
123 | feature_importance = pd.DataFrame(sorted_features, columns=['features', 'importance'])
124 | feature_importance.to_csv(output_folder + "feature_importance.csv")
125 |
126 | def main():
127 |
128 | features = get_features()
129 |
130 | # extracting the data based on the selected features
131 | Xtrain, ytrain, Xtest, ytest = create_train_test_model(features)
132 |
133 | top_n_features, sorted_features = get_feature_importance(Xtrain, ytrain, features)
134 | Xtrain = train.loc[:, top_n_features]
135 | Xtest = test.loc[:, top_n_features]
136 |
137 | # using cross validation to tune max_depth in a decision tree
138 | validation_dt = np.zeros(8)
139 | validation_rf = np.zeros(8)
140 |
141 | # trying different values of the hyperparameter max_depth
142 | depth_vals = np.arange(1, 9)
143 | for depth in depth_vals:
144 | validation_dt[depth - 1] = tuning_max_depth_dt(depth, Xtrain, ytrain)
145 |
146 | depth_vals = np.arange(1, 9)
147 | for depth in depth_vals:
148 | validation_rf[depth - 1] = tuning_max_depth_rf(depth, Xtrain, ytrain)
149 |
150 | # scoring the models on a large depth
151 | model = tree.DecisionTreeClassifier(max_depth = 25, random_state = 1234)
152 | score_depth_25_dt = tuning_max_depth_dt(25, Xtrain, ytrain)
153 | score_depth_25_rf = tuning_max_depth_rf(25, Xtrain, ytrain)
154 |
155 | print("Cross validation score with depth 25 on Decision Tree: {0}".format(score_depth_25_dt))
156 | print("Cross validation score with depth 25 on Random Forest: {0}".format(score_depth_25_rf))
157 |
158 | # exporting model selection table
159 | select_model = pd.DataFrame({'depth' : np.arange(1,9), 'scores_dt' : validation_dt, 'scores_rf' : validation_rf})
160 | select_model.to_csv(output_folder + "model_selection.csv")
161 |
162 | # exporting plot for CV and feature importance table
163 | export_cross_valid_plot(depth_vals, validation_dt)
164 | export_feature_importance(sorted_features)
165 |
166 | # call main function
167 | if __name__ == "__main__":
168 | main()
169 |
--------------------------------------------------------------------------------
/source/cleaning_nhl_data.R:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env Rscript
2 | # cleaning_nhl_data.R
3 | # Aditya, Shayne, Nov 2018
4 | #
5 | # R script for reading and cleaning data from the game_teams_stats.csv file.
6 | # The script takes an input file and the names of the output train and test files as arguments.
7 | # The id of the team for which the analysis is performed is read from the file given as the last argument.
8 | # Usage: Rscript source/cleaning_nhl_data.R data/game_teams_stats.csv data/train.csv data/test.csv data/team_id.txt
9 |
10 | # loading the required libraries
11 | library(tidyverse)
12 | library(zoo)
13 |
14 | # getting cmd arguments into variables
15 | args <- commandArgs(trailingOnly = TRUE)
16 | input_file <- args[1]
17 | output_file_train <- args[2]
18 | output_file_test <- args[3]
19 | team <- args[4]
20 |
21 | # reading the input
22 | nhl_data <- read_csv(input_file)
23 |
24 | # reading the id of the team for which the analysis is to be performed
25 | team_of_interest <- read_tsv(team)
26 |
27 | # getting the columns with missing values
28 | nhl_data_missing <- nhl_data %>%
29 | select_if(function(x) any(is.na(x))) %>%
30 | summarise_all(funs(sum(is.na(.))))
31 |
32 | # getting the columns with empty values
33 | nhl_data_empty <- nhl_data %>%
34 | select_if(function(x) any(x == "")) %>%
35 | summarise_all(funs(sum(. == "")))
36 |
37 | # getting information by season
38 | nhl_data_req <- nhl_data %>%
39 | arrange(game_id) %>%
40 | mutate(season = str_sub(game_id, start = 1, end = 4),
41 | reg_season = str_sub(game_id, start = 5, end = 6)) %>%
42 | group_by(season, reg_season) %>%
43 | filter(reg_season == "02")
44 |
45 | # removing 2012 season as it is too long ago and was shortened by lockout
46 | nhl_data_req <- nhl_data_req %>%
47 | left_join(nhl_data, by = c("game_id" = "game_id")) %>%
48 | filter(team_id.x != team_id.y, season != "2012")
49 |
50 | # creating new features for the model
51 | nhl_data_ready <- nhl_data_req %>%
52 | arrange(team_id.x, game_id) %>%
53 | group_by(team_id.x, season) %>%
54 | mutate(won_prev1 = rollapply(won.x, mean, align='right', fill=NA, width = list(-1:-1)),
55 | won_prev3 = rollapply(won.x, mean, align='right', fill=NA, width = list(-3:-1)),
56 | won_prev5 = rollapply(won.x, mean, align='right', fill=NA, width = list(-5:-1)),
57 | won_prev10 = rollapply(won.x, mean, align='right', fill=NA, width = list(-10:-1)),
58 |
59 | home_game = HoA.x=="home",
60 |
61 | shots_ratio = shots.x / (shots.x + shots.y),
62 | goals_ratio = goals.x / (goals.x + goals.y),
63 | save_ratio = 1 - goals.y / shots.y,
64 |
65 | shots_ratio_prev1 = rollapply(shots_ratio, mean, align='right', fill=NA, width = list(-1:-1)),
66 | shots_ratio_prev3 = rollapply(shots_ratio, mean, align='right', fill=NA, width = list(-3:-1)),
67 | shots_ratio_prev5 = rollapply(shots_ratio, mean, align='right', fill=NA, width = list(-5:-1)),
68 | shots_ratio_prev10 = rollapply(shots_ratio, mean, align='right', fill=NA, width = list(-10:-1)),
69 |
70 | goals_ratio_prev1 = rollapply(goals_ratio, mean, align='right', fill=NA, width = list(-1:-1)),
71 | goals_ratio_prev3 = rollapply(goals_ratio, mean, align='right', fill=NA, width = list(-3:-1)),
72 | goals_ratio_prev5 = rollapply(goals_ratio, mean, align='right', fill=NA, width = list(-5:-1)),
73 | goals_ratio_prev10 = rollapply(goals_ratio, mean, align='right', fill=NA, width = list(-10:-1)),
74 |
75 | save_ratio_prev1 = rollapply(save_ratio, mean, align='right', fill=NA, width = list(-1:-1)),
76 | save_ratio_prev3 = rollapply(save_ratio, mean, align='right', fill=NA, width = list(-3:-1)),
77 | save_ratio_prev5 = rollapply(save_ratio, mean, align='right', fill=NA, width = list(-5:-1)),
78 | save_ratio_prev10 = rollapply(save_ratio, mean, align='right', fill=NA, width = list(-10:-1))) %>%
79 | drop_na() %>%
80 | select(game_id, season, team_id = team_id.x, home_game,
81 | shots_ratio_prev1, shots_ratio_prev3, shots_ratio_prev5, shots_ratio_prev10,
82 | goals_ratio_prev1, goals_ratio_prev3, goals_ratio_prev5, goals_ratio_prev10,
83 | won_prev1, won_prev3, won_prev5, won_prev10,
84 | save_ratio_prev1, save_ratio_prev3, save_ratio_prev5, save_ratio_prev10,
85 | won = won.x)
86 |
87 | # adding opponent information
88 | nhl_data_ready <- nhl_data_ready %>%
89 | left_join(nhl_data_ready, by = c("game_id" = "game_id")) %>%
90 | filter(team_id.x != team_id.y) %>%
91 | filter(team_id.x == as.numeric(team_of_interest$team_id)) %>%
92 | mutate(won_prev1.diff = won_prev1.x - won_prev1.y,
93 | won_prev3.diff = won_prev3.x - won_prev3.y,
94 | won_prev5.diff = won_prev5.x - won_prev5.y,
95 | won_prev10.diff = won_prev10.x - won_prev10.y,
96 |
97 | shots_ratio_prev1.diff = shots_ratio_prev1.x - shots_ratio_prev1.y,
98 | shots_ratio_prev3.diff = shots_ratio_prev3.x - shots_ratio_prev3.y,
99 | shots_ratio_prev5.diff = shots_ratio_prev5.x - shots_ratio_prev5.y,
100 | shots_ratio_prev10.diff = shots_ratio_prev10.x - shots_ratio_prev10.y,
101 |
102 | goals_ratio_prev1.diff = goals_ratio_prev1.x - goals_ratio_prev1.y,
103 | goals_ratio_prev3.diff = goals_ratio_prev3.x - goals_ratio_prev3.y,
104 | goals_ratio_prev5.diff = goals_ratio_prev5.x - goals_ratio_prev5.y,
105 | goals_ratio_prev10.diff = goals_ratio_prev10.x - goals_ratio_prev10.y,
106 |
107 | save_ratio_prev1.diff = save_ratio_prev1.x - save_ratio_prev1.y,
108 | save_ratio_prev3.diff = save_ratio_prev3.x - save_ratio_prev3.y,
109 | save_ratio_prev5.diff = save_ratio_prev5.x - save_ratio_prev5.y,
110 | save_ratio_prev10.diff = save_ratio_prev10.x - save_ratio_prev10.y) %>%
111 | group_by(season.x) %>%
112 | select(-c(won.y, team_id.x, season.y))
113 |
114 | # creating training data
115 | nhl_data_train <- nhl_data_ready %>%
116 | filter(season.x != "2017")
117 |
118 | # creating test data
119 | nhl_data_test <- nhl_data_ready %>%
120 | filter(season.x == "2017")
121 |
122 | # writing the train and the test data to csv files
123 | write_csv(nhl_data_train, output_file_train)
124 | write_csv(nhl_data_test, output_file_test)
125 |
--------------------------------------------------------------------------------
/doc/results_report.md:
--------------------------------------------------------------------------------
1 | Results of NHL Prediction Model
2 | ================
3 | Shayne Andrews & Aditya Sharma
4 | November 2018
5 |
6 | Introduction
7 | ------------
8 |
9 | NHL hockey games are notoriously difficult to predict. There is [common acceptance](https://www.nhlnumbers.com/2013/08/01/machine-learning-and-hockey-is-there-a-theoretical-limit-on-predictions) among hockey analytics enthusiasts that it is not possible to do better than 62% accuracy (i.e. 38% is due to luck). Interestingly, the NHL has recently [announced a partnership](https://www.nhl.com/news/nhl-mgm-resorts-sports-betting-partnership/c-301392322) with MGM Resorts to enable sports betting.
10 |
11 | > Could we do better than 62% accuracy using supervised machine learning?
12 |
13 | The Question
14 | ------------
15 |
16 | **Will the Vancouver Canucks win or lose their next game?**
17 |
18 | We simplify the analysis here by focusing on a single team (the Canucks), although we still need to consider data on all of the other teams as opponents for each prediction. In the future we would like to extend the model to allow predictions for any team, and to investigate whether a single model for all teams performs better or worse than separate models for each team.
19 |
20 | The Data
21 | --------
22 |
23 | The NHL has committed to providing detailed player tracking data starting next season, but in the meantime, we will see how accurate we can be with less granular data. The data we use here is *very* basic, but we are building a reproducible pipeline that will allow us to easily expand the model in the future.
24 |
25 | Plenty of data is available publicly on the [NHL website](https://www.nhl.com), but we've managed to avoid scraping ourselves and obtained the data directly from [Martin Ellis on Kaggle](https://www.kaggle.com/martinellis/nhl-game-data).
26 |
27 | Notably, this data includes all games starting with the 2012-13 season through the end of the 2017-18 season. We've excluded the 2012-13 season from the model for two reasons: 1) it was a lockout-shortened season, and 2) it is older data and not as relevant for future predictions. Also, we exclude playoff and pre-season games and use only the regular season.
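
A minimal pandas sketch of this filtering, assuming the Kaggle `game_teams_stats.csv` layout used in `source/cleaning_nhl_data.R` (the first four digits of `game_id` encode the season and digits five and six the game type, with "02" marking regular-season games):

```python
import pandas as pd

games = pd.read_csv("data/game_teams_stats.csv")

# season and game type are encoded in game_id (e.g. 2016020001)
game_id = games["game_id"].astype(str)
games["season"] = game_id.str[:4]
games["game_type"] = game_id.str[4:6]

# keep regular-season games only, and drop the lockout-shortened 2012-13 season
games = games[(games["game_type"] == "02") & (games["season"] != "2012")]
```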
28 |
29 | There are several tables of interest, but we have focused exclusively on `game_teams_stats.csv`, with the following key variables driving our prediction:
30 | - Did the team of interest win the game? (won column, TRUE/FALSE)
31 | - Home or Away game? (HoA column, home/away)
32 | - Who is the opponent? (team\_id column, 1-31)
33 | - What proportion of *goals* were scored by the team of interest? (calculated using goals column for both teams, 0-1)
34 | - What proportion of *shots* were made by the team of interest? (calculated using shots column for both teams, 0-1)
35 | - What proportion of *shots* were *saved* by the team of interest? (calculated using goals and shots columns, 0-1)
36 |
37 | However, when predicting game results we don't know the goals/shots/saves in advance from the game we're trying to predict. So we will look backwards, and see if we can use these stats from previous games for both teams to make better predictions. In particular, we will use 1-, 3-, 5-, and 10-game averages for `won_ratio`, `goals_ratio`, `shots_ratio`, and `save_ratio`. One side effect of this is that we must "throw away" the first 10 games of each season as we don't want to be looking back into the previous season to make these predictions.
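
To make the lag construction concrete, here is a minimal pandas sketch of one of these lagged rolling averages, mirroring the `rollapply(..., width = list(-k:-1))` calls in `source/cleaning_nhl_data.R` (column names are illustrative; the R script is the authoritative implementation):

```python
import pandas as pd

def add_lagged_means(games: pd.DataFrame, col: str, windows=(1, 3, 5, 10)) -> pd.DataFrame:
    """Add k-game moving averages of a per-game stat, computed over previous games only."""
    grouped = games.sort_values("game_id").groupby(["team_id", "season"])[col]
    for k in windows:
        # shift(1) so the window covers the k games before this one,
        # never the game we are trying to predict
        games[f"{col}_prev{k}"] = grouped.transform(
            lambda s: s.shift(1).rolling(k, min_periods=k).mean()
        )
    return games

# e.g. goals_ratio = goals scored by the team / total goals in the game
# games = add_lagged_means(games, "goals_ratio")
```

Requiring the full window (`min_periods=k`) leaves the first ten games of each season with missing values, which is why they are dropped.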
38 |
39 | In addition to the above ratios for each team, we have engineered an additional *difference* feature, which is the ratio for the Canucks minus the ratio for their opponent. The idea is that it gives the decision tree a sense of the relative strength of the two teams in a single feature; a small illustrative snippet follows.
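
For example (illustrative only, using the `.x`/`.y` suffixes the cleaning script attaches to the Canucks and opponent columns of the joined frame):

```python
# difference features: Canucks ratio minus the opponent's ratio
for col in ["won_prev5", "goals_ratio_prev5", "shots_ratio_prev5", "save_ratio_prev5"]:
    games[f"{col}.diff"] = games[f"{col}.x"] - games[f"{col}.y"]
```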
40 |
41 | So we have 72 games in each of 5 seasons, 360 games in total for the Canucks. We are splitting this data into train/test by season rather than randomly. For a prediction such as this, we want to train the model looking backwards and test the model looking forwards. So we will do this as if we are standing at the beginning of the 2017-18 season, and put that season of data aside to test our final model.
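
The split itself is a one-liner on the season column; the R cleaning script does the equivalent, and the snippet below is only a pandas sketch of it:

```python
# hold out the most recent season as the test set instead of splitting randomly
train = games[games["season.x"] != "2017"]
test = games[games["season.x"] == "2017"]

train.to_csv("data/train.csv", index=False)
test.to_csv("data/test.csv", index=False)
```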
42 |
43 | Some observations about our data (specifically on the training data):
44 |
45 |
46 |
47 |
48 | We can see in Figure 1 that when the Canucks are on the road they are far more likely to lose the game. This makes sense and supports the commonly accepted idea of "home ice advantage".
49 |
50 |
51 |
52 | We can see in Figure 2 that the difference in shots ratio is only a weak predictor of whether the Canucks win or lose their next game, but it does help a little. Using only the previous 1 game appears to be slightly better than the longer windows.
53 |
54 | The Model
55 | ---------
56 |
57 | We looked at many different combinations of features and used 10-fold cross validation on our training data to choose features and hyperparameters. We also considered both a single decision tree model, and a random forest which should be a bit more robust.
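
A condensed sketch of that tuning loop (the full version, which also pre-filters features by importance, lives in `source/finding_best_model.py`):

```python
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

train = pd.read_csv("data/train.csv")
X, y = train.drop(columns=["won.x"]), train["won.x"]

rows = []
for depth in range(1, 9):
    dt = DecisionTreeClassifier(max_depth=depth, random_state=1234)
    rf = RandomForestClassifier(n_estimators=300, max_depth=depth, random_state=1234)
    rows.append({"depth": depth,
                 "scores_dt": cross_val_score(dt, X, y, cv=10).mean(),
                 "scores_rf": cross_val_score(rf, X, y, cv=10).mean()})

pd.DataFrame(rows).to_csv("results/model_selection.csv", index=False)
```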
58 |
59 | In the end, we included the top 12 features using the `feature_importances_` attribute in the `sklearn` decision tree. They are as follows (in descending order of importance):
60 | - `goals_ratio_prev10.x`
61 | - `goals_ratio_prev3.y`
62 | - `save_ratio_prev10.diff`
63 | - `shots_ratio_prev3.y`
64 | - `shots_ratio_prev3.x`
65 | - `shots_ratio_prev1.y`
66 | - `save_ratio_prev3.diff`
67 | - `shots_ratio_prev5.y`
68 | - `goals_ratio_prev5.x`
69 | - `goals_ratio_prev10.diff`
70 | - `team_id.y`
71 | - `goals_ratio_prev3.diff`
72 |
73 | Note: those ending in `.x` are for the Canucks, `.y` is their opponent, and `.diff` is the difference between the two.
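
The selection itself is only a few lines in `source/building_model.py`; roughly:

```python
import pandas as pd

train = pd.read_csv("data/train.csv")
test = pd.read_csv("data/test.csv")
importance = pd.read_csv("results/feature_importance.csv")

# keep the 12 features with the largest importance values
top_12 = (importance.sort_values("importance", ascending=False)
                    .head(12)["features"].tolist())

Xtrain, ytrain = train[top_12], train["won.x"]
Xtest, ytest = test[top_12], test["won.x"]
```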
74 |
75 | Testing out some different depths it is clear that we are overfitting if we go beyond depth 3 or 4 on a decision tree.
76 |
77 | 
78 |
79 | Looking at both decision trees and random forests ([full results here](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/results/model_selection.csv)), we get an optimal depth of 2 with a cross-validation accuracy of 55.2% for the decision tree, and an optimal depth of 1 with a **cross-validation accuracy of 58.8%** for the random forest.
80 |
81 | So we go forward with the **depth-1 random forest** prediction model (number of trees = 500).
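
A minimal sketch of the final fit and test-set evaluation, mirroring `source/building_model.py` and reusing the top-12 feature matrices from the snippet above:

```python
from sklearn.ensemble import RandomForestClassifier

final_model = RandomForestClassifier(n_estimators=500, max_depth=1, random_state=1234)
final_model.fit(Xtrain, ytrain)

# score on the held-out 2017-18 season
print("Test accuracy: {:.3f}".format(final_model.score(Xtest, ytest)))
```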
82 |
83 | The Results
84 | -----------
85 |
86 | Re-training this model on the entire training set and then testing our prediction accuracy on the test set (2017-18 season), we get a **test accuracy of 60.6%** ([see here](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/results/final_result.csv)). It is a little surprising that the test accuracy is higher than the cross-validation accuracy; we think this is due to the small size of the test set (only 72 games). We need to do some more testing on other teams to be more confident in our model.
87 |
88 | In any case, the accuracy is not great but is close to the suggested 62% maximum. We are only using very basic data and still came close to that mark, so we could likely improve by also looking at which players are playing (e.g. injuries, backup goalie) and whether each team had enough rest between games (back-to-back games are common and create some unfairness). Player tracking data could be especially helpful for predictions.
89 |
90 | References
91 | ----------
92 |
93 | Data: <https://www.kaggle.com/martinellis/nhl-game-data>
94 | Lags and Moving Means in dplyr:
95 | 62% accuracy cap for NHL predictions: <https://www.nhlnumbers.com/2013/08/01/machine-learning-and-hockey-is-there-a-theoretical-limit-on-predictions>
96 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Predicting win/loss for NHL games using Supervised Learning
2 |
3 |
4 |
5 | ### Contributors
6 |
7 | - [Shayne Andrews](https://github.com/shayne-andrews)
8 | - [Aditya Sharma](https://github.com/adityashrm21/)
9 |
10 | ## Introduction
11 | NHL hockey games are notoriously difficult to predict. There is [common acceptance](https://www.nhlnumbers.com/2013/08/01/machine-learning-and-hockey-is-there-a-theoretical-limit-on-predictions) among hockey analytics enthusiasts that it is not possible to do better than 62% accuracy (i.e. 38% is due to luck). Interestingly, the NHL has recently [announced a partnership](https://www.nhl.com/news/nhl-mgm-resorts-sports-betting-partnership/c-301392322) with MGM Resorts to enable sports betting.
12 |
13 | > Could we do better than 62% accuracy using supervised machine learning?
14 |
15 | At this stage the model is focused on the smaller subquestion of predicting games for **only the Vancouver Canucks**.
16 |
17 | ## Model Description
18 |
19 | Here is **[the report](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/doc/results_report.md)** from our initial model build which uses machine learning decision trees and random forests.
20 |
21 | The main variables we use in this model are as follows:
22 | - Did the team of interest win the game? (won column, TRUE/FALSE)
23 | - Home or Away game? (HoA column, home/away)
24 | - Who is the opponent? (team_id column, 1-31)
25 | - What proportion of *goals* were scored by the team of interest? (calculated using goals column for both teams, 0-1)
26 | - What proportion of *shots* were made by the team of interest? (calculated using shots column for both teams, 0-1)
27 | - What proportion of *shots* were *saved* by the team of interest? (calculated using goals and shots columns, 0-1)
28 |
29 | ## Usage
30 |
31 | #### Using Docker
32 |
33 | 1. Install [Docker](https://www.docker.com/get-started).
34 | 2. Download and clone this repository.
35 | 3. Run the following code in terminal to download the Docker image:
36 | ```
37 | docker pull adityashrm21/dsci-522_nhl-game-predictor
38 | ```
39 |
40 | 4. Use the command line to navigate to the root of this repo.
41 | 5. Type the following code into terminal to run the analysis (filling in PATH_ON_YOUR_COMPUTER with the absolute path to the root of this project on your computer):
42 |
43 | ```
44 | docker run --rm -it -e PASSWORD=nhlprediction -v PATH_ON_YOUR_COMPUTER:/home/nhl/nhl-game-predictor adityashrm21/dsci-522_nhl-game-predictor:latest make -C "/home/nhl/nhl-game-predictor" all
45 | ```
46 |
47 | 6. If you would like a fresh start, type the following:
48 |
49 | ```
50 | docker run --rm -it -e PASSWORD=nhlprediction -v PATH_ON_YOUR_COMPUTER:/home/nhl/nhl-game-predictor adityashrm21/dsci-522_nhl-game-predictor:latest make -C "/home/nhl/nhl-game-predictor" clean
51 | ```
52 |
53 | #### Using make command
54 |
55 | Use [Makefile](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/Makefile) to run the whole pipeline:
56 |
57 | - Clone this repository and then navigate to the root directory and run the following commands:
58 |
59 | ```bash
60 | make clean # to clean up existing files and pre-existing results/images
61 | make all # to run all the scripts and create fresh results and output
62 | ```
63 | The description of the files and the make commands is provided below as well as in the [Makefile](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/Makefile).
64 |
65 | ```bash
66 | #################################
67 | # Steps involved in the analysis
68 | #################################
69 |
70 | # Step 1: Cleans the data and creates train and test datasets
71 | data/train.csv data/test.csv : source/cleaning_nhl_data.R data/game_teams_stats.csv data/team_id.txt
72 | Rscript source/cleaning_nhl_data.R data/game_teams_stats.csv data/train.csv data/test.csv data/team_id.txt
73 |
74 | # Step 2: EDA along with creating relevant graph using data generated from step 1
75 | imgs/fig-1_home-away.jpg : source/exploring_nhl_data.R data/train.csv
76 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-1_home-away.jpg home_game.x "Canucks Home Game?" TRUE "Figure 1: Impact of game location - home or away"
77 |
78 | # Step 3: EDA along with creating relevant graph using data generated from step 1
79 | imgs/fig-2_shots-diff.jpg : source/exploring_nhl_data.R data/train.csv
80 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-2_shots-diff.jpg shots_ratio_prev1.diff "" FALSE "Figure 2: Difference of moving average shots ratio between Canucks and opponent"
81 |
82 | # Step 4: Uses data created from step 1 to generate model selection table and feature importances using cross validation
83 | results/model_selection.csv results/feature_importance.csv results/max_depth.png : source/finding_best_model.py data/train.csv data/test.csv
84 | python3 source/finding_best_model.py data/train.csv data/test.csv results/
85 |
86 | # Step 5: Uses the output from step 4 to build the final model using top 12 features and generate the final results
87 | results/final_result.csv results/dtree results/dtree.pdf : source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/feature_importance.csv
88 | python3 source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/ results/feature_importance.csv
89 |
90 | # Step 6: creating the markdown report file using the output from all the above steps
91 | doc/results_report.md : doc/results_report.Rmd data/train.csv data/test.csv imgs/fig-1_home-away.jpg imgs/fig-2_shots-diff.jpg results/model_selection.csv results/feature_importance.csv results/max_depth.png results/final_result.csv results/dtree results/dtree.pdf
92 | Rscript -e "rmarkdown::render('doc/results_report.Rmd')"
93 | ```
94 | #### Running scripts manually
95 |
96 | - Clone this repo, and using the command line, navigate to the root of this project.
97 |
98 | - Run the following commands:
99 |
100 | ```
101 | Rscript source/cleaning_nhl_data.R data/game_teams_stats.csv data/train.csv data/test.csv data/team_id.txt
102 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-1_home-away.jpg home_game.x "Canucks Home Game?" TRUE "Figure 1: Impact of game location - home or away"
103 | Rscript source/exploring_nhl_data.R data/train.csv imgs/fig-2_shots-diff.jpg shots_ratio_prev1.diff "" FALSE "Figure 2: Difference of moving average shots ratio between Canucks and opponent"
104 | python3 source/finding_best_model.py data/train.csv data/test.csv results/
105 | python3 source/building_model.py results/model_selection.csv data/train.csv data/test.csv results/ results/feature_importance.csv
106 | Rscript -e "rmarkdown::render('doc/results_report.Rmd')"
107 | ```
108 |
109 | ## Makefile dependency graph
110 |
111 |
112 |
113 |
114 |
115 | ## Dependencies
116 | - R (3.5.1) & R libraries:
117 |
118 | | Package | Version |
119 | | :------------- | :------------- |
120 | | `knitr` | 1.20 |
121 | | `tidyverse` | 1.2.1 |
122 | | `rmarkdown` | 1.10 |
123 | |`here` | 0.1 |
124 | |`zoo` | 1.8.4 |
125 | |`gridExtra` | 2.3 |
126 |
127 | - Python 3 & Python libraries:
128 |
129 | | Package | Version |
130 | | :------------- | :------------- |
131 | | `matplotlib` | 2.2.2 |
132 | | `numpy` | 1.14.3 |
133 | | `pandas` | 0.23.0 |
134 | |`sklearn` | 0.19.1 |
135 | |`argparse` | 1.4.0 |
136 | |`graphviz` | 0.10.1 |
137 |
--------------------------------------------------------------------------------
/doc/results_report.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Results of NHL Prediction Model"
3 | author: "Shayne Andrews & Aditya Sharma"
4 | date: "November 2018"
5 | output: github_document
6 | ---
7 |
8 | ```{r setup, include=FALSE}
9 | knitr::opts_chunk$set(echo = TRUE)
10 | knitr::opts_knit$set(root.dir = here::here())
11 | ```
12 |
13 | ## Introduction
14 | NHL hockey games are notoriously difficult to predict. There is [common acceptance](https://www.nhlnumbers.com/2013/08/01/machine-learning-and-hockey-is-there-a-theoretical-limit-on-predictions) among hockey analytics enthusiasts that it is not possible to do better than 62% accuracy (i.e. 38% is due to luck). Interestingly, the NHL has recently [announced a partnership](https://www.nhl.com/news/nhl-mgm-resorts-sports-betting-partnership/c-301392322) with MGM Resorts to enable sports betting.
15 |
16 | > Could we do better than 62% accuracy using supervised machine learning?
17 |
18 | ## The Question
19 |
20 | **Will the Vancouver Canucks win or lose their next game?**
21 |
22 | We simplify the analysis here by focusing on a single team (the Canucks), although we still need to consider data on all of the other teams as opponents for each prediction. In the future we would like to extend the model to allow predictions for any team, and to investigate whether a single model for all teams performs better or worse than separate models for each team.
23 |
24 | ## The Data
25 | The NHL has committed to providing detailed player tracking data starting next season, but in the meantime, we will see how accurate we can be with less granular data. The data we use here is *very* basic, but we are building a reproducible pipeline that will allow us to easily expand the model in the future.
26 |
27 | Plenty of data is available publicly on the [NHL website](www.nhl.com), but we've managed to avoid scraping it ourselves by obtaining the data directly from [Martin Ellis on Kaggle](https://www.kaggle.com/martinellis/nhl-game-data).
28 |
29 | Notably, this data includes all games from the start of the 2012-13 season through the end of the 2017-18 season. We've excluded the 2012-13 season from the model for two reasons: 1) it was a lockout-shortened season, and 2) it is older data and not as relevant for future predictions. We also exclude playoff and pre-season games and use only the regular season.
30 |
31 | There are several tables of interest, but we have focused exclusively on `game_teams_stats.csv`, with the following key variables driving our prediction:
32 | - Did the team of interest win the game? (`won` column, TRUE/FALSE)
33 | - Home or Away game? (`HoA` column, home/away)
34 | - Who is the opponent? (`team_id` column, 1-31)
35 | - What proportion of *goals* were scored by the team of interest? (calculated using the `goals` column for both teams, 0-1)
36 | - What proportion of *shots* were made by the team of interest? (calculated using the `shots` column for both teams, 0-1)
37 | - What proportion of *shots* were *saved* by the team of interest? (calculated using the `goals` and `shots` columns, 0-1)
38 |
39 | However, when predicting a game's result we don't know that game's goals/shots/saves in advance. So we will look backwards and see whether these stats from previous games, for both teams, help us make better predictions. In particular, we will use 1-, 3-, 5-, and 10-game averages for `won_ratio`, `goals_ratio`, `shots_ratio`, and `save_ratio`. One side effect of this is that we must "throw away" the first 10 games of each season, as we don't want to look back into the previous season to compute these features.
40 |
41 | In addition to the above ratios for each team, we have engineered an additional _difference_ feature which is the ratio for the Canucks minus the ratio for their opponent. The idea here is that it will give the decision tree a sense of the relative strength of both teams together in one feature.
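
To make this feature engineering concrete, here is a rough pandas sketch of the per-game ratios and the lagged moving averages. The repository actually does this step in R (`source/cleaning_nhl_data.R`, using `dplyr` and `zoo`); the function name and the `opp_goals`/`opp_shots` helper columns below are illustrative assumptions, not the script's actual code.

```python
import pandas as pd

def add_rolling_features(games: pd.DataFrame) -> pd.DataFrame:
    """Sketch only: expects one row per game for one team, sorted by date within
    each season, with columns season, won, goals, shots, opp_goals, opp_shots
    (the opponent columns are hypothetical names, not the raw Kaggle schema)."""
    df = games.copy()
    df["won"] = df["won"].astype(float)

    # Per-game ratios for the team of interest (all between 0 and 1).
    df["goals_ratio"] = df["goals"] / (df["goals"] + df["opp_goals"])
    df["shots_ratio"] = df["shots"] / (df["shots"] + df["opp_shots"])
    df["save_ratio"] = 1 - df["opp_goals"] / df["opp_shots"]

    # Moving averages over the previous 1, 3, 5, and 10 games, shifted by one
    # game so a game's own stats never leak into its features.
    for col in ["won", "goals_ratio", "shots_ratio", "save_ratio"]:
        for n in (1, 3, 5, 10):
            df[f"{col}_prev{n}"] = (
                df.groupby("season")[col]
                  .transform(lambda s: s.shift(1).rolling(n).mean())
            )

    # The 10-game window is undefined for the first 10 games of each season,
    # which is why those games get thrown away.
    return df.dropna()

# After joining the Canucks' rows (suffix .x) with the opponent's rows (suffix .y)
# for the same game, each difference feature is simply, e.g.:
#   merged["shots_ratio_prev1.diff"] = merged["shots_ratio_prev1.x"] - merged["shots_ratio_prev1.y"]
```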
42 |
43 | So we have 72 games in each of 5 seasons, 360 games in total for the Canucks. We are splitting this data into train/test by season rather than randomly. For a prediction such as this, we want to train the model looking backwards and test the model looking forwards. So we will do this as if we are standing at the beginning of the 2017-18 season, and put that season of data aside to test our final model.
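
A minimal sketch of that split, assuming the engineered feature table has a `season` column labelled by each season's starting year (so 2017 means the 2017-18 season):

```python
import pandas as pd

def split_by_season(features: pd.DataFrame):
    """Train on the 2013-14 through 2016-17 seasons; hold out 2017-18 for testing."""
    train = features[features["season"] < 2017]
    test = features[features["season"] == 2017]
    return train, test
```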
44 |
45 | Some observations about our data (specifically on the training data):
46 |
47 | ![](imgs/fig-1_home-away.jpg)
48 |
49 | We can see in Figure 1 that when the Canucks are on the road they are far more likely to lose the game. This makes sense and supports the commonly accepted idea of "home ice advantage".
50 |
51 |
52 | 
53 |
54 | We can see in Figure 2 that the shot ratio difference is only a weak predictor of whether the Canucks win or lose their next game, but there is some signal. Using only the previous 1 game appears to be slightly better than the longer windows.
55 |
56 | ## The Model
57 |
58 | We looked at many different combinations of features and used 10-fold cross-validation on our training data to choose features and hyperparameters. We also considered both a single decision tree model and a random forest, which should be a bit more robust.
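
The comparison lives in `source/finding_best_model.py`; below is a simplified `sklearn` sketch of the idea (the depth grid, `random_state`, and function name are illustrative, not the script's exact code):

```python
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier

def compare_models(X_train: pd.DataFrame, y_train: pd.Series) -> pd.DataFrame:
    """10-fold cross-validation accuracy for trees and forests over a depth grid."""
    rows = []
    for depth in range(1, 11):
        candidates = [
            ("DecisionTree", DecisionTreeClassifier(max_depth=depth, random_state=1234)),
            ("RandomForest", RandomForestClassifier(max_depth=depth, n_estimators=500,
                                                    random_state=1234)),
        ]
        for name, model in candidates:
            scores = cross_val_score(model, X_train, y_train, cv=10, scoring="accuracy")
            rows.append({"depth": depth, "algorithm": name, "accuracy": scores.mean()})
    # A depth/algorithm/accuracy table, in the spirit of results/model_selection.csv.
    return pd.DataFrame(rows)
```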
59 |
60 | In the end, we included the top 12 features using the `feature_importances_` attribute in the `sklearn` decision tree. They are as follows (in descending order of importance):
61 | - `goals_ratio_prev10.x`
62 | - `goals_ratio_prev3.y`
63 | - `save_ratio_prev10.diff`
64 | - `shots_ratio_prev3.y`
65 | - `shots_ratio_prev3.x`
66 | - `shots_ratio_prev1.y`
67 | - `save_ratio_prev3.diff`
68 | - `shots_ratio_prev5.y`
69 | - `goals_ratio_prev5.x`
70 | - `goals_ratio_prev10.diff`
71 | - `team_id.y`
72 | - `goals_ratio_prev3.diff`
73 |
74 | Note: those ending in `.x` are for the Canucks, `.y` is their opponent, and `.diff` is the difference between the two.
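
For reference, ranking and keeping the top 12 features by `feature_importances_` looks roughly like this (again a sketch, not the exact code in `source/building_model.py`):

```python
import pandas as pd
from sklearn.tree import DecisionTreeClassifier

def top_features(X_train: pd.DataFrame, y_train: pd.Series, k: int = 12) -> list:
    """Fit a decision tree and return the k most important feature names."""
    tree = DecisionTreeClassifier(random_state=1234).fit(X_train, y_train)
    importance = pd.Series(tree.feature_importances_, index=X_train.columns)
    return importance.sort_values(ascending=False).head(k).index.tolist()
```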
75 |
76 | Testing out some different depths, it is clear that we are overfitting if we go beyond a depth of 3 or 4 on a decision tree.
77 |
78 | 
79 |
80 | Looking at both decision trees and random forests ([full results here](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/results/model_selection.csv)), we get an optimal depth of 2 with a cross-validation accuracy of 55.2% for a decision tree, and an optimal depth of 1 with a **cross-validation accuracy of 58.8%** for a random forest.
81 |
82 | So we go forward with the **depth-1 random forest** prediction model (number of trees = 500).
83 |
84 | ## The Results
85 |
86 | Re-training this model on the entire training set and then testing our prediction accuracy on the test set (the 2017-18 season), we get a **test accuracy of 60.6%** ([see here](https://github.com/UBC-MDS/DSCI-522_nhl-game-predictor/blob/master/results/final_result.csv)). It is a little surprising to see a better test accuracy than the cross-validation accuracy on the training data; we think this is due to some bias in the small test set (only 72 games). We need to do some more testing on other teams to be more confident in our model.
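
A sketch of that final step (the hyperparameters come from the model selection above; the function name and `random_state` are illustrative):

```python
from sklearn.ensemble import RandomForestClassifier

def final_test_accuracy(X_train, y_train, X_test, y_test) -> float:
    """Refit the chosen depth-1, 500-tree random forest on all training data
    and report accuracy on the held-out 2017-18 season."""
    forest = RandomForestClassifier(max_depth=1, n_estimators=500, random_state=1234)
    forest.fit(X_train, y_train)
    return forest.score(X_test, y_test)  # share of the 72 test games predicted correctly
```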
87 |
88 | In any case, the accuracy is not great, but it is close to the suggested 62% maximum. We came close to that mark using only very basic data, so we could perhaps do better by also looking at which players are playing (e.g. injuries, backup goalie?) and whether each team had enough rest between games (back-to-backs are common and create some unfairness). Player tracking data could be especially helpful for predictions.
89 |
90 | ## References
91 | - Data: https://www.kaggle.com/martinellis/nhl-game-data
92 | - Lags and Moving Means in dplyr: https://danieljhocking.wordpress.com/2014/12/03/lags-and-moving-means-in-dplyr/
93 | - 62% accuracy cap for NHL predictions: https://www.nhlnumbers.com/2013/08/01/machine-learning-and-hockey-is-there-a-theoretical-limit-on-predictions
94 |
--------------------------------------------------------------------------------
/data/test.csv:
--------------------------------------------------------------------------------
1 | game_id,season.x,home_game.x,shots_ratio_prev1.x,shots_ratio_prev3.x,shots_ratio_prev5.x,shots_ratio_prev10.x,goals_ratio_prev1.x,goals_ratio_prev3.x,goals_ratio_prev5.x,goals_ratio_prev10.x,won_prev1.x,won_prev3.x,won_prev5.x,won_prev10.x,save_ratio_prev1.x,save_ratio_prev3.x,save_ratio_prev5.x,save_ratio_prev10.x,won.x,team_id.y,home_game.y,shots_ratio_prev1.y,shots_ratio_prev3.y,shots_ratio_prev5.y,shots_ratio_prev10.y,goals_ratio_prev1.y,goals_ratio_prev3.y,goals_ratio_prev5.y,goals_ratio_prev10.y,won_prev1.y,won_prev3.y,won_prev5.y,won_prev10.y,save_ratio_prev1.y,save_ratio_prev3.y,save_ratio_prev5.y,save_ratio_prev10.y,won_prev1.diff,won_prev3.diff,won_prev5.diff,won_prev10.diff,shots_ratio_prev1.diff,shots_ratio_prev3.diff,shots_ratio_prev5.diff,shots_ratio_prev10.diff,goals_ratio_prev1.diff,goals_ratio_prev3.diff,goals_ratio_prev5.diff,goals_ratio_prev10.diff,save_ratio_prev1.diff,save_ratio_prev3.diff,save_ratio_prev5.diff,save_ratio_prev10.diff
2 | 2017020176,2017,TRUE,0.5263157894736842,0.5424032623064686,0.5462256875426114,0.5119171132722965,0.75,0.85,0.71,0.6269047619047619,1,1,0.8,0.6,0.9259259259259259,0.9594356261022927,0.9231938431938431,0.9126022126022126,FALSE,25,FALSE,0.4444444444444444,0.4835426044381268,0.4701255626628761,0.523388762377341,0.6666666666666666,0.49537037037037035,0.5226190476190476,0.5313095238095238,1,0.3333333333333333,0.6,0.6,0.9666666666666667,0.8907647907647908,0.8932678074613559,0.8987638042182222,0,0.6666666666666667,0.20000000000000007,0,0.08187134502923976,0.058860657868341826,0.07610012487973528,-0.011471649105044501,0.08333333333333337,0.35462962962962963,0.18738095238095231,0.09559523809523807,-0.040740740740740744,0.06867083533750196,0.0299260357324872,0.01383840838399042
3 | 2017020183,2017,TRUE,0.5735294117647058,0.5209360547337843,0.5703065698955525,0.526647103629095,0.3333333333333333,0.6944444444444444,0.71,0.6002380952380952,0,0.6666666666666666,0.8,0.5,0.9310344827586207,0.9523201362281822,0.9436864540312816,0.9114199465923604,FALSE,1,FALSE,0.4032258064516129,0.4476704768878222,0.4808326302225489,0.46589310212237517,0.5714285714285714,0.35714285714285715,0.4253968253968254,0.5229365079365079,1,0.6666666666666666,0.8,0.8,0.9189189189189189,0.8932872266205599,0.8901082901082901,0.9043063649035324,-1,0,0,-0.30000000000000004,0.17030360531309297,0.07326557784596216,0.0894739396730036,0.06075400150671978,-0.23809523809523808,0.33730158730158727,0.2846031746031746,0.07730158730158732,0.012115563839701804,0.05903290960762231,0.05357816392299153,0.007113581688827919
4 | 2017020210,2017,TRUE,0.5873015873015873,0.5623822628466592,0.5576081571971399,0.5453772623592537,0,0.3611111111111111,0.5766666666666667,0.5502380952380952,0,0.3333333333333333,0.6,0.5,0.9230769230769231,0.9266791105871566,0.9464836568284845,0.9084895436619574,TRUE,5,FALSE,0.5641025641025641,0.5277641098536621,0.5258160677335987,0.5232093532716672,0.3333333333333333,0.35277777777777775,0.4116666666666666,0.4548412698412698,0,0.3333333333333333,0.4,0.6,0.9411764705882353,0.8651519166225048,0.8988464336220012,0.8955157249923937,0,0,0.19999999999999996,-0.09999999999999998,0.023199023199023228,0.03461815299299709,0.03179208946354117,0.02216790908758648,-0.3333333333333333,0.00833333333333336,0.16500000000000004,0.09539682539682537,-0.018099547511312153,0.06152719396465178,0.047637223206483226,0.012973818669563708
5 | 2017020222,2017,TRUE,0.35,0.5036103330220977,0.500021950300588,0.5188388008207921,0.6666666666666666,0.3333333333333333,0.55,0.5835714285714285,1,0.3333333333333333,0.6,0.6,0.9487179487179487,0.9342764515178308,0.9457510560958837,0.9233613385337524,FALSE,17,FALSE,0.49295774647887325,0.4984828021679266,0.4794209442407146,0.4836714344100901,1,0.625,0.555,0.41369047619047616,1,0.6666666666666666,0.6,0.3,1,0.9363636363636364,0.9373330559600582,0.9103592349928581,0,-0.3333333333333333,0,0.3,-0.14295774647887327,0.0051275308541710585,0.020601006059873472,0.03516736641070206,-0.33333333333333337,-0.2916666666666667,-0.0050000000000000044,0.16988095238095235,-0.05128205128205132,-0.002087184845805612,0.008418000135825499,0.013002103540894261
6 | 2017020230,2017,FALSE,0.4827586206896552,0.4733534026637475,0.5039810818459265,0.5153289486040433,0.4,0.35555555555555557,0.43,0.595,0,0.3333333333333333,0.4,0.6,0.9,0.923931623931624,0.9257510560958837,0.9318798570522708,TRUE,20,TRUE,0.53125,0.5059573578595318,0.5072781184194228,0.4983270444865249,0.5,0.611111111111111,0.49047619047619045,0.43333333333333335,1,1,0.6,0.5,0.8666666666666667,0.937227109807755,0.9111868405972967,0.9020410244186663,-1,-0.6666666666666667,-0.19999999999999996,0.09999999999999998,-0.048491379310344807,-0.032603955195784295,-0.003297036573496337,0.01700190411751845,-0.09999999999999998,-0.2555555555555555,-0.06047619047619046,0.16166666666666663,0.033333333333333326,-0.013295485876131052,0.014564215498586996,0.02983883263360454
7 | 2017020240,2017,FALSE,0.39622641509433965,0.40966167859466496,0.4779632069700576,0.5120944472563345,0.625,0.5638888888888889,0.40499999999999997,0.5575,1,0.6666666666666666,0.4,0.6,0.90625,0.9183226495726495,0.9218158709106985,0.9225048570522708,FALSE,24,TRUE,0.45569620253164556,0.4602389716806673,0.45678854429872295,0.4643236827366625,0.42857142857142855,0.43452380952380953,0.4107142857142857,0.48762987012987014,0,0,0.2,0.4,0.9069767441860466,0.9060292850990526,0.9067116996708016,0.8942076194288686,1,0.6666666666666666,0.2,0.19999999999999996,-0.05946978743730591,-0.05057729308600234,0.021174662671334665,0.04777076451967194,0.19642857142857145,0.12936507936507935,-0.005714285714285727,0.06987012987012986,-7.267441860465684e-4,0.012293364473596968,0.015104171239896935,0.028297237623402194
8 | 2017020260,2017,FALSE,0.36363636363636365,0.4142071331401195,0.43598459734438916,0.5031455836199709,0.2,0.4083333333333333,0.37833333333333335,0.5441666666666667,0,0.3333333333333333,0.4,0.6,0.8857142857142857,0.8973214285714286,0.9127518315018315,0.9282191427665566,FALSE,28,TRUE,0.5238095238095238,0.5641653538803882,0.5722163175913909,0.5456965339069648,0.16666666666666666,0.48888888888888893,0.5333333333333333,0.5889285714285715,0,0.6666666666666666,0.8,0.7,0.8333333333333334,0.9149572649572649,0.9146309246309247,0.9199078826828188,0,-0.3333333333333333,-0.4,-0.09999999999999998,-0.1601731601731602,-0.14995822074026877,-0.13623172024700175,-0.04255095028699396,0.033333333333333354,-0.0805555555555556,-0.15499999999999997,-0.04476190476190478,0.052380952380952306,-0.017635836385836323,-0.001879093129093179,0.008311260083737837
9 | 2017020274,2017,FALSE,0.5694444444444444,0.4431024077250492,0.4324131687729606,0.4950106629850502,0,0.275,0.37833333333333335,0.4775,0,0.3333333333333333,0.4,0.5,0.8387096774193549,0.8768913210445468,0.8958783823703178,0.9211810195994011,TRUE,26,TRUE,0.45,0.450837584230745,0.4694311219670184,0.4858497810075289,0.3333333333333333,0.3968253968253968,0.44880952380952377,0.5144047619047619,0,0.3333333333333333,0.4,0.5,0.9393939393939394,0.9132605120977214,0.905483188979063,0.9205847317444334,0,0,0,0,0.11944444444444441,-0.007735176505695773,-0.03701795319405782,0.009160881977521296,-0.3333333333333333,-0.12182539682539678,-0.07047619047619041,-0.03690476190476194,-0.10068426197458458,-0.0363691910531746,-0.009604806608745187,5.962878549676809e-4
10 | 2017020287,2017,TRUE,0.4576271186440678,0.46356930890829195,0.45393859250177415,0.4769802714011811,0.6,0.26666666666666666,0.365,0.4575,1,0.3333333333333333,0.4,0.5,0.9375,0.8873079877112136,0.8936347926267281,0.9196929243613059,FALSE,54,FALSE,0.42857142857142855,0.4787126715092817,0.5025653996188982,0.49369225652356147,0.2,0.4380952380952381,0.473968253968254,0.5103174603174603,0,0.3333333333333333,0.4,0.4,0.75,0.8579638752052545,0.8622068965517241,0.8817195045455686,1,0,0,0.09999999999999998,0.02905569007263925,-0.015143362600989752,-0.04862680711712408,-0.01671198512238037,0.39999999999999997,-0.17142857142857143,-0.10896825396825399,-0.05281746031746026,0.1875,0.02934411250595903,0.03142789607500396,0.03797341981573732
11 | 2017020303,2017,TRUE,0.4117647058823529,0.47961208965695507,0.43973980954031366,0.4718604456931201,0.2857142857142857,0.29523809523809524,0.34214285714285714,0.38607142857142857,0,0.3333333333333333,0.4,0.4,0.8333333333333334,0.869847670250896,0.8803014592933948,0.9030262576946393,FALSE,19,FALSE,0.5652173913043478,0.5064220249537427,0.5463183088576572,0.5435077764621434,0.8,0.4831168831168831,0.5398701298701298,0.5432683982683982,1,0.3333333333333333,0.6,0.7,0.9666666666666667,0.8683545614580097,0.8981287243356209,0.9139162206381822,-1,0,-0.19999999999999996,-0.29999999999999993,-0.15345268542199486,-0.0268099352967876,-0.10657849931734353,-0.07164733076902335,-0.5142857142857143,-0.18787878787878787,-0.1977272727272727,-0.15719696969696967,-0.1333333333333333,0.001493108792886333,-0.017827265042226093,-0.010889962943542941
12 | 2017020316,2017,FALSE,0.42592592592592593,0.43177258348411557,0.44567971170663095,0.4618214593383443,0.42857142857142855,0.4380952380952381,0.3028571428571428,0.3539285714285714,0,0.3333333333333333,0.2,0.3,0.8709677419354839,0.880600358422939,0.8732450076804915,0.8975304392955951,TRUE,4,TRUE,0.5571428571428572,0.5498168498168499,0.526936114732725,0.5100442936300987,0.4444444444444444,0.3148148148148148,0.3388888888888889,0.42896825396825394,0,0,0.2,0.3,0.8387096774193549,0.8760345389377647,0.9130851108270464,0.9147249394782602,0,0.3333333333333333,0,0,-0.13121693121693123,-0.1180442663327343,-0.08125640302609399,-0.048222834291754446,-0.015873015873015872,0.12328042328042327,-0.03603174603174608,-0.07503968253968252,0.032258064516129004,0.004565819485174316,-0.03984010314655484,-0.017194500182665107
13 | 2017020324,2017,FALSE,0.4864864864864865,0.44139237276492177,0.4702497362766555,0.45311716681052233,0.7142857142857143,0.47619047619047616,0.4057142857142857,0.3920238095238095,1,0.3333333333333333,0.4,0.4,0.9473684210526316,0.8838898321071497,0.8855758347481607,0.8991638331249961,TRUE,5,TRUE,0.4864864864864865,0.5465098406274876,0.5262694344758497,0.5461413695488622,0.3333333333333333,0.5462962962962963,0.4677777777777778,0.44805555555555554,0,0.6666666666666666,0.4,0.4,0.9473684210526316,0.9256379585326954,0.9010048949352855,0.8870004034067751,1,-0.3333333333333333,0,0,0,-0.10511746786256587,-0.05601969819919422,-0.09302420273833983,0.380952380952381,-0.07010582010582012,-0.0620634920634921,-0.056031746031746044,0,-0.041748126425545706,-0.015429060187124755,0.012163429718221042
14 | 2017020340,2017,FALSE,0.45121951219512196,0.45454397486917814,0.44660474982679105,0.4395089592998758,0.7142857142857143,0.6190476190476191,0.5485714285714286,0.46345238095238095,1,0.6666666666666666,0.6,0.5,0.9555555555555556,0.9246305728478904,0.9089450103754009,0.9024116963728593,FALSE,1,TRUE,0.5915492957746479,0.5052236918780442,0.521190276284453,0.478094891619272,0.5,0.4523809523809524,0.3880952380952381,0.40904761904761905,0,0.3333333333333333,0.4,0.3,0.9310344827586207,0.9002141086319585,0.9094011924519023,0.9087448730275809,1,0.3333333333333333,0.19999999999999996,0.2,-0.1403297835795259,-0.05067971700886603,-0.07458552645766198,-0.038585932319396166,0.2142857142857143,0.16666666666666669,0.1604761904761905,0.0544047619047619,0.02452107279693494,0.02441646421593191,-4.561820765014435e-4,-0.006333176654721551
15 | 2017020360,2017,FALSE,0.49019607843137253,0.47596735903766035,0.453118541784252,0.45352856714301304,0.4,0.6095238095238096,0.5085714285714286,0.4367857142857143,0,0.6666666666666666,0.4,0.4,0.8846153846153846,0.929179787074524,0.8983680872984778,0.896001439962603,FALSE,3,TRUE,0.4305555555555556,0.4850078802206462,0.46218565987669974,0.49800728802927857,0.6666666666666666,0.8412698412698413,0.5714285714285714,0.6037698412698412,1,1,0.6,0.8,0.975609756097561,0.9817689085981769,0.9516305588909105,0.9355219872069298,-1,-0.33333333333333337,-0.19999999999999996,-0.4,0.05964052287581695,-0.009040521182985872,-0.009067118092447757,-0.04447872088626553,-0.2666666666666666,-0.2317460317460317,-0.06285714285714283,-0.16698412698412696,-0.09099437148217637,-0.052589121523652915,-0.05326247159243269,-0.039520547244326854
16 | 2017020368,2017,FALSE,0.6153846153846154,0.5189334020037033,0.4938425236847045,0.4667911666125091,0.5,0.5380952380952381,0.5514285714285714,0.4467857142857143,0,0.3333333333333333,0.4,0.4,0.85,0.8967236467236467,0.9017014206318111,0.891001439962603,FALSE,2,TRUE,0.42857142857142855,0.4957679136783614,0.48268356049055416,0.47490235398942615,0.6666666666666666,0.5978835978835979,0.5503968253968254,0.49996031746031744,1,1,0.8,0.7,0.96875,0.9188230994152047,0.9109092442645075,0.9008792659879321,-1,-0.6666666666666667,-0.4,-0.29999999999999993,0.18681318681318687,0.023165488325341876,0.011158963194150329,-0.008111187376917073,-0.16666666666666663,-0.059788359788359835,0.0010317460317459393,-0.05317460317460315,-0.11875000000000002,-0.022099452691557997,-0.009207823632696344,-0.009877826025329162
17 | 2017020383,2017,FALSE,0.4098360655737705,0.5051389197965861,0.4906245516142734,0.46815213166045216,0.2857142857142857,0.3952380952380952,0.5228571428571429,0.41285714285714287,0,0,0.4,0.3,0.8611111111111112,0.8652421652421652,0.8997300944669366,0.886487551073714,TRUE,18,TRUE,0.44285714285714284,0.46247593085394084,0.4745621714155903,0.470929908149721,0.6,0.7,0.645,0.6255952380952381,1,0.6666666666666666,0.8,0.8,0.9487179487179487,0.9551282051282051,0.94214096248579,0.9214962629756845,-1,-0.6666666666666666,-0.4,-0.5,-0.033021077283372335,0.04266298894264525,0.016062380198683057,-0.0027777764892688572,-0.3142857142857143,-0.30476190476190473,-0.12214285714285711,-0.21273809523809523,-0.08760683760683752,-0.08988603988603983,-0.04241086801885341,-0.035008711901970524
18 | 2017020402,2017,TRUE,0.5,0.5084068936527953,0.4933272543169761,0.4817884952968158,0.625,0.47023809523809523,0.505,0.45535714285714285,1,0.3333333333333333,0.4,0.4,0.90625,0.8724537037037037,0.8915064102564103,0.8885411225022855,TRUE,10,FALSE,0.45121951219512196,0.543739837398374,0.49683777217082514,0.4692193578045834,0.6,0.5777777777777777,0.5577777777777778,0.6388888888888888,1,0.6666666666666666,0.6,0.7,0.9111111111111111,0.8902116402116402,0.9124545447405418,0.935045658285431,0,-0.3333333333333333,-0.19999999999999996,-0.29999999999999993,0.04878048780487804,-0.035332943745578604,-0.003510517853849038,0.012569137492232374,0.025000000000000022,-0.10753968253968249,-0.05277777777777781,-0.183531746031746,-0.004861111111111094,-0.017757936507936556,-0.020948134484131464,-0.046504535783145506
19 | 2017020423,2017,TRUE,0.41935483870967744,0.4430636347611493,0.48695431961988717,0.4667795347233391,0.6666666666666666,0.5257936507936508,0.49547619047619046,0.5220238095238096,1,0.6666666666666666,0.4,0.5,0.9722222222222222,0.9131944444444444,0.8948397435897436,0.9018923769825722,TRUE,12,FALSE,0.5465116279069767,0.5183696747080407,0.5429858110846054,0.5347591490411469,0.6,0.4222222222222222,0.4422222222222222,0.5003968253968254,1,0.3333333333333333,0.4,0.5,0.9487179487179487,0.901259559154296,0.8836128783497205,0.8881748447156795,0,0.3333333333333333,0,0,-0.12715678919729928,-0.07530603994689145,-0.05603149146471825,-0.06797961431780775,0.06666666666666665,0.10357142857142859,0.053253968253968276,0.02162698412698416,0.023504273504273532,0.011934885290148411,0.011226865240023143,0.013717532266892651
20 | 2017020436,2017,TRUE,0.4230769230769231,0.44747725392886684,0.47353048854899726,0.46332451516662465,1,0.7638888888888888,0.6154761904761905,0.5620238095238095,1,1,0.6,0.5,1,0.9594907407407407,0.9179166666666667,0.9081423769825723,FALSE,4,FALSE,0.559322033898305,0.4621982537236774,0.4342111828106475,0.4675287664553645,0.6666666666666666,0.4603174603174603,0.41507936507936505,0.4178571428571428,1,0.6666666666666666,0.4,0.2,0.9230769230769231,0.9225071225071225,0.9166264454499748,0.8984647717475043,0,0.33333333333333337,0.19999999999999996,0.3,-0.13624511082138197,-0.014720999794810563,0.03931930573834974,-0.0042042512887398575,0.33333333333333337,0.30357142857142855,0.20039682539682546,0.14416666666666667,0.07692307692307687,0.036983618233618176,0.001290221216691867,0.009677605235067999
21 | 2017020453,2017,FALSE,0.5873015873015873,0.47657778302939596,0.46791388293239167,0.4808782033085481,0.2,0.6222222222222222,0.5554761904761905,0.5534523809523809,0,0.6666666666666666,0.6,0.5,0.8461538461538461,0.9394586894586895,0.9171474358974359,0.9094244282646236,FALSE,20,TRUE,0.5,0.6017316017316017,0.584753720574616,0.5237723520130144,0.6,0.4619047619047619,0.5604761904761905,0.48023809523809524,1,0.3333333333333333,0.4,0.4,0.9459459459459459,0.8911226497433394,0.8922493474217612,0.9046925286201989,-1,0.3333333333333333,0.19999999999999996,0.09999999999999998,0.08730158730158732,-0.1251538187022057,-0.11683983764222439,-0.04289414870446634,-0.39999999999999997,0.16031746031746036,-0.0050000000000000044,0.07321428571428568,-0.0997920997920998,0.048336039715350054,0.02489808847567465,0.004731899644424664
22 | 2017020464,2017,FALSE,0.4067796610169492,0.47238605713181986,0.4673026020210274,0.4789635768176504,0.3333333333333333,0.5111111111111111,0.565,0.5439285714285714,0,0.3333333333333333,0.6,0.5,0.8857142857142857,0.9106227106227106,0.9220680708180709,0.9108990826425037,FALSE,52,TRUE,0.4166666666666667,0.46164750957854406,0.5271334332833584,0.5264916000870669,0.42857142857142855,0.33174603174603173,0.5263203463203463,0.5276046176046176,0,0,0.4,0.5,0.8571428571428572,0.8447420634920635,0.8810387864823349,0.9009000010151638,0,0.3333333333333333,0.19999999999999996,0,-0.009887005649717506,0.010738547553275801,-0.059830831262330963,-0.047528023269416464,-0.09523809523809523,0.17936507936507934,0.03867965367965365,0.016323953823953796,0.02857142857142847,0.06588064713064712,0.04102928433573594,0.009999081627339979
23 | 2017020478,2017,TRUE,0.52,0.5046937494395122,0.4713026020210274,0.48231492816900173,0.16666666666666666,0.23333333333333334,0.47333333333333333,0.48916666666666664,0,0,0.4,0.4,0.7916666666666666,0.8411782661782662,0.8991514041514042,0.8953289072039072,FALSE,18,FALSE,0.5301204819277109,0.44344009404250373,0.46606405642550225,0.47031311392054626,0.5,0.6130952380952381,0.5428571428571428,0.5939285714285715,0,0.6666666666666666,0.6,0.7,0.9230769230769231,0.9345441595441596,0.9180479242979244,0.9300944433918572,0,-0.6666666666666666,-0.19999999999999996,-0.29999999999999993,-0.010120481927710867,0.061253655397008444,0.005238545595525168,0.012001814248455478,-0.33333333333333337,-0.3797619047619048,-0.06952380952380949,-0.10476190476190483,-0.1314102564102565,-0.0933658933658934,-0.018896520146520213,-0.034765536187949975
24 | 2017020495,2017,TRUE,0.35135135135135137,0.4260436707894335,0.4577019045493622,0.47232811208462466,0.125,0.20833333333333331,0.365,0.43023809523809525,0,0,0.2,0.3,0.8541666666666666,0.8438492063492063,0.875540293040293,0.8851900183150183,TRUE,28,FALSE,0.4603174603174603,0.5710636745119504,0.5490239007480386,0.5084669650293016,0.6,0.6761904761904762,0.5568253968253968,0.5930952380952381,1,0.6666666666666666,0.6,0.6,0.9411764705882353,0.9248366013071896,0.895315753887762,0.9140306287286651,-1,-0.6666666666666666,-0.39999999999999997,-0.3,-0.10896610896610892,-0.1450200037225169,-0.09132199619867643,-0.03613885294467689,-0.475,-0.4678571428571429,-0.1918253968253968,-0.16285714285714287,-0.08700980392156865,-0.08098739495798324,-0.01977546084746895,-0.028840610413646828
25 | 2017020509,2017,TRUE,0.5194805194805194,0.4636106236106236,0.47698262383008144,0.4752565561895394,0.5714285714285714,0.2876984126984127,0.27928571428571425,0.4473809523809524,1,0.3333333333333333,0.2,0.4,0.9189189189189189,0.8549174174174173,0.8593240768240767,0.8886203717453718,FALSE,20,FALSE,0.5333333333333333,0.5413454270597128,0.5434513240324378,0.554102522303527,0,0.3,0.43333333333333335,0.4569047619047619,0,0,0.4,0.3,0.9285714285714286,0.9265562024182714,0.9284562439734854,0.9024248677696953,1,0.3333333333333333,-0.2,0.10000000000000003,-0.013852813852813894,-0.07773480344908917,-0.06646870020235635,-0.07884596611398759,0.5714285714285714,-0.012301587301587313,-0.1540476190476191,-0.009523809523809545,-0.009652509652509744,-0.07163878500085408,-0.06913216714940862,-0.013804496024323543
26 | 2017020524,2017,TRUE,0.3090909090909091,0.3933075933075933,0.42134048818794584,0.4446271855601687,0.14285714285714285,0.27976190476190477,0.26785714285714285,0.4116666666666667,0,0.3333333333333333,0.2,0.4,0.8421052631578947,0.8717302829144934,0.8585143602248865,0.8878308980611612,FALSE,8,FALSE,0.42424242424242425,0.44288472964943554,0.45663992869875225,0.46951404347581693,0,0.3055555555555555,0.34904761904761905,0.5737662337662337,0,0.3333333333333333,0.2,0.6,0.9210526315789473,0.9132675438596491,0.9050776434329065,0.932310568755861,0,0,0,-0.19999999999999996,-0.11515151515151517,-0.04957713634184224,-0.03529944051080641,-0.02488685791564821,0.14285714285714285,-0.025793650793650758,-0.0811904761904762,-0.16209956709956702,-0.07894736842105265,-0.04153726094515575,-0.046563283208020056,-0.044479670694699824
27 | 2017020538,2017,FALSE,0.5492957746478874,0.45928906773977196,0.44984371091413344,0.45857315646758046,0.4166666666666667,0.376984126984127,0.2845238095238095,0.42476190476190473,0,0.3333333333333333,0.2,0.4,0.78125,0.8474247273589378,0.8376215030820293,0.8798447869500501,FALSE,28,TRUE,0.42,0.4536123136123136,0.5227421008110663,0.507228894248858,0.375,0.46785714285714286,0.5664285714285714,0.5290079365079365,0,0.3333333333333333,0.4,0.5,0.8275862068965517,0.8895875591615957,0.900419202163624,0.9019407645698354,0,0,-0.2,-0.09999999999999998,0.12929577464788738,0.005676754127458383,-0.07289838989693287,-0.04865573778127752,0.041666666666666685,-0.09087301587301588,-0.2819047619047619,-0.10424603174603181,-0.04633620689655171,-0.042162831802657896,-0.06279769908159472,-0.022095977619785323
28 | 2017020556,2017,TRUE,0.5555555555555556,0.471314079764784,0.45695482202524457,0.464128712023136,0.4444444444444444,0.33465608465608465,0.3400793650793651,0.4067063492063492,0,0,0.2,0.3,0.84375,0.8223684210526315,0.8480381697486961,0.8735947869500501,FALSE,19,FALSE,0.5070422535211268,0.4166331321260899,0.45190295619873083,0.4932062154426073,0.4,0.24444444444444444,0.39666666666666667,0.5011904761904762,0,0,0.2,0.5,0.9142857142857143,0.9222410439801745,0.9200112930547714,0.9242240373319833,0,0,0,-0.2,0.04851330203442883,0.05468094763869413,0.005051865826513735,-0.029077503419471296,0.0444444444444444,0.09021164021164021,-0.056587301587301575,-0.094484126984127,-0.07053571428571426,-0.09987262292754295,-0.0719731233060753,-0.05062925038193322
29 | 2017020573,2017,TRUE,0.47692307692307695,0.52725813570884,0.4820691671395897,0.46988553584447595,0.25,0.37037037037037035,0.36507936507936506,0.36503968253968255,0,0,0.2,0.2,0.9117647058823529,0.8455882352941176,0.8595577775918333,0.8675490353160632,TRUE,16,FALSE,0.5797101449275363,0.5743569656613134,0.5342163014127033,0.546521571619571,0.2,0.33333333333333337,0.4866666666666667,0.47833333333333333,0,0.3333333333333333,0.6,0.5,0.8620689655172413,0.8815121659949245,0.9109536316432868,0.9029946729645005,0,-0.3333333333333333,-0.39999999999999997,-0.3,-0.1027870680044593,-0.0470988299524735,-0.05214713427311357,-0.07663603577509509,0.04999999999999999,0.03703703703703698,-0.12158730158730163,-0.11329365079365078,0.049695740365111596,-0.035923930700806905,-0.05139585405145353,-0.035445637648437334
30 | 2017020592,2017,TRUE,0.49206349206349204,0.5081807081807082,0.4765857616561842,0.47678419274313283,0.7142857142857143,0.4695767195767196,0.39365079365079364,0.33646825396825397,1,0.3333333333333333,0.2,0.2,0.9375,0.897671568627451,0.8632739938080495,0.8612990353160631,FALSE,26,FALSE,0.417910447761194,0.45003794586390083,0.44421631590543725,0.4612412212718285,0.4,0.35555555555555557,0.45904761904761904,0.46809523809523806,0,0.3333333333333333,0.4,0.5,0.9230769230769231,0.9424369313482217,0.9358143291611034,0.9114845185990822,1,0,-0.2,-0.3,0.07415304430229802,0.05814276231680732,0.03236944575074696,0.015542971471304345,0.3142857142857143,0.11402116402116402,-0.0653968253968254,-0.1316269841269841,0.014423076923076872,-0.044765362720770696,-0.07254033535305393,-0.05018548328301908
31 | 2017020612,2017,TRUE,0.4098360655737705,0.4596075448534465,0.49673479295275647,0.4590376405703511,0.42857142857142855,0.4642857142857143,0.4507936507936508,0.3593253968253968,0,0.3333333333333333,0.2,0.2,0.8888888888888888,0.9127178649237473,0.8726307189542484,0.8655725395895675,FALSE,24,FALSE,0.5098039215686274,0.5501429738562091,0.5397594439873852,0.5003423932565385,0.7142857142857143,0.526984126984127,0.6273015873015872,0.5461507936507937,1,0.6666666666666666,0.8,0.6,0.92,0.9112215320910974,0.9159636884854276,0.9096648507786615,-1,-0.3333333333333333,-0.6000000000000001,-0.39999999999999997,-0.09996785599485691,-0.09053542900276262,-0.043024651034628714,-0.041304752686187385,-0.28571428571428575,-0.0626984126984127,-0.17650793650793645,-0.18682539682539684,-0.0311111111111112,0.001496332832649938,-0.043332969531179266,-0.04409231118909407
32 | 2017020637,2017,FALSE,0.5344827586206896,0.4787941054193174,0.49377218974731696,0.4718079503307252,0,0.38095238095238093,0.36746031746031743,0.3259920634920635,0,0.3333333333333333,0.2,0.2,0.8148148148148149,0.8804012345679012,0.8793436819172114,0.8584825924996203,FALSE,10,TRUE,0.4444444444444444,0.43884140550807216,0.47148972534624917,0.4932306458110816,0.5,0.2777777777777778,0.37965367965367963,0.39704906204906204,1,0.3333333333333333,0.4,0.4,0.9428571428571428,0.8979177334016043,0.8866497997048282,0.9051966507327857,-1,0,-0.2,-0.2,0.0900383141762452,0.039952699911245226,0.022282464401067792,-0.021422695480356413,-0.5,0.10317460317460314,-0.0121933621933622,-0.07105699855699854,-0.12804232804232796,-0.017516498833703098,-0.007306117787616806,-0.04671405823316532
33 | 2017020648,2017,FALSE,0.5,0.48143960806482006,0.48266107863620583,0.4698079503307252,0.5,0.30952380952380953,0.37857142857142856,0.3593253968253968,0,0,0.2,0.2,0.9444444444444444,0.8827160493827161,0.8994825708061002,0.8737603702773982,FALSE,8,TRUE,0.4578313253012048,0.49247253096894616,0.4724532155510647,0.4831611229456273,0.5,0.23333333333333334,0.24,0.325,1,0.3333333333333333,0.2,0.4,0.9777777777777777,0.931986531986532,0.9248781937017231,0.918474623585759,-1,-0.3333333333333333,0,-0.2,0.042168674698795205,-0.011032922904126108,0.010207863085141156,-0.013353172614902098,0,0.0761904761904762,0.13857142857142857,0.034325396825396814,-0.033333333333333326,-0.049270482603815946,-0.025395622895622916,-0.044714253308360874
34 | 2017020665,2017,FALSE,0.5135135135135135,0.48598598598598597,0.4804553564304836,0.4785205590433339,0.25,0.34523809523809523,0.29285714285714287,0.34325396825396826,0,0,0,0.1,0.9166666666666666,0.9166666666666666,0.8907407407407407,0.8770073672743951,TRUE,29,TRUE,0.6081081081081081,0.5337586033238207,0.5316187983579288,0.4951890273645145,0.25,0.45,0.4033333333333333,0.3961111111111111,0,0.6666666666666666,0.6,0.4,0.896551724137931,0.9323477082097772,0.9378177158349572,0.91785133473171,0,-0.6666666666666666,-0.6,-0.30000000000000004,-0.09459459459459463,-0.04777261733783478,-0.05116344192744521,-0.016668468321180574,0,-0.10476190476190478,-0.11047619047619045,-0.052857142857142825,0.02011494252873558,-0.015681041543110563,-0.04707697509421649,-0.04084396745731489
35 | 2017020682,2017,FALSE,0.5,0.48598598598598597,0.4984881433157295,0.497611468134243,0.7142857142857143,0.4166666666666667,0.35,0.4003968253968254,1,0.3333333333333333,0.2,0.2,0.9310344827586207,0.9121966794380587,0.8991698595146871,0.8859002892344677,TRUE,30,TRUE,0.5540540540540541,0.512137641673932,0.49768333057751857,0.502844674260964,0.8,0.6222222222222222,0.5677777777777778,0.5005555555555555,1,0.6666666666666666,0.6,0.6,0.9696969696969697,0.9458924217544907,0.917790121983085,0.9226831897213377,0,-0.3333333333333333,-0.39999999999999997,-0.39999999999999997,-0.05405405405405406,-0.026151655687946085,8.048127382109249e-4,-0.005233206126721013,-0.08571428571428574,-0.20555555555555555,-0.21777777777777785,-0.10015873015873011,-0.03866248693834906,-0.03369574231643202,-0.018620262468397875,-0.03678290048686994
36 | 2017020720,2017,FALSE,0.5208333333333334,0.511448948948949,0.49575825825825826,0.4947652240027876,0.6,0.5214285714285715,0.47,0.41873015873015873,1,0.6666666666666666,0.4,0.3,0.9130434782608696,0.920248209228719,0.9188155922038981,0.8990796370605547,FALSE,22,TRUE,0.45454545454545453,0.5085219542362399,0.5186070389660965,0.5010975497488251,0.6,0.5333333333333333,0.3933333333333333,0.3323809523809524,1,0.6666666666666666,0.4,0.3,0.9444444444444444,0.9234006734006733,0.896151166840822,0.8954574756939442,0,0,0,0,0.06628787878787884,0.0029269947127090745,-0.022848780707838245,-0.006332325746037537,0,-0.011904761904761862,0.07666666666666666,0.08634920634920634,-0.0314009661835748,-0.003152464171954361,0.022664425363076113,0.0036221613666105323
37 | 2017020723,2017,FALSE,0.43548387096774194,0.4854390681003584,0.48285503245180666,0.48275805554400625,0.2857142857142857,0.5333333333333333,0.42714285714285716,0.40285714285714286,0,0.6666666666666666,0.4,0.3,0.8571428571428572,0.9004069393874492,0.9013552747435806,0.9004189227748404,FALSE,52,TRUE,0.5303030303030303,0.5062072858683028,0.5047239171820448,0.5238904083143923,0.5,0.34444444444444444,0.49393939393939396,0.5679220779220779,1,0.3333333333333333,0.6,0.7,0.967741935483871,0.9320356286000137,0.9294419653953023,0.9262922390275281,-1,0.3333333333333333,-0.19999999999999996,-0.39999999999999997,-0.09481915933528834,-0.020768217767944352,-0.021868884730238125,-0.041132352770386016,-0.2142857142857143,0.18888888888888888,-0.0667965367965368,-0.16506493506493508,-0.11059907834101379,-0.031628689212564454,-0.028086690651721757,-0.02587331625268774
38 | 2017020740,2017,TRUE,0.4461538461538462,0.4674903501516405,0.483196912793687,0.47968113246708316,0,0.29523809523809524,0.37,0.37785714285714284,0,0.3333333333333333,0.4,0.3,0.9722222222222222,0.9141361858753163,0.9180219414102473,0.9064646744088274,TRUE,26,FALSE,0.5660377358490566,0.4952449018486754,0.507877745919566,0.5077547709726995,0.6666666666666666,0.41666666666666663,0.35666666666666663,0.46119047619047615,1,0.3333333333333333,0.2,0.3,0.9130434782608696,0.9172510518934082,0.8897445705299842,0.8954238800117088,-1,0,0.2,0,-0.11988388969521041,-0.027754551697034946,-0.024680833125879043,-0.02807363850561634,-0.6666666666666666,-0.12142857142857139,0.013333333333333364,-0.08333333333333331,0.05917874396135259,-0.0031148660180918553,0.028277370880263053,0.01104079439711858
39 | 2017020757,2017,TRUE,0.5223880597014925,0.4680085922743602,0.4849718220312828,0.4827135892308832,0.75,0.34523809523809523,0.47,0.38142857142857145,1,0.3333333333333333,0.6,0.3,0.9375,0.9222883597883598,0.9221886080769139,0.9064646744088274,FALSE,7,FALSE,0.5074626865671642,0.488363268403744,0.4756216915333008,0.484813112239048,1,0.5972222222222222,0.594047619047619,0.46124458874458873,1,0.6666666666666666,0.6,0.3,1,0.9169823232323232,0.9222155377302436,0.8852375636845717,0,-0.3333333333333333,0,0,0.01492537313432829,-0.020354676129383797,0.009350130497982001,-0.002099523008164794,-0.25,-0.251984126984127,-0.12404761904761907,-0.07981601731601728,-0.0625,0.005306036556036564,-2.6929653329621495e-5,0.02122711072425565
40 | 2017020772,2017,TRUE,0.5357142857142857,0.5014187305232081,0.4921146791741399,0.4953014112449347,0,0.25,0.3271428571428571,0.3385714285714286,0,0.3333333333333333,0.4,0.3,0.8461538461538461,0.9186253561253561,0.9052124807559591,0.902191170135323,TRUE,21,FALSE,0.5211267605633803,0.44716090387337,0.4449998390273187,0.45060876174014863,0.25,0.41666666666666663,0.525,0.6552777777777777,0,0.3333333333333333,0.6,0.8,0.9117647058823529,0.9157495256166983,0.9298068582271618,0.948289813970554,0,0,-0.19999999999999996,-0.5,0.014587525150905445,0.05425782664983808,0.04711484014682121,0.044692649504786075,-0.25,-0.16666666666666663,-0.1978571428571429,-0.31670634920634916,-0.06561085972850678,0.002875830508657806,-0.024594377471202744,-0.04609864383523099
41 | 2017020787,2017,TRUE,0.4642857142857143,0.5074626865671642,0.4808051553646161,0.4882817068114372,0.5714285714285714,0.44047619047619047,0.3214285714285714,0.3957142857142857,1,0.6666666666666666,0.4,0.4,0.9,0.8945512820512821,0.9026037851037851,0.9107096886538416,TRUE,16,FALSE,0.4189189189189189,0.4857934857934858,0.4959168536964726,0.5093189128192425,0.6666666666666666,0.6333333333333333,0.44,0.48,1,0.6666666666666666,0.4,0.5,0.9767441860465116,0.9505813953488372,0.9270108287941551,0.9271506811673443,0,0,0,-0.09999999999999998,0.04536679536679539,0.0216692007736784,-0.015111698331856493,-0.021037206007805287,-0.09523809523809523,-0.19285714285714284,-0.1185714285714286,-0.0842857142857143,-0.07674418604651156,-0.05603011329755514,-0.024407043690369945,-0.016440992513502728
42 | 2017020803,2017,TRUE,0.42857142857142855,0.47619047619047616,0.47942266688535345,0.48113884966858006,0.6666666666666666,0.4126984126984127,0.3976190476190476,0.41238095238095235,1,0.6666666666666666,0.6,0.5,0.9285714285714286,0.8915750915750916,0.9168894993894994,0.90912238706654,FALSE,14,FALSE,0.4025974025974026,0.3896404604934752,0.417983219958057,0.4479351659360269,0.6363636363636364,0.5732323232323232,0.6582251082251083,0.5213347763347763,1,0.6666666666666666,0.8,0.6,0.9130434782608696,0.9349784567175872,0.9428052558487341,0.9048946914164305,0,0,-0.20000000000000007,-0.09999999999999998,0.025974025974025927,0.08655001569700094,0.06143944692729647,0.03320368373255317,0.030303030303030276,-0.16053391053391053,-0.2606060606060607,-0.10895382395382397,0.01552795031055898,-0.04340336514249554,-0.025915756459234718,0.0042276956501094975
43 | 2017020818,2017,FALSE,0.5409836065573771,0.47794691647150667,0.4983886189660596,0.4907927658798733,0.3333333333333333,0.5238095238095237,0.46428571428571425,0.41714285714285715,0,0.6666666666666666,0.6,0.5,0.8571428571428572,0.8952380952380953,0.8938736263736264,0.9059477838919369,FALSE,13,TRUE,0.5409836065573771,0.5296429373456097,0.49593644733887265,0.5099684857573562,0.6,0.6888888888888889,0.5085714285714286,0.5012554112554113,1,1,0.6,0.5,0.9285714285714286,0.9452561327561327,0.9189931445366228,0.9060770320384264,-1,-0.33333333333333337,0,0,0,-0.05169602087410308,0.002452171627186972,-0.0191757198774829,-0.26666666666666666,-0.16507936507936516,-0.04428571428571432,-0.08411255411255414,-0.0714285714285714,-0.05001803751803746,-0.0251195181629964,-1.2924814648951344e-4
44 | 2017020830,2017,FALSE,0.39705882352941174,0.45553795288607246,0.4733227717316435,0.4791472968814631,0.25,0.41666666666666663,0.36428571428571427,0.41714285714285715,0,0.3333333333333333,0.4,0.5,0.926829268292683,0.9041811846689896,0.891739480032163,0.9069640440545385,FALSE,14,TRUE,0.4927536231884058,0.45145580640947713,0.4241382796222909,0.45159701608397823,0.25,0.5176767676767676,0.5272727272727272,0.486017316017316,0,0.6666666666666666,0.6,0.5,0.8285714285714285,0.8936696154087459,0.9145801476236259,0.8997467837685229,0,-0.3333333333333333,-0.19999999999999996,0,-0.09569479965899408,0.0040821464765953275,0.04918449210935261,0.027550280797484894,0,-0.101010101010101,-0.16298701298701296,-0.06887445887445887,0.09825783972125446,0.010511569260243658,-0.02284066759146297,0.007217260286015548
45 | 2017020839,2017,FALSE,0.4461538461538462,0.46139875874687836,0.4554106838195556,0.47376268149684775,0.2857142857142857,0.28968253968253965,0.42142857142857143,0.3742857142857143,0,0,0.4,0.4,0.8611111111111112,0.8816944121822171,0.894730933023616,0.8999717068897876,FALSE,12,TRUE,0.4827586206896552,0.5039354132457581,0.5206678841030612,0.5181167976504107,0.3333333333333333,0.2611111111111111,0.49,0.4362121212121212,0,0,0.4,0.4,0.9333333333333333,0.9042285200179937,0.9358704453441296,0.910063766171576,0,0,0,0,-0.036604774535809015,-0.04253665449887972,-0.06525720028350562,-0.044354116153562995,-0.047619047619047616,0.028571428571428525,-0.06857142857142856,-0.06192640692640694,-0.07222222222222219,-0.022534107835776584,-0.041139512320513605,-0.010092059281788468
46 | 2017020855,2017,FALSE,0.4,0.41440422322775267,0.44255354096241273,0.4616793481635144,0.2,0.24523809523809523,0.34714285714285714,0.3342857142857143,0,0,0.2,0.3,0.8787878787878788,0.8889094193972243,0.8904885087811918,0.8965461469424885,TRUE,25,TRUE,0.4931506849315068,0.4965257097478658,0.5243394231523932,0.5258671878678223,0.5,0.611111111111111,0.6980952380952381,0.5922619047619048,1,1,1,0.7,0.9189189189189189,0.9454622892122891,0.9497146495130366,0.9434303301449395,-1,-1,-0.8,-0.39999999999999997,-0.0931506849315068,-0.08212148652011314,-0.08178588218998051,-0.06418783970430786,-0.3,-0.3658730158730158,-0.35095238095238096,-0.25797619047619047,-0.040131040131040074,-0.0565528698150648,-0.059226140731844845,-0.046884183202451024
47 | 2017020878,2017,TRUE,0.5774647887323944,0.4745395449620802,0.47233221299460587,0.47587743993997966,1,0.49523809523809526,0.4138095238095238,0.4057142857142857,1,0.3333333333333333,0.2,0.4,1,0.9132996632996633,0.904774223066906,0.9108318612282027,FALSE,13,FALSE,0.5555555555555556,0.5797139644388222,0.5560250999747687,0.5217889113648043,0.5833333333333334,0.5277777777777778,0.57,0.5126190476190476,1,0.6666666666666666,0.8,0.6,0.84375,0.8971094491927826,0.9114799552299553,0.906502970244537,0,-0.3333333333333333,-0.6000000000000001,-0.19999999999999996,0.021909233176838794,-0.105174419476742,-0.0836928869801628,-0.04591147142482466,0.41666666666666663,-0.032539682539682535,-0.15619047619047616,-0.10690476190476195,0.15625,0.016190214106880707,-0.0067057321630492606,0.004328890983665734
48 | 2017020889,2017,FALSE,0.5211267605633803,0.49953051643192486,0.4683608437958065,0.48337473138093306,0.42857142857142855,0.5428571428571428,0.43285714285714283,0.44857142857142857,0,0.3333333333333333,0.2,0.4,0.8823529411764706,0.9203802733214498,0.9098162398736287,0.9018449331236276,FALSE,28,TRUE,0.6119402985074627,0.5648842227016927,0.5781761769726509,0.5326298538586604,0.3333333333333333,0.47777777777777775,0.4116666666666667,0.4798593073593074,0,0.6666666666666666,0.4,0.4,0.9230769230769231,0.906656863646111,0.8836910878846362,0.8959725200437103,0,-0.3333333333333333,-0.2,0,-0.09081353794408242,-0.06535370626976783,-0.10981533317684439,-0.04925512247772729,0.09523809523809523,0.06507936507936507,0.02119047619047615,-0.03128787878787881,-0.040723981900452566,0.013723409675338805,0.026125151988992457,0.00587241307991726
49 | 2017020904,2017,TRUE,0.5945945945945946,0.5643953812967898,0.5078679980088431,0.4905953848702433,0.2,0.5428571428571428,0.4228571428571429,0.39357142857142857,0,0.3333333333333333,0.2,0.3,0.8666666666666667,0.9163398692810457,0.8977837195484254,0.8947615997902942,TRUE,6,FALSE,0.5714285714285714,0.49257409257409257,0.5220577727357388,0.5320274457730426,0.7142857142857143,0.5575396825396826,0.6259523809523809,0.6129761904761905,1,0.6666666666666666,0.8,0.8,0.9333333333333333,0.903842940685046,0.9189291410344043,0.9273326324070031,-1,-0.3333333333333333,-0.6000000000000001,-0.5,0.02316602316602323,0.07182128872269722,-0.014189774726895754,-0.04143206090279933,-0.5142857142857142,-0.014682539682539741,-0.20309523809523805,-0.21940476190476188,-0.06666666666666665,0.012496928595999712,-0.021145421485978866,-0.032571032616708906
50 | 2017020926,2017,TRUE,0.34782608695652173,0.48784914737149887,0.4882024461693782,0.4718065649944669,0.8571428571428571,0.4952380952380952,0.5371428571428571,0.47928571428571426,1,0.3333333333333333,0.4,0.4,0.9777777777777777,0.9089324618736383,0.9211170528817587,0.9079239929526873,FALSE,21,FALSE,0.3939393939393939,0.4272273211108878,0.438644084974225,0.44349413810880667,0.3333333333333333,0.49206349206349204,0.45634920634920634,0.4174603174603175,0,0.3333333333333333,0.4,0.4,0.9,0.8952380952380953,0.8957575757575758,0.8918252312203925,1,0,0,0,-0.04611330698287219,0.06062182626061108,0.04955836119515317,0.028312426885660213,0.5238095238095237,0.0031746031746031633,0.0807936507936508,0.06182539682539678,0.07777777777777772,0.01369436663554302,0.02535947712418296,0.016098761732294853
51 | 2017020945,2017,FALSE,0.45161290322580644,0.4646778615923076,0.4985250268145395,0.4705392838884761,0.4444444444444444,0.5005291005291005,0.586031746031746,0.46658730158730155,0,0.3333333333333333,0.4,0.3,0.8529411764705882,0.8991285403050109,0.9159477124183006,0.9032181105997462,FALSE,54,TRUE,0.45614035087719296,0.5287481822927731,0.5243180528225163,0.5141488993722207,0.7,0.45555555555555555,0.5761904761904761,0.5007539682539682,1,0.6666666666666666,0.8,0.6,0.9032258064516129,0.9041055718475073,0.9226635655779148,0.8911245618380293,-1,-0.3333333333333333,-0.4,-0.3,-0.004527447651386518,-0.06407032070046548,-0.025793026007976838,-0.043609615483744646,-0.25555555555555554,0.04497354497354494,0.009841269841269873,-0.03416666666666668,-0.050284629981024676,-0.004977031542496402,-0.006715853159614205,0.012093548761716866
52 | 2017020963,2017,FALSE,0.4925373134328358,0.4306587678717213,0.48153953175462777,0.4769358723746168,0.3333333333333333,0.5449735449735449,0.45269841269841266,0.4332539682539682,0,0.3333333333333333,0.2,0.2,0.8235294117647058,0.8847494553376906,0.8806535947712418,0.8927139089190739,TRUE,53,TRUE,0.59375,0.5105092592592593,0.4576017063803788,0.45747469472405755,1,0.7619047619047619,0.7333333333333333,0.6023809523809524,1,0.6666666666666666,0.8,0.6,1,0.956140350877193,0.9576950506347168,0.9239907549789745,-1,-0.3333333333333333,-0.6000000000000001,-0.39999999999999997,-0.1012126865671642,-0.07985049138753797,0.02393782537424899,0.01946117765055927,-0.6666666666666667,-0.21693121693121697,-0.2806349206349206,-0.16912698412698413,-0.17647058823529416,-0.0713908955395024,-0.077041455863475,-0.031276846059900665
53 | 2017020967,2017,FALSE,0.391304347826087,0.4451515214949097,0.4555750492071691,0.4619679465014878,0.75,0.5092592592592593,0.516984126984127,0.4749206349206349,1,0.3333333333333333,0.4,0.3,0.9761904761904762,0.8842203548085901,0.8994211017740429,0.9046186708238358,FALSE,21,TRUE,0.5,0.4810413129598189,0.472497412326482,0.4411691050961676,0.16666666666666666,0.3740740740740741,0.3196825396825397,0.42968253968253967,0,0.3333333333333333,0.2,0.4,0.8275862068965517,0.8731705575783537,0.8610451916898694,0.887911013878514,1,0,0.2,-0.10000000000000003,-0.10869565217391303,-0.03588979146490917,-0.01692236311931289,0.020798841405320234,0.5833333333333334,0.13518518518518519,0.1973015873015873,0.045238095238095244,0.14860426929392445,0.011049797230236358,0.038375910084173515,0.016707656945321836
54 | 2017020982,2017,TRUE,0.5333333333333333,0.47239166486408535,0.44332279695491683,0.47559539748187996,0.25,0.4444444444444444,0.526984126984127,0.4749206349206349,0,0.3333333333333333,0.4,0.3,0.8928571428571429,0.8975256769374417,0.9046591970121381,0.9012214582802818,FALSE,3,FALSE,0.5277777777777778,0.4498224251648909,0.4808318125161652,0.4835662949967066,0.4,0.2833333333333333,0.3093939393939394,0.3411255411255411,0,0,0,0.2,0.9117647058823529,0.9101619553556426,0.8718956990119113,0.8888582187353171,0,0.3333333333333333,0.4,0.09999999999999998,0.005555555555555536,0.022569239699194443,-0.03750901556124836,-0.007970897514826658,-0.15000000000000002,0.1611111111111111,0.2175901875901876,0.1337950937950938,-0.018907563025210017,-0.012636278418200919,0.03276349800022682,0.012363239544964655
55 | 2017020997,2017,TRUE,0.6707317073170732,0.5317897961588312,0.5079039210270272,0.49805318359820266,0.45454545454545453,0.48484848484848486,0.44646464646464645,0.4918037518037518,0,0.3333333333333333,0.2,0.3,0.7777777777777778,0.8822751322751323,0.8646591970121382,0.8928881249469485,FALSE,18,FALSE,0.5384615384615384,0.526691618240914,0.5202457401753177,0.5023687763828795,0.6666666666666666,0.7373737373737373,0.7374242424242424,0.6151406926406926,1,1,1,0.8,0.9444444444444444,0.9387464387464387,0.9462543991955756,0.9323782344769347,-1,-0.6666666666666667,-0.8,-0.5,0.13227016885553478,0.005098177917917113,-0.012341819148290467,-0.004315592784676803,-0.2121212121212121,-0.2525252525252525,-0.290959595959596,-0.1233369408369408,-0.16666666666666663,-0.05647130647130638,-0.08159520218343741,-0.03949010952998622
56 | 2017021019,2017,TRUE,0.38571428571428573,0.5299264421215641,0.494724197524723,0.4966246121696313,0.42857142857142855,0.3777056277056277,0.4432900432900433,0.5146608946608946,0,0,0.2,0.3,0.9069767441860466,0.8592038882736558,0.8754663105552298,0.8957070114867652,TRUE,2,FALSE,0.35064935064935066,0.48416706750040084,0.49343584656084655,0.4483071991234305,0.4,0.3277777777777778,0.36333333333333334,0.4791666666666667,0,0,0,0.2,0.94,0.8615384615384615,0.8854470260921874,0.9121125186483345,0,0,0.2,0.09999999999999998,0.03506493506493508,0.04575937462116331,0.0012883509638764723,0.0483174130462008,0.028571428571428525,0.04992784992784993,0.07995670995670995,0.03549422799422791,-0.03302325581395338,-0.002334573264805684,-0.009980715536957563,-0.0164055071615693
57 | 2017021031,2017,TRUE,0.40350877192982454,0.4866515883203945,0.4769184892241208,0.4792290104893743,0.5714285714285714,0.4848484848484848,0.4909090909090909,0.4718037518037518,1,0.3333333333333333,0.4,0.3,0.9117647058823529,0.8655064092820591,0.8931133693787593,0.8868834820750006,FALSE,53,FALSE,0.49295774647887325,0.4796787259504741,0.528296366005067,0.4702407028593896,0.42857142857142855,0.5734126984126984,0.594047619047619,0.6494047619047619,0,0.6666666666666666,0.6,0.7,0.8888888888888888,0.9205026455026455,0.9300793650793651,0.9412556289096725,1,-0.3333333333333333,-0.19999999999999996,-0.39999999999999997,-0.08944897454904871,0.006972862369920396,-0.05137787678094624,0.008988307629984649,0.14285714285714285,-0.08856421356421357,-0.10313852813852814,-0.17760101010101015,0.022875816993464082,-0.05499623622058636,-0.03696599570060577,-0.05437214683467195
58 | 2017021047,2017,TRUE,0.3333333333333333,0.37418546365914784,0.46532428632557005,0.4604496677663696,0.3333333333333333,0.4444444444444444,0.40757575757575754,0.4622799422799423,0,0.3333333333333333,0.2,0.3,0.9411764705882353,0.9199726402188783,0.8861105682583111,0.892765835016177,FALSE,30,FALSE,0.5,0.5112889983579638,0.5372331691297209,0.5309987373461047,0.75,0.5583333333333333,0.5554545454545454,0.5968939393939394,1,0.6666666666666666,0.6,0.7,0.935483870967742,0.877256781038761,0.8623540686232567,0.8996633096071602,-1,-0.3333333333333333,-0.39999999999999997,-0.39999999999999997,-0.16666666666666669,-0.13710353469881598,-0.07190888280415081,-0.07054906957973511,-0.4166666666666667,-0.11388888888888893,-0.1478787878787879,-0.13461399711399713,0.005692599620493288,0.04271585918011722,0.02375649963505444,-0.006897474590983221
59 | 2017021062,2017,FALSE,0.47058823529411764,0.4024767801857585,0.4527752667177269,0.4480490318363219,0.2857142857142857,0.3968253968253968,0.4147186147186147,0.47085137085137085,0,0.3333333333333333,0.2,0.3,0.8611111111111112,0.9046840958605664,0.8797613619091047,0.8922102794606215,FALSE,53,TRUE,0.4745762711864407,0.5447335614439935,0.5160558231409059,0.5088923598287799,0.2857142857142857,0.4603174603174603,0.5345238095238095,0.5922619047619048,0,0.3333333333333333,0.6,0.6,0.8387096774193549,0.889591678965493,0.9082788169031053,0.9243148470480439,0,0,-0.39999999999999997,-0.3,-0.00398803589232305,-0.14225678125823504,-0.06328055642317904,-0.06084332799245801,0,-0.06349206349206349,-0.1198051948051948,-0.12141053391053391,0.02240143369175629,0.015092416895073457,-0.0285174549940006,-0.03210456758742242
60 | 2017021069,2017,FALSE,0.5294117647058824,0.4444444444444444,0.42451127819548873,0.4662075996112579,0,0.20634920634920634,0.32380952380952377,0.3851370851370851,0,0,0.2,0.2,0.9583333333333334,0.9202069716775599,0.9158724730202159,0.8902658350161771,FALSE,26,TRUE,0.4819277108433735,0.5235346489626769,0.5004844257412424,0.5018453501342415,0.2222222222222222,0.44907407407407407,0.5723015873015873,0.5211507936507936,0,0.3333333333333333,0.6,0.6,0.8372093023255813,0.8662492546213476,0.9024876480109039,0.910310424072052,0,-0.3333333333333333,-0.39999999999999997,-0.39999999999999997,0.04748405386250887,-0.07909020451823245,-0.07597314754575368,-0.03563775052298357,-0.2222222222222222,-0.24272486772486773,-0.24849206349206354,-0.13601370851370853,0.12112403100775204,0.05395771705621233,0.01338482500931204,-0.020044589055874917
61 | 2017021082,2017,FALSE,0.5147058823529411,0.5049019607843137,0.4503095975232198,0.4725168975239714,0,0.09523809523809523,0.23809523809523808,0.34069264069264066,0,0,0.2,0.2,0.9090909090909091,0.9095117845117845,0.9162953060011884,0.8958808082782092,FALSE,24,TRUE,0.4888888888888889,0.48391583181542197,0.4443401094178917,0.4314097505137504,0.3333333333333333,0.3333333333333333,0.5333333333333333,0.5833333333333334,0,0,0.4,0.5,0.8260869565217391,0.8763919198701807,0.9108351519221085,0.9326941006059999,0,0,-0.2,-0.3,0.02581699346405225,0.020986128968891737,0.005969488105328102,0.041107147010221,-0.3333333333333333,-0.23809523809523808,-0.29523809523809524,-0.2426406926406927,0.08300395256916993,0.03311986464160377,0.005460154079079849,-0.036813292327790736
62 | 2017021107,2017,TRUE,0.463768115942029,0.5026285876669508,0.46236146632566066,0.46963997777489075,0,0,0.1238095238095238,0.30735930735930733,0,0,0,0.2,0.9189189189189189,0.9287810537810538,0.9177261486085015,0.9054197589936304,FALSE,28,FALSE,0.45161290322580644,0.5355049546700401,0.5557645112635625,0.5371780598092585,0.6363636363636364,0.6109307359307359,0.5665584415584416,0.5308189033189034,1,1,0.8,0.6,0.8823529411764706,0.886608489549666,0.9165804783451842,0.9147794577493287,-1,-1,-0.8,-0.39999999999999997,0.012155212716222552,-0.03287636700308927,-0.09340304493790186,-0.06753808203436773,-0.6363636363636364,-0.6109307359307359,-0.4427489177489178,-0.22345959595959602,0.0365659777424483,0.04217256423138782,0.0011456702633173554,-0.009359698755698309
63 | 2017021130,2017,FALSE,0.5081967213114754,0.49555690653548184,0.4973341439212891,0.4813292151234296,0.375,0.125,0.13214285714285715,0.26985930735930735,0,0,0,0.1,0.8333333333333334,0.8871143871143871,0.8961575211575211,0.8911340447079161,FALSE,54,TRUE,0.4246575342465753,0.5024720516454102,0.4885715050320419,0.507191168030031,1,0.5353535353535354,0.5412121212121213,0.5150505050505051,1,0.3333333333333333,0.6,0.5,1,0.8603174603174604,0.8999404761904762,0.898717105263158,-1,-0.3333333333333333,-0.6,-0.4,0.0835391870649001,-0.006915145109928356,0.00876263888924722,-0.02586195290660137,-0.625,-0.41035353535353536,-0.40906926406926414,-0.24519119769119774,-0.16666666666666663,0.02679692679692669,-0.0037829550329551154,-0.007583060555241827
64 | 2017021143,2017,FALSE,0.5172413793103449,0.49640207218794974,0.5066647727245346,0.4797200197211307,0.2,0.19166666666666668,0.115,0.26485930735930735,0,0,0,0.1,0.8571428571428572,0.8697983697983698,0.8953638703638703,0.8875626161364876,TRUE,16,TRUE,0.5294117647058824,0.49826803961021027,0.4906381605138855,0.4675775600934176,0.16666666666666666,0.3287037037037037,0.3972222222222222,0.43747474747474746,0,0,0.2,0.3,0.875,0.865599593495935,0.8823405253283302,0.8844992474838994,0,0,-0.2,-0.19999999999999998,-0.012170385395537497,-0.0018659674222605283,0.016026612210649105,0.012142459627713098,0.033333333333333354,-0.13703703703703704,-0.2822222222222222,-0.1726154401154401,-0.017857142857142794,0.004198776302434815,0.013023345035540146,0.003063368652588161
65 | 2017021148,2017,FALSE,0.39705882352941174,0.474165641383744,0.48019418448924045,0.4523527313423646,0.7142857142857143,0.4297619047619048,0.25785714285714284,0.29083333333333333,1,0.3333333333333333,0.2,0.2,0.9512195121951219,0.8805652342237709,0.8939411061362281,0.904906789578222,FALSE,19,TRUE,0.47619047619047616,0.5587655914522776,0.5698345182700594,0.5036934497095003,0.6666666666666666,0.5978835978835979,0.532063492063492,0.47774891774891776,1,1,0.8,0.6,0.9545454545454546,0.8904040404040404,0.8812779973649539,0.8942397116949546,0,-0.6666666666666667,-0.6000000000000001,-0.39999999999999997,-0.07913165266106442,-0.08459995006853355,-0.08964033378081893,-0.05134071836713566,0.04761904761904767,-0.16812169312169312,-0.2742063492063492,-0.18691558441558442,-0.003325942350332678,-0.009838806180269555,0.01266310877127419,0.010667077883267373
66 | 2017021164,2017,FALSE,0.4444444444444444,0.452914882428067,0.4661418969075411,0.45822574721538045,0.2,0.37142857142857144,0.2978571428571429,0.2679761904761905,0,0.3333333333333333,0.2,0.2,0.84,0.882787456445993,0.8801229243180463,0.8982091151596173,TRUE,25,TRUE,0.5384615384615384,0.5172147375537206,0.5132700190028205,0.507036795215696,0.4,0.3873015873015873,0.4123809523809524,0.3711904761904762,0,0,0,0.1,0.9166666666666666,0.875,0.8706969696969697,0.8903778128189893,0,0.3333333333333333,0.2,0.1,-0.09401709401709402,-0.06429985512565362,-0.047128122095279446,-0.04881104800031555,-0.2,-0.015873015873015872,-0.11452380952380953,-0.1032142857142857,-0.07666666666666666,0.007787456445992991,0.009425954621076538,0.00783130234062801
67 | 2017021185,2017,TRUE,0.46551724137931033,0.4356735031177222,0.46649172199499733,0.464426594160329,0.8,0.5714285714285714,0.45785714285714285,0.29083333333333333,1,0.6666666666666666,0.4,0.2,0.967741935483871,0.9196538158929977,0.8898875276310367,0.9038068381197691,TRUE,24,FALSE,0.5428571428571428,0.3994708994708995,0.48462250537722235,0.4744425239723682,0.5555555555555556,0.6518518518518519,0.6577777777777778,0.6288888888888888,1,0.6666666666666666,0.8,0.6,0.875,0.9345238095238095,0.9238515406162465,0.9248433462691775,0,0,-0.4,-0.39999999999999997,-0.07733990147783248,0.036202603646822706,-0.018130783382225013,-0.010015929812039193,0.24444444444444446,-0.08042328042328051,-0.19992063492063494,-0.3380555555555555,0.092741935483871,-0.014869993630811873,-0.03396401298520979,-0.021036508149408384
68 | 2017021198,2017,TRUE,0.5555555555555556,0.4885057471264368,0.4759634888438134,0.48664881638255125,0.8,0.6000000000000001,0.5428571428571429,0.3375,1,0.6666666666666666,0.6,0.3,0.9583333333333334,0.9220250896057348,0.9148875276310366,0.9055225243942789,TRUE,22,FALSE,0.4807692307692308,0.47335164835164834,0.4818705780520849,0.4636910546361957,0.3,0.44814814814814813,0.5588888888888889,0.49396825396825395,0,0.3333333333333333,0.6,0.5,0.7407407407407407,0.8467320920436467,0.8789636249740872,0.9110646046948359,1,0.3333333333333333,0,-0.2,0.0747863247863248,0.01515409877478846,-0.005907089208271488,0.022957761746355554,0.5,0.15185185185185196,-0.016031746031745953,-0.15646825396825392,0.21759259259259267,0.07529299756208807,0.03592390265694945,-0.0055420803005569175
69 | 2017021208,2017,TRUE,0.45454545454545453,0.49187275049344015,0.46342430389083533,0.48504453830768496,0.6666666666666666,0.7555555555555555,0.6361904761904762,0.3755952380952381,1,1,0.8,0.4,0.9722222222222222,0.9660991636798089,0.9379034006469097,0.91663363550539,TRUE,29,FALSE,0.42424242424242425,0.4972867634157957,0.4967847564621758,0.4950110326463525,0.8333333333333334,0.6222222222222222,0.6983333333333334,0.6653174603174603,1,0.6666666666666666,0.8,0.9,0.9736842105263158,0.9274185463659148,0.9377011278195488,0.9308242481203007,0,0.33333333333333337,0,-0.5,0.030303030303030276,-0.005414012922355549,-0.03336045257134046,-0.009966494338667531,-0.16666666666666674,0.1333333333333333,-0.062142857142857166,-0.2897222222222222,-0.0014619883040936088,0.03868061731389405,2.0227282736084984e-4,-0.014190612614910725
70 | 2017021236,2017,TRUE,0.4915254237288136,0.5005421446099412,0.4823176239307157,0.48125590420997805,0.5555555555555556,0.674074074074074,0.6044444444444445,0.4311507936507937,1,1,0.8,0.5,0.8666666666666667,0.9324074074074075,0.9209928315412187,0.9074669688387234,FALSE,54,FALSE,0.5507246376811594,0.5153781579155777,0.5252075242892982,0.4978531237652087,0.6,0.5238095238095238,0.5742857142857143,0.5610822510822511,1,0.6666666666666666,0.6,0.5,0.935483870967742,0.9055779569892474,0.9297835558027439,0.9151082281178221,0,0.33333333333333337,0.20000000000000007,0,-0.05919921395234584,-0.014836013305636553,-0.04288990035858248,-0.016597219555230636,-0.0444444444444444,0.1502645502645502,0.030158730158730163,-0.1299314574314574,-0.0688172043010753,0.026829450418160095,-0.008790724261525162,-0.0076412592790987555
71 | 2017021250,2017,TRUE,0.390625,0.44556529275808937,0.4715537350418268,0.46884781597468395,0.5,0.5740740740740741,0.6644444444444445,0.48115079365079366,0,0.6666666666666666,0.8,0.5,0.8974358974358975,0.9121082621082621,0.9324800110283982,0.9063014676732222,TRUE,53,FALSE,0.328125,0.4022090752378458,0.47351620520232,0.4646029712288676,0.8,0.7111111111111111,0.7066666666666667,0.6085497835497835,1,0.6666666666666666,0.8,0.6,0.9767441860465116,0.9541528239202658,0.9486985909038835,0.9288689601843725,-1,0,0,-0.09999999999999998,0.0625,0.043356217520243545,-0.0019624701604932238,0.004244844745816323,-0.30000000000000004,-0.13703703703703707,-0.04222222222222216,-0.12739898989898985,-0.07930828861061412,-0.04204456181200367,-0.01621857987548536,-0.02256749251115031
72 | 2017021269,2017,FALSE,0.6779661016949152,0.5200388418079096,0.5140435071049478,0.4902676145499726,0.5714285714285714,0.5423280423280423,0.6187301587301587,0.5382936507936508,1,0.6666666666666666,0.8,0.6,0.8421052631578947,0.868735942420153,0.9073526765632028,0.8986201020971198,FALSE,22,TRUE,0.5,0.4992319508448541,0.5047839257516678,0.48810747168209606,0.5714285714285714,0.3238095238095238,0.32095238095238093,0.43492063492063493,1,0.3333333333333333,0.2,0.4,0.9090909090909091,0.8946969696969697,0.8716329966329966,0.8937242367294679,0,0.3333333333333333,0.6000000000000001,0.19999999999999996,0.17796610169491522,0.02080689096305549,0.009259581353280089,0.0021601428678765244,0,0.2185185185185185,0.29777777777777775,0.10337301587301584,-0.06698564593301437,-0.025961027276816706,0.035719679930206194,0.004895865367651964
73 |
--------------------------------------------------------------------------------